diff --git a/configs/development.ini b/configs/development.ini --- a/configs/development.ini +++ b/configs/development.ini @@ -345,6 +345,10 @@ rc_cache.cache_repo.arguments.filename = #rc_cache.cache_repo.arguments.db = 1 #rc_cache.cache_repo.arguments.distributed_lock = true +## cache settings for SQL queries +rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru +rc_cache.sql_cache_short.expiration_time = 30 + #################################### ### BEAKER CACHE #### @@ -355,16 +359,12 @@ rc_cache.cache_repo.arguments.filename = beaker.cache.data_dir = %(here)s/data/cache/beaker_data beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock -beaker.cache.regions = long_term, sql_cache_short +beaker.cache.regions = long_term beaker.cache.long_term.type = memorylru_base beaker.cache.long_term.expire = 172800 beaker.cache.long_term.key_length = 256 -beaker.cache.sql_cache_short.type = memorylru_base -beaker.cache.sql_cache_short.expire = 10 -beaker.cache.sql_cache_short.key_length = 256 - #################################### ### BEAKER SESSION #### diff --git a/configs/production.ini b/configs/production.ini --- a/configs/production.ini +++ b/configs/production.ini @@ -320,6 +320,10 @@ rc_cache.cache_repo.arguments.filename = #rc_cache.cache_repo.arguments.db = 1 #rc_cache.cache_repo.arguments.distributed_lock = true +## cache settings for SQL queries +rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru +rc_cache.sql_cache_short.expiration_time = 30 + #################################### ### BEAKER CACHE #### @@ -330,16 +334,12 @@ rc_cache.cache_repo.arguments.filename = beaker.cache.data_dir = %(here)s/data/cache/beaker_data beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock -beaker.cache.regions = long_term, sql_cache_short +beaker.cache.regions = long_term beaker.cache.long_term.type = memory beaker.cache.long_term.expire = 172800 beaker.cache.long_term.key_length = 256 -beaker.cache.sql_cache_short.type = memory 
-beaker.cache.sql_cache_short.expire = 10 -beaker.cache.sql_cache_short.key_length = 256 - #################################### ### BEAKER SESSION #### diff --git a/rhodecode/apps/admin/tests/test_admin_permissions.py b/rhodecode/apps/admin/tests/test_admin_permissions.py --- a/rhodecode/apps/admin/tests/test_admin_permissions.py +++ b/rhodecode/apps/admin/tests/test_admin_permissions.py @@ -25,7 +25,7 @@ from rhodecode.model.meta import Session from rhodecode.model.permission import PermissionModel from rhodecode.model.ssh_key import SshKeyModel from rhodecode.tests import ( - TestController, clear_all_caches, assert_session_flash) + TestController, clear_cache_regions, assert_session_flash) def route_path(name, params=None, **kwargs): @@ -221,22 +221,21 @@ class TestAdminPermissionsController(Tes def test_index_ips(self): self.log_user() response = self.app.get(route_path('admin_permissions_ips')) - # TODO: Test response... response.mustcontain('All IP addresses are allowed') def test_add_delete_ips(self): + clear_cache_regions(['sql_cache_short']) self.log_user() - clear_all_caches() # ADD default_user_id = User.get_default_user().user_id self.app.post( route_path('edit_user_ips_add', user_id=default_user_id), - params={'new_ip': '127.0.0.0/24', 'csrf_token': self.csrf_token}) + params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token}) response = self.app.get(route_path('admin_permissions_ips')) - response.mustcontain('127.0.0.0/24') - response.mustcontain('127.0.0.0 - 127.0.0.255') + response.mustcontain('0.0.0.0/24') + response.mustcontain('0.0.0.0 - 0.0.0.255') # DELETE default_user_id = User.get_default_user().user_id @@ -249,11 +248,11 @@ class TestAdminPermissionsController(Tes assert_session_flash(response, 'Removed ip address from user whitelist') - clear_all_caches() + clear_cache_regions(['sql_cache_short']) response = self.app.get(route_path('admin_permissions_ips')) response.mustcontain('All IP addresses are allowed') - 
response.mustcontain(no=['127.0.0.0/24']) - response.mustcontain(no=['127.0.0.0 - 127.0.0.255']) + response.mustcontain(no=['0.0.0.0/24']) + response.mustcontain(no=['0.0.0.0 - 0.0.0.255']) def test_index_overview(self): self.log_user() diff --git a/rhodecode/apps/login/tests/test_password_reset.py b/rhodecode/apps/login/tests/test_password_reset.py --- a/rhodecode/apps/login/tests/test_password_reset.py +++ b/rhodecode/apps/login/tests/test_password_reset.py @@ -22,7 +22,7 @@ import pytest from rhodecode.lib import helpers as h from rhodecode.tests import ( - TestController, clear_all_caches, + TestController, clear_cache_regions, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) from rhodecode.tests.fixture import Fixture from rhodecode.tests.utils import AssertResponse @@ -64,7 +64,7 @@ class TestPasswordReset(TestController): ]) def test_password_reset_settings( self, pwd_reset_setting, show_link, show_reset): - clear_all_caches() + clear_cache_regions() self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS) params = { 'csrf_token': self.csrf_token, diff --git a/rhodecode/authentication/views.py b/rhodecode/authentication/views.py --- a/rhodecode/authentication/views.py +++ b/rhodecode/authentication/views.py @@ -31,7 +31,6 @@ from rhodecode.authentication.base impor from rhodecode.lib import helpers as h from rhodecode.lib.auth import ( LoginRequired, HasPermissionAllDecorator, CSRFRequired) -from rhodecode.lib.caches import clear_cache_manager from rhodecode.model.forms import AuthSettingsForm from rhodecode.model.meta import Session from rhodecode.model.settings import SettingsModel diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -427,20 +427,47 @@ def _sanitize_vcs_settings(settings): def _sanitize_cache_settings(settings): _string_setting(settings, 'cache_dir', os.path.join(tempfile.gettempdir(), 'rc_cache')) + # cache_perms + _string_setting( + 
settings, + 'rc_cache.cache_perms.backend', + 'dogpile.cache.rc.file_namespace') + _int_setting( + settings, + 'rc_cache.cache_perms.expiration_time', + 60) + _string_setting( + settings, + 'rc_cache.cache_perms.arguments.filename', + os.path.join(tempfile.gettempdir(), 'rc_cache_1')) - _string_setting(settings, 'rc_cache.cache_perms.backend', - 'dogpile.cache.rc.file_namespace') - _int_setting(settings, 'rc_cache.cache_perms.expiration_time', - 60) - _string_setting(settings, 'rc_cache.cache_perms.arguments.filename', - os.path.join(tempfile.gettempdir(), 'rc_cache_1')) + # cache_repo + _string_setting( + settings, + 'rc_cache.cache_repo.backend', + 'dogpile.cache.rc.file_namespace') + _int_setting( + settings, + 'rc_cache.cache_repo.expiration_time', + 60) + _string_setting( + settings, + 'rc_cache.cache_repo.arguments.filename', + os.path.join(tempfile.gettempdir(), 'rc_cache_2')) - _string_setting(settings, 'rc_cache.cache_repo.backend', - 'dogpile.cache.rc.file_namespace') - _int_setting(settings, 'rc_cache.cache_repo.expiration_time', - 60) - _string_setting(settings, 'rc_cache.cache_repo.arguments.filename', - os.path.join(tempfile.gettempdir(), 'rc_cache_2')) + # sql_cache_short + _string_setting( + settings, + 'rc_cache.sql_cache_short.backend', + 'dogpile.cache.rc.memory_lru') + _int_setting( + settings, + 'rc_cache.sql_cache_short.expiration_time', + 30) + _int_setting( + settings, + 'rc_cache.sql_cache_short.max_size', + 10000) def _int_setting(settings, name, default): diff --git a/rhodecode/lib/caches.py b/rhodecode/lib/caches.py --- a/rhodecode/lib/caches.py +++ b/rhodecode/lib/caches.py @@ -23,7 +23,7 @@ import beaker import logging import threading -from beaker.cache import _cache_decorate, cache_regions, region_invalidate +from beaker.cache import _cache_decorate, region_invalidate from sqlalchemy.exc import IntegrityError from rhodecode.lib.utils import safe_str, sha1 @@ -84,44 +84,6 @@ def configure_cache_region( 
beaker.cache.cache_regions[region_name] = region_settings -def get_cache_manager(region_name, cache_name, custom_ttl=None): - """ - Creates a Beaker cache manager. Such instance can be used like that:: - - _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name) - cache_manager = caches.get_cache_manager('some_namespace_name', _namespace) - _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id) - def heavy_compute(): - ... - result = cache_manager.get(_cache_key, createfunc=heavy_compute) - - :param region_name: region from ini file - :param cache_name: custom cache name, usually prefix+repo_name. eg - file_switcher_repo1 - :param custom_ttl: override .ini file timeout on this cache - :return: instance of cache manager - """ - - cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG) - if custom_ttl: - log.debug('Updating region %s with custom ttl: %s', - region_name, custom_ttl) - cache_config.update({'expire': custom_ttl}) - - return beaker.cache.Cache._get_cache(cache_name, cache_config) - - -def clear_cache_manager(cache_manager): - """ - namespace = 'foobar' - cache_manager = get_cache_manager('some_namespace_name', namespace) - clear_cache_manager(cache_manager) - """ - - log.debug('Clearing all values for cache manager %s', cache_manager) - cache_manager.clear() - - def compute_key_from_params(*args): """ Helper to compute key from given params to be used in cache manager diff --git a/rhodecode/lib/caching_query.py b/rhodecode/lib/caching_query.py --- a/rhodecode/lib/caching_query.py +++ b/rhodecode/lib/caching_query.py @@ -18,16 +18,16 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ -""" -caching_query.py +"""caching_query.py -Represent persistence structures which allow the usage of -Beaker caching with SQLAlchemy. 
+Represent functions and classes +which allow the usage of Dogpile caching with SQLAlchemy. +Introduces a query option called FromCache. The three new concepts introduced here are: * CachingQuery - a Query subclass that caches and - retrieves results in/from Beaker. + retrieves results in/from dogpile.cache. * FromCache - a query option that establishes caching parameters on a Query * RelationshipCache - a variant of FromCache which is specific @@ -36,57 +36,44 @@ The three new concepts introduced here a a Query. The rest of what's here are standard SQLAlchemy and -Beaker constructs. +dogpile.cache constructs. """ -import beaker -from beaker.exceptions import BeakerException - from sqlalchemy.orm.interfaces import MapperOption from sqlalchemy.orm.query import Query from sqlalchemy.sql import visitors +from dogpile.cache.api import NO_VALUE from rhodecode.lib.utils2 import safe_str class CachingQuery(Query): - """A Query subclass which optionally loads full results from a Beaker + """A Query subclass which optionally loads full results from a dogpile cache region. - The CachingQuery stores additional state that allows it to consult - a Beaker cache before accessing the database: - - * A "region", which is a cache region argument passed to a - Beaker CacheManager, specifies a particular cache configuration - (including backend implementation, expiration times, etc.) - * A "namespace", which is a qualifying name that identifies a - group of keys within the cache. A query that filters on a name - might use the name "by_name", a query that filters on a date range - to a joined table might use the name "related_date_range". - - When the above state is present, a Beaker cache is retrieved. - - The "namespace" name is first concatenated with - a string composed of the individual entities and columns the Query - requests, i.e. such as ``Query(User.id, User.name)``. - - The Beaker cache is then loaded from the cache manager based - on the region and composed namespace. 
The key within the cache - itself is then constructed against the bind parameters specified - by this query, which are usually literals defined in the - WHERE clause. + The CachingQuery optionally stores additional state that allows it to consult + a dogpile.cache cache before accessing the database, in the form + of a FromCache or RelationshipCache object. Each of these objects + refer to the name of a :class:`dogpile.cache.Region` that's been configured + and stored in a lookup dictionary. When such an object has associated + itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region` + is used to locate a cached result. If none is present, then the + Query is invoked normally, the results being cached. The FromCache and RelationshipCache mapper options below represent the "public" method of configuring this state upon the CachingQuery. """ + def _get_region(self): + from rhodecode.lib.rc_cache import region_meta + return region_meta.dogpile_cache_regions - def __init__(self, manager, *args, **kw): - self.cache_manager = manager + def __init__(self, regions, *args, **kw): + self.cache_regions = regions or self._get_region() Query.__init__(self, *args, **kw) def __iter__(self): - """override __iter__ to pull results from Beaker + """override __iter__ to pull results from dogpile if particular attributes have been configured. Note that this approach does *not* detach the loaded objects from @@ -98,109 +85,121 @@ class CachingQuery(Query): in the cache are not the same ones in the current Session. """ - if hasattr(self, '_cache_parameters'): + super_ = super(CachingQuery, self) + + if hasattr(self, '_cache_region'): + return self.get_value(createfunc=lambda: list(super_.__iter__())) + else: + return super_.__iter__() + + def _execute_and_instances(self, context): + """override _execute_and_instances to pull results from dogpile + if the query is invoked directly from an external context. 
+ + This method is necessary in order to maintain compatibility + with the "baked query" system now used by default in some + relationship loader scenarios. Note also the + RelationshipCache._generate_cache_key method which enables + the baked query to be used within lazy loads. - def caching_query(): - return list(Query.__iter__(self)) + .. versionadded:: 1.2.7 + """ + super_ = super(CachingQuery, self) - return self.get_value(createfunc=caching_query) + if context.query is not self and hasattr(self, '_cache_region'): + # special logic called when the Query._execute_and_instances() + # method is called directly from the baked query + return self.get_value( + createfunc=lambda: list( + super_._execute_and_instances(context) + ) + ) else: - return Query.__iter__(self) + return super_._execute_and_instances(context) + + def _get_cache_plus_key(self): + """Return a cache region plus key.""" + dogpile_region = self.cache_regions[self._cache_region.region] + if self._cache_region.cache_key: + key = self._cache_region.cache_key + else: + key = _key_from_query(self) + return dogpile_region, key def invalidate(self): - """Invalidate the value represented by this Query.""" + """Invalidate the cache value represented by this Query.""" - cache, cache_key = _get_cache_parameters(self) - cache.remove(cache_key) + dogpile_region, cache_key = self._get_cache_plus_key() + dogpile_region.delete(cache_key) - def get_value(self, merge=True, createfunc=None): + def get_value(self, merge=True, createfunc=None, + expiration_time=None, ignore_expiration=False): """Return the value from the cache for this query. Raise KeyError if no value present and no createfunc specified. """ - cache, cache_key = _get_cache_parameters(self) - ret = cache.get_value(cache_key, createfunc=createfunc) + dogpile_region, cache_key = self._get_cache_plus_key() + + # ignore_expiration means, if the value is in the cache + # but is expired, return it anyway. 
This doesn't make sense + # with createfunc, which says, if the value is expired, generate + # a new value. + assert not ignore_expiration or not createfunc, \ + "Can't ignore expiration and also provide createfunc" + + if ignore_expiration or not createfunc: + cached_value = dogpile_region.get(cache_key, + expiration_time=expiration_time, + ignore_expiration=ignore_expiration) + else: + cached_value = dogpile_region.get_or_create( + cache_key, + createfunc, + expiration_time=expiration_time + ) + if cached_value is NO_VALUE: + raise KeyError(cache_key) if merge: - ret = self.merge_result(ret, load=False) - return ret + cached_value = self.merge_result(cached_value, load=False) + return cached_value def set_value(self, value): """Set the value in the cache for this query.""" - cache, cache_key = _get_cache_parameters(self) - cache.put(cache_key, value) + dogpile_region, cache_key = self._get_cache_plus_key() + dogpile_region.set(cache_key, value) -def query_callable(manager, query_cls=CachingQuery): +def query_callable(regions=None, query_cls=CachingQuery): def query(*arg, **kw): - return query_cls(manager, *arg, **kw) + return query_cls(regions, *arg, **kw) return query -def get_cache_region(name, region): - if region not in beaker.cache.cache_regions: - raise BeakerException('Cache region `%s` not configured ' - 'Check if proper cache settings are in the .ini files' % region) - kw = beaker.cache.cache_regions[region] - return beaker.cache.Cache._get_cache(name, kw) +def _key_from_query(query, qualifier=None): + """Given a Query, create a cache key. - -def _get_cache_parameters(query): - """For a query with cache_region and cache_namespace configured, - return the correspoinding Cache instance and cache key, based - on this query's current criterion and parameter values. 
+ There are many approaches to this; here we use the simplest, + which is to create an md5 hash of the text of the SQL statement, + combined with stringified versions of all the bound parameters + within it. There's a bit of a performance hit with + compiling out "query.statement" here; other approaches include + setting up an explicit cache key with a particular Query, + then combining that with the bound parameter values. """ - if not hasattr(query, '_cache_parameters'): - raise ValueError("This Query does not have caching " - "parameters configured.") - region, namespace, cache_key = query._cache_parameters - - namespace = _namespace_from_query(namespace, query) - - if cache_key is None: - # cache key - the value arguments from this query's parameters. - args = [safe_str(x) for x in _params_from_query(query)] - args.extend(filter(lambda k: k not in ['None', None, u'None'], - [str(query._limit), str(query._offset)])) - - cache_key = " ".join(args) - - if cache_key is None: - raise Exception('Cache key cannot be None') + stmt = query.with_labels().statement + compiled = stmt.compile() + params = compiled.params - # get cache - #cache = query.cache_manager.get_cache_region(namespace, region) - cache = get_cache_region(namespace, region) - # optional - hash the cache_key too for consistent length - # import uuid - # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key)) - - return cache, cache_key - - -def _namespace_from_query(namespace, query): - # cache namespace - the token handed in by the - # option + class we're querying against - namespace = " ".join([namespace] + [str(x) for x in query._entities]) - - # memcached wants this - namespace = namespace.replace(' ', '_') - - return namespace - - -def _set_cache_parameters(query, region, namespace, cache_key): - - if hasattr(query, '_cache_parameters'): - region, namespace, cache_key = query._cache_parameters - raise ValueError("This query is already configured " - "for region %r namespace %r" % - (region, 
namespace)) - query._cache_parameters = region, namespace, cache_key + # here we return the key as a long string. our "key mangler" + # set up with the region will boil it down to an md5. + return " ".join( + [safe_str(compiled)] + + [safe_str(params[k]) for k in sorted(params)]) class FromCache(MapperOption): @@ -208,15 +207,12 @@ class FromCache(MapperOption): propagate_to_loaders = False - def __init__(self, region, namespace, cache_key=None): + def __init__(self, region="sql_cache_short", cache_key=None): """Construct a new FromCache. :param region: the cache region. Should be a - region configured in the Beaker CacheManager. - - :param namespace: the cache namespace. Should - be a name uniquely describing the target Query's - lexical structure. + region configured in the dictionary of dogpile + regions. :param cache_key: optional. A string cache key that will serve as the key to the query. Use this @@ -226,14 +222,11 @@ class FromCache(MapperOption): """ self.region = region - self.namespace = namespace self.cache_key = cache_key def process_query(self, query): """Process a Query during normal loading operation.""" - - _set_cache_parameters(query, self.region, self.namespace, - self.cache_key) + query._cache_region = self class RelationshipCache(MapperOption): @@ -242,27 +235,39 @@ class RelationshipCache(MapperOption): propagate_to_loaders = True - def __init__(self, region, namespace, attribute): + def __init__(self, attribute, region="sql_cache_short", cache_key=None): """Construct a new RelationshipCache. - :param region: the cache region. Should be a - region configured in the Beaker CacheManager. - - :param namespace: the cache namespace. Should - be a name uniquely describing the target Query's - lexical structure. - :param attribute: A Class.attribute which indicates a particular class relationship() whose lazy loader should be pulled from the cache. + :param region: name of the cache region. + + :param cache_key: optional. 
A string cache key + that will serve as the key to the query, bypassing + the usual means of forming a key from the Query itself. + """ self.region = region - self.namespace = namespace + self.cache_key = cache_key self._relationship_options = { (attribute.property.parent.class_, attribute.property.key): self } + def _generate_cache_key(self, path): + """Indicate to the lazy-loader strategy that a "baked" query + may be used by returning ``None``. + + If this method is omitted, the default implementation of + :class:`.MapperOption._generate_cache_key` takes place, which + returns ``False`` to disable the "baked" query from being used. + + .. versionadded:: 1.2.7 + + """ + return None + def process_query_conditionally(self, query): """Process a Query that is used within a lazy loader. @@ -271,17 +276,14 @@ class RelationshipCache(MapperOption): """ if query._current_path: - mapper, key = query._current_path[-2:] + mapper, prop = query._current_path[-2:] + key = prop.key for cls in mapper.class_.__mro__: if (cls, key) in self._relationship_options: - relationship_option = \ - self._relationship_options[(cls, key)] - _set_cache_parameters( - query, - relationship_option.region, - relationship_option.namespace, - None) + relationship_option = self._relationship_options[(cls, key)] + query._cache_region = relationship_option + break def and_(self, option): """Chain another RelationshipCache option to this one. @@ -294,32 +296,3 @@ class RelationshipCache(MapperOption): self._relationship_options.update(option._relationship_options) return self - -def _params_from_query(query): - """Pull the bind parameter values from a query. - - This takes into account any scalar attribute bindparam set up. - - E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7))) - would return [5, 7]. 
- - """ - v = [] - def visit_bindparam(bind): - - if bind.key in query._params: - value = query._params[bind.key] - elif bind.callable: - # lazyloader may dig a callable in here, intended - # to late-evaluate params after autoflush is called. - # convert to a scalar value. - value = bind.callable() - else: - value = bind.value - - v.append(value) - if query._criterion is not None: - visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam}) - for f in query._from_obj: - visitors.traverse(f, {}, {'bindparam':visit_bindparam}) - return v diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -690,7 +690,7 @@ class User(Base, BaseModel): .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) if cache: feed_tokens = feed_tokens.options( - FromCache("long_term", "get_user_feed_token_%s" % self.user_id)) + FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id)) feed_tokens = feed_tokens.all() if feed_tokens: diff --git a/rhodecode/model/meta.py b/rhodecode/model/meta.py --- a/rhodecode/model/meta.py +++ b/rhodecode/model/meta.py @@ -24,27 +24,22 @@ SQLAlchemy Metadata and Session object from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker -from beaker import cache from rhodecode.lib import caching_query - -# Beaker CacheManager. A home base for cache configurations. -cache_manager = cache.CacheManager() +__all__ = ['Base', 'Session'] -__all__ = ['Base', 'Session'] -# -# SQLAlchemy session manager. Updated by model.init_model() -# +# scoped_session. Apply our custom CachingQuery class to it, +# using a callable that will associate the dictionary +# of regions with the Query. 
+# To use caching, add this option to a query: +# .options(FromCache("sqlalchemy_cache_type", "cachekey")) Session = scoped_session( sessionmaker( - query_cls=caching_query.query_callable(cache_manager), + query_cls=caching_query.query_callable(), expire_on_commit=True, ) ) # The declarative Base Base = declarative_base() - -#to use cache use this in query -#.options(FromCache("sqlalchemy_cache_type", "cachekey")) diff --git a/rhodecode/model/settings.py b/rhodecode/model/settings.py --- a/rhodecode/model/settings.py +++ b/rhodecode/model/settings.py @@ -21,11 +21,12 @@ import os import hashlib import logging +import time from collections import namedtuple from functools import wraps import bleach -from rhodecode.lib import caches +from rhodecode.lib import caches, rc_cache from rhodecode.lib.utils2 import ( Optional, AttributeDict, safe_str, remove_prefix, str2bool) from rhodecode.lib.vcs.backends import base @@ -206,13 +207,17 @@ class SettingsModel(BaseModel): return res def invalidate_settings_cache(self): - namespace = 'rhodecode_settings' - cache_manager = caches.get_cache_manager('sql_cache_short', namespace) - caches.clear_cache_manager(cache_manager) + # NOTE:(marcink) we flush the whole sql_cache_short region, because it + # reads different settings etc. It's a little too much, but those caches are + # anyway very short-lived and it's the safest way.
+ region = rc_cache.get_or_create_region('sql_cache_short') + region.invalidate() def get_all_settings(self, cache=False): + region = rc_cache.get_or_create_region('sql_cache_short') - def _compute(): + @region.cache_on_arguments(should_cache_fn=lambda v: cache) + def _get_all_settings(name, key): q = self._get_settings_query() if not q: raise Exception('Could not get application settings !') @@ -223,20 +228,14 @@ class SettingsModel(BaseModel): } return settings - if cache: - log.debug('Fetching app settings using cache') - repo = self._get_repo(self.repo) if self.repo else None - namespace = 'rhodecode_settings' - cache_manager = caches.get_cache_manager( - 'sql_cache_short', namespace) - _cache_key = ( - "get_repo_{}_settings".format(repo.repo_id) - if repo else "get_app_settings") + repo = self._get_repo(self.repo) if self.repo else None + key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app" + start = time.time() + result = _get_all_settings('rhodecode_settings', key) + total = time.time() - start + log.debug('Fetching app settings for key: %s took: %.3fs', key, total) - return cache_manager.get(_cache_key, createfunc=_compute) - - else: - return _compute() + return result def get_auth_settings(self): q = self._get_settings_query() diff --git a/rhodecode/tests/__init__.py b/rhodecode/tests/__init__.py --- a/rhodecode/tests/__init__.py +++ b/rhodecode/tests/__init__.py @@ -41,7 +41,7 @@ log = logging.getLogger(__name__) __all__ = [ 'get_new_dir', 'TestController', - 'link_to', 'clear_all_caches', + 'link_to', 'clear_cache_regions', 'assert_session_flash', 'login_user', 'no_newline_id_generator', 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO', 'NEW_HG_REPO', 'NEW_GIT_REPO', @@ -95,10 +95,12 @@ TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, ' TEST_REPO_PREFIX = 'vcs-test' -def clear_all_caches(): - from beaker.cache import cache_managers - for _cache in cache_managers.values(): - _cache.clear() +def clear_cache_regions(regions=None): + # dogpile 
+ from rhodecode.lib.rc_cache import region_meta + for region_name, region in region_meta.dogpile_cache_regions.items(): + if not regions or region_name in regions: + region.invalidate() def get_new_dir(title): diff --git a/rhodecode/tests/lib/test_utils.py b/rhodecode/tests/lib/test_utils.py --- a/rhodecode/tests/lib/test_utils.py +++ b/rhodecode/tests/lib/test_utils.py @@ -70,8 +70,7 @@ def disable_hooks(request, hooks): ui_settings.invalidate() ui_settings = session.query(db.RhodeCodeUi).options( - caching_query.FromCache( - 'sql_cache_short', 'get_hook_settings', 'get_hook_settings')) + caching_query.FromCache('sql_cache_short', 'get_hook_settings')) ui_settings.invalidate() @request.addfinalizer diff --git a/rhodecode/tests/rhodecode.ini b/rhodecode/tests/rhodecode.ini --- a/rhodecode/tests/rhodecode.ini +++ b/rhodecode/tests/rhodecode.ini @@ -292,15 +292,12 @@ cache_dir = %(here)s/data beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock -beaker.cache.regions = long_term, sql_cache_short +beaker.cache.regions = long_term beaker.cache.long_term.type = memory beaker.cache.long_term.expire = 36000 beaker.cache.long_term.key_length = 256 -beaker.cache.sql_cache_short.type = memory -beaker.cache.sql_cache_short.expire = 1 -beaker.cache.sql_cache_short.key_length = 256 ##################################### ### DOGPILE CACHE #### @@ -311,6 +308,12 @@ rc_cache.cache_perms.backend = dogpile.c rc_cache.cache_perms.expiration_time = 0 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1 + +## cache settings for SQL queries +rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru +rc_cache.sql_cache_short.expiration_time = 0 + + #################################### ### BEAKER SESSION #### ####################################