diff --git a/configs/development.ini b/configs/development.ini --- a/configs/development.ini +++ b/configs/development.ini @@ -351,22 +351,6 @@ rc_cache.sql_cache_short.expiration_time #################################### -### BEAKER CACHE #### -#################################### - -## locking and default file storage for Beaker. Putting this into a ramdisk -## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data -beaker.cache.data_dir = %(here)s/data/cache/beaker_data -beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock - -beaker.cache.regions = long_term - -beaker.cache.long_term.type = memorylru_base -beaker.cache.long_term.expire = 172800 -beaker.cache.long_term.key_length = 256 - - -#################################### ### BEAKER SESSION #### #################################### diff --git a/configs/production.ini b/configs/production.ini --- a/configs/production.ini +++ b/configs/production.ini @@ -326,22 +326,6 @@ rc_cache.sql_cache_short.expiration_time #################################### -### BEAKER CACHE #### -#################################### - -## locking and default file storage for Beaker. Putting this into a ramdisk -## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data -beaker.cache.data_dir = %(here)s/data/cache/beaker_data -beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock - -beaker.cache.regions = long_term - -beaker.cache.long_term.type = memory -beaker.cache.long_term.expire = 172800 -beaker.cache.long_term.key_length = 256 - - -#################################### ### BEAKER SESSION #### #################################### diff --git a/rhodecode/apps/repository/views/repo_feed.py b/rhodecode/apps/repository/views/repo_feed.py --- a/rhodecode/apps/repository/views/repo_feed.py +++ b/rhodecode/apps/repository/views/repo_feed.py @@ -17,17 +17,17 @@ # This program is dual-licensed. 
If you wish to learn more about the # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ - +import time import pytz import logging -from beaker.cache import cache_region from pyramid.view import view_config from pyramid.response import Response from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed from rhodecode.apps._base import RepoAppView from rhodecode.lib import audit_logger +from rhodecode.lib import rc_cache from rhodecode.lib import helpers as h from rhodecode.lib.auth import ( LoginRequired, HasRepoPermissionAnyDecorator) @@ -124,11 +124,23 @@ class RepoFeedView(RepoAppView): """ self.load_default_context() - def _generate_feed(): + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.db_repo.repo_id) + + region = rc_cache.get_or_create_region('cache_repo_longterm', + cache_namespace_uid) + + condition = not self.path_filter.is_enabled + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, + condition=condition) + def generate_atom_feed(repo_id, _repo_name, _feed_type): feed = Atom1Feed( - title=self.title % self.db_repo_name, - link=h.route_url('repo_summary', repo_name=self.db_repo_name), - description=self.description % self.db_repo_name, + title=self.title % _repo_name, + link=h.route_url('repo_summary', repo_name=_repo_name), + description=self.description % _repo_name, language=self.language, ttl=self.ttl ) @@ -136,30 +148,31 @@ class RepoFeedView(RepoAppView): for commit in reversed(self._get_commits()): date = self._set_timezone(commit.date) feed.add_item( - unique_id=self.uid(self.db_repo.repo_id, commit.raw_id), + unique_id=self.uid(repo_id, commit.raw_id), title=self._get_title(commit), author_name=commit.author, description=self._get_description(commit), link=h.route_url( - 'repo_commit', repo_name=self.db_repo_name, + 'repo_commit', repo_name=_repo_name, commit_id=commit.raw_id), pubdate=date,) return feed.mime_type, feed.writeString('utf-8') - @cache_region('long_term') - def _generate_feed_and_cache(cache_key): - return _generate_feed() + start = time.time() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + with inv_context_manager as invalidation_context: + # check for stored invalidation signal, and maybe purge the cache + # before computing it again + if invalidation_context.should_invalidate(): + generate_atom_feed.invalidate( + self.db_repo.repo_id, self.db_repo.repo_name, 'atom') - if self.path_filter.is_enabled: - mime_type, feed = _generate_feed() - else: - invalidator_context = CacheKey.repo_context_cache( - _generate_feed_and_cache, self.db_repo_name, - CacheKey.CACHE_TYPE_ATOM) - with invalidator_context as context: - context.invalidate() - mime_type, feed = context.compute() + mime_type, feed = generate_atom_feed( + self.db_repo.repo_id, self.db_repo.repo_name, 'atom') + compute_time = time.time() - start + log.debug('Repo ATOM feed computed in %.3fs', compute_time) response = Response(feed) response.content_type = mime_type @@ -177,11 +190,22 @@ class RepoFeedView(RepoAppView): """ self.load_default_context() - def _generate_feed(): + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED) + invalidation_namespace = 
CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.db_repo.repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', + cache_namespace_uid) + + condition = not self.path_filter.is_enabled + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, + condition=condition) + def generate_rss_feed(repo_id, _repo_name, _feed_type): feed = Rss201rev2Feed( - title=self.title % self.db_repo_name, - link=h.route_url('repo_summary', repo_name=self.db_repo_name), - description=self.description % self.db_repo_name, + title=self.title % _repo_name, + link=h.route_url('repo_summary', repo_name=_repo_name), + description=self.description % _repo_name, language=self.language, ttl=self.ttl ) @@ -189,31 +213,31 @@ class RepoFeedView(RepoAppView): for commit in reversed(self._get_commits()): date = self._set_timezone(commit.date) feed.add_item( - unique_id=self.uid(self.db_repo.repo_id, commit.raw_id), + unique_id=self.uid(repo_id, commit.raw_id), title=self._get_title(commit), author_name=commit.author, description=self._get_description(commit), link=h.route_url( - 'repo_commit', repo_name=self.db_repo_name, + 'repo_commit', repo_name=_repo_name, commit_id=commit.raw_id), pubdate=date,) return feed.mime_type, feed.writeString('utf-8') - @cache_region('long_term') - def _generate_feed_and_cache(cache_key): - return _generate_feed() + start = time.time() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + with inv_context_manager as invalidation_context: + # check for stored invalidation signal, and maybe purge the cache + # before computing it again + if invalidation_context.should_invalidate(): + generate_rss_feed.invalidate( + self.db_repo.repo_id, self.db_repo.repo_name, 'rss') - if self.path_filter.is_enabled: - mime_type, feed = _generate_feed() - else: - invalidator_context = CacheKey.repo_context_cache( - _generate_feed_and_cache, self.db_repo_name, - CacheKey.CACHE_TYPE_RSS) - - with invalidator_context as context: - context.invalidate() - mime_type, feed = context.compute() + mime_type, feed = generate_rss_feed( + self.db_repo.repo_id, self.db_repo.repo_name, 'rss') + compute_time = time.time() - start + log.debug('Repo RSS feed computed in %.3fs', compute_time) response = Response(feed) response.content_type = mime_type diff --git a/rhodecode/apps/repository/views/repo_files.py b/rhodecode/apps/repository/views/repo_files.py --- a/rhodecode/apps/repository/views/repo_files.py +++ b/rhodecode/apps/repository/views/repo_files.py @@ -34,7 +34,7 @@ import rhodecode from rhodecode.apps._base import RepoAppView from rhodecode.controllers.utils import parse_path_ref -from rhodecode.lib import diffs, helpers as h, caches, rc_cache +from rhodecode.lib import diffs, helpers as h, rc_cache from rhodecode.lib import audit_logger from rhodecode.lib.exceptions import NonRelativePathError from rhodecode.lib.codeblocks import ( diff --git a/rhodecode/apps/repository/views/repo_summary.py b/rhodecode/apps/repository/views/repo_summary.py --- a/rhodecode/apps/repository/views/repo_summary.py +++ b/rhodecode/apps/repository/views/repo_summary.py @@ -18,12 +18,12 @@ # RhodeCode Enterprise Edition, including its added features, Support services, # and proprietary license terms, please see https://rhodecode.com/licenses/ +import time import logging import string import rhodecode from pyramid.view import view_config -from beaker.cache import cache_region from rhodecode.controllers import utils from 
rhodecode.apps._base import RepoAppView @@ -53,26 +53,32 @@ class RepoSummaryView(RepoAppView): c.rhodecode_repo = self.rhodecode_vcs_repo return c - def _get_readme_data(self, db_repo, default_renderer): - repo_name = db_repo.repo_name + def _get_readme_data(self, db_repo, renderer_type): + log.debug('Looking for README file') - @cache_region('long_term') - def _generate_readme(cache_key): + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + db_repo.repo_id, CacheKey.CACHE_TYPE_README) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.db_repo.repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) + def generate_repo_readme(repo_id, _repo_name, _renderer_type): readme_data = None readme_node = None readme_filename = None commit = self._get_landing_commit_or_none(db_repo) if commit: log.debug("Searching for a README file.") - readme_node = ReadmeFinder(default_renderer).search(commit) + readme_node = ReadmeFinder(_renderer_type).search(commit) if readme_node: relative_urls = { 'raw': h.route_path( - 'repo_file_raw', repo_name=repo_name, + 'repo_file_raw', repo_name=_repo_name, commit_id=commit.raw_id, f_path=readme_node.path), 'standard': h.route_path( - 'repo_files', repo_name=repo_name, + 'repo_files', repo_name=_repo_name, commit_id=commit.raw_id, f_path=readme_node.path), } readme_data = self._render_readme_or_none( @@ -80,14 +86,21 @@ class RepoSummaryView(RepoAppView): readme_filename = readme_node.path return readme_data, readme_filename - invalidator_context = CacheKey.repo_context_cache( - _generate_readme, repo_name, CacheKey.CACHE_TYPE_README) + start = time.time() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + with inv_context_manager as invalidation_context: + # check for stored invalidation signal, and maybe purge the cache + # before computing it again + if invalidation_context.should_invalidate(): + generate_repo_readme.invalidate( + db_repo.repo_id, db_repo.repo_name, renderer_type) - with invalidator_context as context: - context.invalidate() - computed = context.compute() - - return computed + instance = generate_repo_readme( + db_repo.repo_id, db_repo.repo_name, renderer_type) + compute_time = time.time() - start + log.debug('Repo readme generated and computed in %.3fs', compute_time) + return instance def _get_landing_commit_or_none(self, db_repo): log.debug("Getting the landing commit.") diff --git a/rhodecode/authentication/base.py b/rhodecode/authentication/base.py --- a/rhodecode/authentication/base.py +++ b/rhodecode/authentication/base.py @@ -35,7 +35,7 @@ from pyramid.threadlocal import get_curr from rhodecode.authentication.interface import IAuthnPluginRegistry from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase -from rhodecode.lib import caches, rc_cache +from rhodecode.lib import rc_cache from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt from rhodecode.lib.utils2 import safe_int, safe_str from rhodecode.lib.exceptions import LdapConnectionError diff --git a/rhodecode/config/middleware.py b/rhodecode/config/middleware.py --- a/rhodecode/config/middleware.py +++ b/rhodecode/config/middleware.py @@ -232,7 +232,6 @@ def includeme(config): # Includes which are required. The application would fail without them. 
config.include('pyramid_mako') config.include('pyramid_beaker') - config.include('rhodecode.lib.caches') config.include('rhodecode.lib.rc_cache') config.include('rhodecode.authentication') @@ -467,6 +466,20 @@ def _sanitize_cache_settings(settings): 'rc_cache.cache_repo.arguments.filename', os.path.join(tempfile.gettempdir(), 'rc_cache_2')) + # cache_repo_longterm memory, 96H + _string_setting( + settings, + 'rc_cache.cache_repo_longterm.backend', + 'dogpile.cache.rc.memory_lru') + _int_setting( + settings, + 'rc_cache.cache_repo_longterm.expiration_time', + 345600) + _int_setting( + settings, + 'rc_cache.cache_repo_longterm.max_size', + 10000) + # sql_cache_short _string_setting( settings, diff --git a/rhodecode/lib/base.py b/rhodecode/lib/base.py --- a/rhodecode/lib/base.py +++ b/rhodecode/lib/base.py @@ -504,7 +504,6 @@ def bootstrap_config(request): # allow pyramid lookup in testing config.include('pyramid_mako') config.include('pyramid_beaker') - config.include('rhodecode.lib.caches') config.include('rhodecode.lib.rc_cache') add_events_routes(config) diff --git a/rhodecode/lib/caches.py b/rhodecode/lib/caches.py deleted file mode 100644 --- a/rhodecode/lib/caches.py +++ /dev/null @@ -1,188 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (C) 2015-2018 RhodeCode GmbH -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License, version 3 -# (only), as published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# This program is dual-licensed. If you wish to learn more about the -# RhodeCode Enterprise Edition, including its added features, Support services, -# and proprietary license terms, please see https://rhodecode.com/licenses/ -import functools - -import beaker -import logging -import threading - -from beaker.cache import _cache_decorate, region_invalidate -from sqlalchemy.exc import IntegrityError - -from rhodecode.lib.utils import safe_str, sha1 -from rhodecode.model.db import Session, CacheKey - -log = logging.getLogger(__name__) - - -DEFAULT_CACHE_MANAGER_CONFIG = { - 'type': 'memorylru_base', - 'max_items': 10240, - 'key_length': 256, - 'enabled': True -} - - -def get_default_cache_settings(settings): - cache_settings = {} - for key in settings.keys(): - for prefix in ['beaker.cache.', 'cache.']: - if key.startswith(prefix): - name = key.split(prefix)[1].strip() - cache_settings[name] = settings[key].strip() - return cache_settings - - -# set cache regions for beaker so celery can utilise it -def configure_caches(settings, default_region_settings=None): - cache_settings = {'regions': None} - # main cache settings used as default ... 
- cache_settings.update(get_default_cache_settings(settings)) - default_region_settings = default_region_settings or \ - {'type': DEFAULT_CACHE_MANAGER_CONFIG['type']} - if cache_settings['regions']: - for region in cache_settings['regions'].split(','): - region = region.strip() - region_settings = default_region_settings.copy() - for key, value in cache_settings.items(): - if key.startswith(region): - region_settings[key.split(region + '.')[-1]] = value - log.debug('Configuring cache region `%s` with settings %s', - region, region_settings) - configure_cache_region( - region, region_settings, cache_settings) - - -def configure_cache_region( - region_name, region_settings, default_cache_kw, default_expire=60): - default_type = default_cache_kw.get('type', 'memory') - default_lock_dir = default_cache_kw.get('lock_dir') - default_data_dir = default_cache_kw.get('data_dir') - - region_settings['lock_dir'] = region_settings.get('lock_dir', default_lock_dir) - region_settings['data_dir'] = region_settings.get('data_dir', default_data_dir) - region_settings['type'] = region_settings.get('type', default_type) - region_settings['expire'] = int(region_settings.get('expire', default_expire)) - - beaker.cache.cache_regions[region_name] = region_settings - - -def compute_key_from_params(*args): - """ - Helper to compute key from given params to be used in cache manager - """ - return sha1("_".join(map(safe_str, args))) - - -def get_repo_namespace_key(prefix, repo_name): - return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name)) - - -class ActiveRegionCache(object): - def __init__(self, context): - self.context = context - - def invalidate(self, *args, **kwargs): - return False - - def compute(self): - log.debug('Context cache: getting obj %s from cache', self.context) - return self.context.compute_func(self.context.cache_key) - - -class FreshRegionCache(ActiveRegionCache): - def invalidate(self): - log.debug('Context cache: invalidating cache for %s', self.context) - region_invalidate( - self.context.compute_func, None, self.context.cache_key) - return True - - -class InvalidationContext(object): - def __repr__(self): - return ''.format( - safe_str(self.repo_name), safe_str(self.cache_type)) - - def __init__(self, compute_func, repo_name, cache_type, - raise_exception=False, thread_scoped=False): - self.compute_func = compute_func - self.repo_name = repo_name - self.cache_type = cache_type - self.cache_key = compute_key_from_params( - repo_name, cache_type) - self.raise_exception = raise_exception - - # Append the thread id to the cache key if this invalidation context - # should be scoped to the current thread. 
- if thread_scoped: - thread_id = threading.current_thread().ident - self.cache_key = '{cache_key}_{thread_id}'.format( - cache_key=self.cache_key, thread_id=thread_id) - - def get_cache_obj(self): - cache_key = CacheKey.get_cache_key( - self.repo_name, self.cache_type) - cache_obj = CacheKey.get_active_cache(cache_key) - if not cache_obj: - cache_obj = CacheKey(cache_key, self.repo_name) - return cache_obj - - def __enter__(self): - """ - Test if current object is valid, and return CacheRegion function - that does invalidation and calculation - """ - - self.cache_obj = self.get_cache_obj() - if self.cache_obj.cache_active: - # means our cache obj is existing and marked as it's - # cache is not outdated, we return BaseInvalidator - self.skip_cache_active_change = True - return ActiveRegionCache(self) - - # the key is either not existing or set to False, we return - # the real invalidator which re-computes value. We additionally set - # the flag to actually update the Database objects - self.skip_cache_active_change = False - return FreshRegionCache(self) - - def __exit__(self, exc_type, exc_val, exc_tb): - - if self.skip_cache_active_change: - return - - try: - self.cache_obj.cache_active = True - Session().add(self.cache_obj) - Session().commit() - except IntegrityError: - # if we catch integrity error, it means we inserted this object - # assumption is that's really an edge race-condition case and - # it's safe is to skip it - Session().rollback() - except Exception: - log.exception('Failed to commit on cache key update') - Session().rollback() - if self.raise_exception: - raise - - -def includeme(config): - configure_caches(config.registry.settings) diff --git a/rhodecode/lib/dbmigrate/schema/db_1_2_0.py b/rhodecode/lib/dbmigrate/schema/db_1_2_0.py --- a/rhodecode/lib/dbmigrate/schema/db_1_2_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_1_2_0.py @@ -648,17 +648,7 @@ class Repository(Base, BaseModel): @property def scm_instance_cached(self): - @cache_region('long_term') - def _c(repo_name): - return self.__get_instance() - rn = self.repo_name - - inv = self.invalidate - if inv is not None: - region_invalidate(_c, None, rn) - # update our cache - CacheInvalidation.set_valid(inv.cache_key) - return _c(rn) + return self.__get_instance() def __get_instance(self): diff --git a/rhodecode/lib/dbmigrate/schema/db_1_3_0.py b/rhodecode/lib/dbmigrate/schema/db_1_3_0.py --- a/rhodecode/lib/dbmigrate/schema/db_1_3_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_1_3_0.py @@ -670,17 +670,7 @@ class Repository(Base, BaseModel): @property def scm_instance_cached(self): - @cache_region('long_term') - def _c(repo_name): - return self.__get_instance() - rn = self.repo_name - log.debug('Getting cached instance of repo') - inv = self.invalidate - if inv is not None: - region_invalidate(_c, None, rn) - # update our cache - CacheInvalidation.set_valid(inv.cache_key) - return _c(rn) + return self.__get_instance() def __get_instance(self): repo_full_path = self.repo_full_path diff --git a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_11_0_0.py @@ -2262,18 +2262,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, 
thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -3165,27 +3154,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_3_0_0.py @@ -1963,18 +1963,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): repo_full_path = self.repo_full_path @@ -2849,25 +2838,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext(compute_func, repo_name, cache_type) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_0.py @@ -1966,18 +1966,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2841,27 +2830,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - 
context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_1.py @@ -1966,18 +1966,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2841,26 +2830,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) class ChangesetComment(Base, BaseModel): diff --git a/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py b/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py --- a/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_4_0_2.py @@ -1968,18 +1968,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2845,27 +2834,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_5_0_0.py @@ -1968,18 +1968,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), 
config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2845,27 +2834,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_0.py @@ -2010,18 +2010,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2900,27 +2889,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py --- a/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_7_0_1.py @@ -2011,18 +2011,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -2901,27 +2890,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def 
repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py --- a/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py +++ b/rhodecode/lib/dbmigrate/schema/db_4_9_0_0.py @@ -2199,18 +2199,7 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): - return self._get_instance() - - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - context.invalidate() - repo = context.compute() - - return repo + self._get_instance() def _get_instance(self, cache=True, config=None): config = config or self._config @@ -3101,27 +3090,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/lib/middleware/simplesvn.py b/rhodecode/lib/middleware/simplesvn.py --- a/rhodecode/lib/middleware/simplesvn.py +++ b/rhodecode/lib/middleware/simplesvn.py @@ -26,7 +26,7 @@ from urlparse import urljoin import requests from pyramid.httpexceptions import HTTPNotAcceptable -from rhodecode.lib import caches +from rhodecode.lib import rc_cache from rhodecode.lib.middleware import simplevcs from rhodecode.lib.utils import is_valid_repo from rhodecode.lib.utils2 import str2bool, safe_int @@ -86,7 +86,7 @@ class SimpleSvnApp(object): if response.headers.get('SVN-Txn-name'): svn_tx_id = response.headers.get('SVN-Txn-name') - txn_id = caches.compute_key_from_params( + txn_id = rc_cache.compute_key_from_params( self.config['repository'], svn_tx_id) port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1]) store_txn_id_data(txn_id, {'port': port}) diff --git a/rhodecode/lib/middleware/simplevcs.py b/rhodecode/lib/middleware/simplevcs.py --- a/rhodecode/lib/middleware/simplevcs.py +++ b/rhodecode/lib/middleware/simplevcs.py @@ -40,7 +40,7 @@ from zope.cachedescriptors.property impo import rhodecode from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin -from rhodecode.lib import caches, rc_cache +from rhodecode.lib import rc_cache from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware from rhodecode.lib.base import ( BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context) @@ 
-77,7 +77,7 @@ def extract_svn_txn_id(acl_repo_name, da match = pat.search(sub_el.text) if match: svn_tx_id = match.groupdict()['txn_id'] - txn_id = caches.compute_key_from_params( + txn_id = rc_cache.compute_key_from_params( acl_repo_name, svn_tx_id) return txn_id except Exception: diff --git a/rhodecode/lib/rc_cache/__init__.py b/rhodecode/lib/rc_cache/__init__.py --- a/rhodecode/lib/rc_cache/__init__.py +++ b/rhodecode/lib/rc_cache/__init__.py @@ -39,7 +39,8 @@ log = logging.getLogger(__name__) from . import region_meta from .utils import ( get_default_cache_settings, key_generator, get_or_create_region, - clear_cache_namespace, make_region) + clear_cache_namespace, make_region, InvalidationContext, + FreshRegionCache, ActiveRegionCache) def configure_dogpile_cache(settings): diff --git a/rhodecode/lib/rc_cache/utils.py b/rhodecode/lib/rc_cache/utils.py --- a/rhodecode/lib/rc_cache/utils.py +++ b/rhodecode/lib/rc_cache/utils.py @@ -20,11 +20,16 @@ import os import logging import functools +import threading from dogpile.cache import CacheRegion from dogpile.cache.util import compat +import rhodecode from rhodecode.lib.utils import safe_str, sha1 +from rhodecode.lib.utils2 import safe_unicode +from rhodecode.model.db import Session, CacheKey, IntegrityError + from . import region_meta log = logging.getLogger(__name__) @@ -183,3 +188,127 @@ def clear_cache_namespace(cache_region, cache_keys = region.backend.list_keys(prefix=cache_namespace_uid) region.delete_multi(cache_keys) return len(cache_keys) + + +class ActiveRegionCache(object): + def __init__(self, context): + self.context = context + + def should_invalidate(self): + return False + + +class FreshRegionCache(object): + def __init__(self, context): + self.context = context + + def should_invalidate(self): + return True + + +class InvalidationContext(object): + """ + usage:: + + import time + from rhodecode.lib import rc_cache + my_id = 1 + cache_namespace_uid = 'cache_demo.{}'.format(my_id) + invalidation_namespace = 'repo_cache:1' + region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, + expiration_time=30, + condition=True) + def heavy_compute(cache_name, param1, param2): + print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2)) + import time + time.sleep(30) + return True + + start = time.time() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + with inv_context_manager as invalidation_context: + # check for stored invalidation signal, and maybe purge the cache + # before computing it again + if invalidation_context.should_invalidate(): + heavy_compute.invalidate('some_name', 'param1', 'param2') + + result = heavy_compute('some_name', 'param1', 'param2') + compute_time = time.time() - start + print(compute_time) + + # To send global invalidation signal, simply run + CacheKey.set_invalidate(invalidation_namespace) + + """ + + def __repr__(self): + return ''.format( + safe_str(self.cache_key), safe_str(self.uid)) + + def __init__(self, uid, invalidation_namespace='', + raise_exception=False, thread_scoped=True): + self.uid = uid + self.invalidation_namespace = invalidation_namespace + self.raise_exception = raise_exception + self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT') + self.thread_id = 'global' + + # Append the thread id to the cache key if this invalidation context + # should be scoped to the current thread. 
+ if thread_scoped: + self.thread_id = threading.current_thread().ident + + self.cache_key = compute_key_from_params(uid) + self.cache_key = 'proc:{}_thread:{}_{}'.format( + self.proc_id, self.thread_id, self.cache_key) + + def get_or_create_cache_obj(self, uid, invalidation_namespace=''): + log.debug('Checking if %s cache key is present and active', self.cache_key) + cache_obj = CacheKey.get_active_cache(self.cache_key) + invalidation_namespace = invalidation_namespace or self.invalidation_namespace + if not cache_obj: + cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace) + return cache_obj + + def __enter__(self): + """ + Test if current object is valid, and return CacheRegion function + that does invalidation and calculation + """ + # register or get a new key based on uid + self.cache_obj = self.get_or_create_cache_obj(uid=self.uid) + + if self.cache_obj.cache_active: + # means our cache obj is existing and marked as it's + # cache is not outdated, we return ActiveRegionCache + self.skip_cache_active_change = True + return ActiveRegionCache(context=self) + + # the key is either not existing or set to False, we return + # the real invalidator which re-computes value. We additionally set + # the flag to actually update the Database objects + self.skip_cache_active_change = False + return FreshRegionCache(context=self) + + def __exit__(self, exc_type, exc_val, exc_tb): + + if self.skip_cache_active_change: + return + + try: + self.cache_obj.cache_active = True + Session().add(self.cache_obj) + Session().commit() + except IntegrityError: + # if we catch integrity error, it means we inserted this object + # assumption is that's really an edge race-condition case and + # it's safe is to skip it + Session().rollback() + except Exception: + log.exception('Failed to commit on cache key update') + Session().rollback() + if self.raise_exception: + raise diff --git a/rhodecode/model/db.py b/rhodecode/model/db.py --- a/rhodecode/model/db.py +++ b/rhodecode/model/db.py @@ -47,7 +47,6 @@ from sqlalchemy.ext.declarative import d from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.exc import IntegrityError # noqa from sqlalchemy.dialects.mysql import LONGTEXT -from beaker.cache import cache_region from zope.cachedescriptors.property import Lazy as LazyProperty from pyramid.threadlocal import get_current_request @@ -1845,8 +1844,10 @@ class Repository(Base, BaseModel): """ Returns associated cache keys for that repo """ + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.repo_id) return CacheKey.query()\ - .filter(CacheKey.cache_args == self.repo_name)\ + .filter(CacheKey.cache_args == invalidation_namespace)\ .order_by(CacheKey.cache_key)\ .all() @@ -2327,18 +2328,30 @@ class Repository(Base, BaseModel): return self._get_instance(cache=bool(cache), config=config) def _get_instance_cached(self): - @cache_region('long_term') - def _get_repo(cache_key): + from rhodecode.lib import rc_cache + + cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=self.repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) + def get_instance_cached(repo_id): return self._get_instance() - invalidator_context = CacheKey.repo_context_cache( - _get_repo, self.repo_name, None, thread_scoped=True) - - with invalidator_context as context: - 
context.invalidate() - repo = context.compute() - - return repo + start = time.time() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + with inv_context_manager as invalidation_context: + # check for stored invalidation signal, and maybe purge the cache + # before computing it again + if invalidation_context.should_invalidate(): + get_instance_cached.invalidate(self.repo_id) + + instance = get_instance_cached(self.repo_id) + compute_time = time.time() - start + log.debug('Repo instance fetched in %.3fs', compute_time) + return instance def _get_instance(self, cache=True, config=None): config = config or self._config @@ -3128,9 +3141,10 @@ class CacheKey(Base, BaseModel): base_table_args, ) - CACHE_TYPE_ATOM = 'ATOM' - CACHE_TYPE_RSS = 'RSS' + CACHE_TYPE_FEED = 'FEED' CACHE_TYPE_README = 'README' + # namespaces used to register process/thread aware caches + REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}' cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None) @@ -3179,44 +3193,27 @@ class CacheKey(Base, BaseModel): Session().commit() @classmethod - def get_cache_key(cls, repo_name, cache_type): - """ - - Generate a cache key for this process of RhodeCode instance. - Prefix most likely will be process id or maybe explicitly set - instance_id from .ini file. - """ - import rhodecode - prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '') - - repo_as_unicode = safe_unicode(repo_name) - key = u'{}_{}'.format(repo_as_unicode, cache_type) \ - if cache_type else repo_as_unicode - - return u'{}{}'.format(prefix, key) - - @classmethod - def set_invalidate(cls, repo_name, delete=False): + def set_invalidate(cls, cache_uid, delete=False): """ Mark all caches of a repo as invalid in the database. 
""" try: - qry = Session().query(cls).filter(cls.cache_args == repo_name) + qry = Session().query(cls).filter(cls.cache_args == cache_uid) if delete: - log.debug('cache objects deleted for repo %s', - safe_str(repo_name)) qry.delete() + log.debug('cache objects deleted for cache args %s', + safe_str(cache_uid)) else: - log.debug('cache objects marked as invalid for repo %s', - safe_str(repo_name)) qry.update({"cache_active": False}) + log.debug('cache objects marked as invalid for cache args %s', + safe_str(cache_uid)) Session().commit() except Exception: log.exception( - 'Cache key invalidation failed for repository %s', - safe_str(repo_name)) + 'Cache key invalidation failed for cache args %s', + safe_str(cache_uid)) Session().rollback() @classmethod @@ -3226,27 +3223,6 @@ class CacheKey(Base, BaseModel): return inv_obj return None - @classmethod - def repo_context_cache(cls, compute_func, repo_name, cache_type, - thread_scoped=False): - """ - @cache_region('long_term') - def _heavy_calculation(cache_key): - return 'result' - - cache_context = CacheKey.repo_context_cache( - _heavy_calculation, repo_name, cache_type) - - with cache_context as context: - context.invalidate() - computed = context.compute() - - assert computed == 'result' - """ - from rhodecode.lib import caches - return caches.InvalidationContext( - compute_func, repo_name, cache_type, thread_scoped=thread_scoped) - class ChangesetComment(Base, BaseModel): __tablename__ = 'changeset_comments' diff --git a/rhodecode/model/scm.py b/rhodecode/model/scm.py --- a/rhodecode/model/scm.py +++ b/rhodecode/model/scm.py @@ -43,7 +43,7 @@ from rhodecode.lib.auth import ( HasRepoPermissionAny, HasRepoGroupPermissionAny, HasUserGroupPermissionAny) from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError -from rhodecode.lib import hooks_utils, caches +from rhodecode.lib import hooks_utils from rhodecode.lib.utils import ( get_filesystem_repos, make_db_config) from rhodecode.lib.utils2 import (safe_str, safe_unicode) @@ -269,10 +269,13 @@ class ScmModel(BaseModel): :param delete: delete the entry keys instead of setting bool flag on them, and also purge caches used by the dogpile """ - CacheKey.set_invalidate(repo_name, delete=delete) repo = Repository.get_by_repo_name(repo_name) if repo: + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=repo.repo_id) + CacheKey.set_invalidate(invalidation_namespace, delete=delete) + repo_id = repo.repo_id config = repo._config config.set('extensions', 'largefiles', '') diff --git a/rhodecode/model/settings.py b/rhodecode/model/settings.py --- a/rhodecode/model/settings.py +++ b/rhodecode/model/settings.py @@ -26,7 +26,7 @@ from collections import namedtuple from functools import wraps import bleach -from rhodecode.lib import caches, rc_cache +from rhodecode.lib import rc_cache from rhodecode.lib.utils2 import ( Optional, AttributeDict, safe_str, remove_prefix, str2bool) from rhodecode.lib.vcs.backends import base diff --git a/rhodecode/templates/admin/repos/repo_edit_caches.mako b/rhodecode/templates/admin/repos/repo_edit_caches.mako --- a/rhodecode/templates/admin/repos/repo_edit_caches.mako +++ b/rhodecode/templates/admin/repos/repo_edit_caches.mako @@ -36,14 +36,14 @@
- + %for cache in c.rhodecode_db_repo.cache_keys: - - + + %endfor diff --git a/rhodecode/tests/lib/test_caches.py b/rhodecode/tests/lib/test_caches.py --- a/rhodecode/tests/lib/test_caches.py +++ b/rhodecode/tests/lib/test_caches.py @@ -25,7 +25,7 @@ import pytest from rhodecode.lib import rc_cache -@pytest.mark.usefixtures( 'app') +@pytest.mark.usefixtures('app') class TestCaches(object): def test_cache_decorator_init_not_configured(self): diff --git a/rhodecode/tests/lib/test_libs.py b/rhodecode/tests/lib/test_libs.py --- a/rhodecode/tests/lib/test_libs.py +++ b/rhodecode/tests/lib/test_libs.py @@ -30,10 +30,12 @@ import pytest from rhodecode.tests import no_newline_id_generator from rhodecode.tests.utils import run_test_concurrently -from rhodecode.lib.helpers import InitialsGravatar +from rhodecode.lib import rc_cache +from rhodecode.lib.helpers import InitialsGravatar from rhodecode.lib.utils2 import AttributeDict -from rhodecode.model.db import Repository + +from rhodecode.model.db import Repository, CacheKey def _urls_for_proto(proto): @@ -558,87 +560,124 @@ def test_get_repo_by_id(test, expected): assert _test == expected -@pytest.mark.parametrize("test_repo_name, repo_type", [ - ("test_repo_1", None), - ("repo_group/foobar", None), - ("test_non_asci_ąćę", None), - (u"test_non_asci_unicode_ąćę", None), -]) -def test_invalidation_context(baseapp, test_repo_name, repo_type): - from beaker.cache import cache_region - from rhodecode.lib import caches - from rhodecode.model.db import CacheKey +def test_invalidation_context(baseapp): + repo_id = 999 + + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + repo_id, CacheKey.CACHE_TYPE_README) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + calls = [1, 2] - @cache_region('long_term') + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) def _dummy_func(cache_key): - return 'result' + val = calls.pop(0) + return 'result:{}'.format(val) + + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + + # 1st call, fresh caches + with inv_context_manager as invalidation_context: + should_invalidate = invalidation_context.should_invalidate() + if should_invalidate: + _dummy_func.invalidate('some-key') + result = _dummy_func('some-key') + + assert isinstance(invalidation_context, rc_cache.FreshRegionCache) + assert should_invalidate is True - invalidator_context = CacheKey.repo_context_cache( - _dummy_func, test_repo_name, 'repo') + assert 'result:1' == result + # should be cached so calling it twice will give the same result ! 
+ result = _dummy_func('some-key') + assert 'result:1' == result - with invalidator_context as context: - invalidated = context.invalidate() - result = context.compute() + # 2nd call, we create a new context manager, this should be now key aware, and + # return an active cache region + with inv_context_manager as invalidation_context: + should_invalidate = invalidation_context.should_invalidate() + assert isinstance(invalidation_context, rc_cache.ActiveRegionCache) + assert should_invalidate is False + + # Mark invalidation + CacheKey.set_invalidate(invalidation_namespace) - assert invalidated == True - assert 'result' == result - assert isinstance(context, caches.FreshRegionCache) - - assert 'InvalidationContext' in repr(invalidator_context) + # 3nd call, fresh caches + with inv_context_manager as invalidation_context: + should_invalidate = invalidation_context.should_invalidate() + if should_invalidate: + _dummy_func.invalidate('some-key') + result = _dummy_func('some-key') - with invalidator_context as context: - context.invalidate() - result = context.compute() + assert isinstance(invalidation_context, rc_cache.FreshRegionCache) + assert should_invalidate is True - assert 'result' == result - assert isinstance(context, caches.ActiveRegionCache) + assert 'result:2' == result + + # cached again, same result + result = _dummy_func('some-key') + assert 'result:2' == result def test_invalidation_context_exception_in_compute(baseapp): - from rhodecode.model.db import CacheKey - from beaker.cache import cache_region + repo_id = 888 - @cache_region('long_term') + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + repo_id, CacheKey.CACHE_TYPE_README) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) + + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) def _dummy_func(cache_key): - # this causes error since it doesn't get any params - raise Exception('ups') - - invalidator_context = CacheKey.repo_context_cache( - _dummy_func, 'test_repo_2', 'repo') + raise Exception('Error in cache func') with pytest.raises(Exception): - with invalidator_context as context: - context.invalidate() - context.compute() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) + + # 1st call, fresh caches + with inv_context_manager as invalidation_context: + should_invalidate = invalidation_context.should_invalidate() + if should_invalidate: + _dummy_func.invalidate('some-key-2') + _dummy_func('some-key-2') @pytest.mark.parametrize('execution_number', range(5)) def test_cache_invalidation_race_condition(execution_number, baseapp): import time - from beaker.cache import cache_region - from rhodecode.model.db import CacheKey + + repo_id = 777 - if CacheKey.metadata.bind.url.get_backend_name() == "mysql": - reason = ( - 'Fails on MariaDB due to some locking issues. 
Investigation' - ' needed') - pytest.xfail(reason=reason) + cache_namespace_uid = 'cache_repo_instance.{}_{}'.format( + repo_id, CacheKey.CACHE_TYPE_README) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=repo_id) + region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid) @run_test_concurrently(25) def test_create_and_delete_cache_keys(): time.sleep(0.2) - @cache_region('long_term') + @region.conditional_cache_on_arguments(namespace=cache_namespace_uid) def _dummy_func(cache_key): - return 'result' + val = 'async' + return 'result:{}'.format(val) + + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) - invalidator_context = CacheKey.repo_context_cache( - _dummy_func, 'test_repo_1', 'repo') + # 1st call, fresh caches + with inv_context_manager as invalidation_context: + should_invalidate = invalidation_context.should_invalidate() + if should_invalidate: + _dummy_func.invalidate('some-key-3') + _dummy_func('some-key-3') - with invalidator_context as context: - context.invalidate() - context.compute() - - CacheKey.set_invalidate('test_repo_1', delete=True) + # Mark invalidation + CacheKey.set_invalidate(invalidation_namespace) test_create_and_delete_cache_keys() diff --git a/rhodecode/tests/pylons_plugin.py b/rhodecode/tests/pylons_plugin.py --- a/rhodecode/tests/pylons_plugin.py +++ b/rhodecode/tests/pylons_plugin.py @@ -125,11 +125,6 @@ def vcsserver_factory(tmpdir_factory): overrides = list(overrides) overrides.append({'server:main': {'port': vcsserver_port}}) - if is_cygwin(): - platform_override = {'DEFAULT': { - 'beaker.cache.repo_object.type': 'nocache'}} - overrides.append(platform_override) - option_name = 'vcsserver_config_http' override_option_name = 'vcsserver_config_override' config_file = get_config( diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations.py b/rhodecode/tests/vcs_operations/test_vcs_operations.py --- a/rhodecode/tests/vcs_operations/test_vcs_operations.py +++ b/rhodecode/tests/vcs_operations/test_vcs_operations.py @@ -32,6 +32,7 @@ import time import pytest +from rhodecode.lib import rc_cache from rhodecode.model.auth_token import AuthTokenModel from rhodecode.model.db import Repository, UserIpMap, CacheKey from rhodecode.model.meta import Session @@ -217,46 +218,44 @@ class TestVCSOperations(object): _check_proper_git_push(stdout, stderr) - def test_push_invalidates_cache_hg(self, rc_web_server, tmpdir): - key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).scalar() - if not key: - key = CacheKey(HG_REPO, HG_REPO) + def test_push_invalidates_cache(self, rc_web_server, tmpdir): + hg_repo = Repository.get_by_repo_name(HG_REPO) + + # init cache objects + CacheKey.delete_all_cache() + cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id) + invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format( + repo_id=hg_repo.repo_id) - key.cache_active = True - Session().add(key) - Session().commit() + inv_context_manager = rc_cache.InvalidationContext( + uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace) - clone_url = rc_web_server.repo_clone_url(HG_REPO) + with inv_context_manager as invalidation_context: + # __enter__ will create and register cache objects + pass + + # clone to init cache + clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name) stdout, stderr = Command('/tmp').execute( 'hg clone', clone_url, tmpdir.strpath) + cache_keys = hg_repo.cache_keys + assert 
cache_keys != [] + for key in cache_keys: + assert key.cache_active is True + + # PUSH that should trigger invalidation cache stdout, stderr = _add_files_and_push( 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1) - key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).one() - assert key.cache_active is False - - def test_push_invalidates_cache_git(self, rc_web_server, tmpdir): - key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).scalar() - if not key: - key = CacheKey(GIT_REPO, GIT_REPO) - - key.cache_active = True - Session().add(key) + # flush... Session().commit() - - clone_url = rc_web_server.repo_clone_url(GIT_REPO) - stdout, stderr = Command('/tmp').execute( - 'git clone', clone_url, tmpdir.strpath) - - # commit some stuff into this repo - stdout, stderr = _add_files_and_push( - 'git', tmpdir.strpath, clone_url=clone_url, files_no=1) - _check_proper_git_push(stdout, stderr) - - key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).one() - - assert key.cache_active is False + hg_repo = Repository.get_by_repo_name(HG_REPO) + cache_keys = hg_repo.cache_keys + assert cache_keys != [] + for key in cache_keys: + # keys should be marked as not active + assert key.cache_active is False def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir): clone_url = rc_web_server.repo_clone_url(HG_REPO)
[displaced cell markup from the rhodecode/templates/admin/repos/repo_edit_caches.mako hunk above: header <th> ${_('Prefix')} is replaced by <th> ${_('Namespace')}, with <th> ${_('Key')} and <th> ${_('Active')} kept; row cells <td> ${cache.get_prefix() or '-'} / ${cache.cache_key} are replaced by <td> ${cache.cache_key} / ${cache.cache_args}, with <td> ${h.bool2icon(cache.cache_active)} unchanged]
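
The diff above replaces the Beaker 'long_term' region with a dogpile-backed 'cache_repo_longterm' region plus a database-backed InvalidationContext. The sketch below condenses that pattern outside of any view. It is a minimal illustration, not part of the change itself: it assumes a configured RhodeCode environment (so rc_cache regions and the DB session are available), and repo_id and compute_stats() are hypothetical stand-ins for the real feed/readme/repo-instance computations. When no rc_cache.cache_repo_longterm.* keys are set in the .ini, the region falls back to the defaults registered in _sanitize_cache_settings (dogpile.cache.rc.memory_lru backend, 345600s expiration, max_size 10000).

    import time

    from rhodecode.lib import rc_cache
    from rhodecode.model.db import CacheKey

    repo_id = 1  # hypothetical repository id, for illustration only
    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
        repo_id, CacheKey.CACHE_TYPE_README)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
        repo_id=repo_id)
    region = rc_cache.get_or_create_region(
        'cache_repo_longterm', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
    def compute_stats(repo_id):
        # stand-in for an expensive computation such as feed, readme or
        # repo-instance generation in the views changed above
        return {'repo_id': repo_id, 'computed_at': time.time()}

    start = time.time()
    inv_context_manager = rc_cache.InvalidationContext(
        uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
    with inv_context_manager as invalidation_context:
        # check for a stored invalidation signal, and maybe purge the cache
        # before computing it again
        if invalidation_context.should_invalidate():
            compute_stats.invalidate(repo_id)
        result = compute_stats(repo_id)
    print('computed in %.3fs: %s' % (time.time() - start, result))

    # a writer (e.g. ScmModel.mark_for_invalidation after a push) signals all
    # readers sharing the namespace by flipping cache_active off in the DB
    CacheKey.set_invalidate(invalidation_namespace)

After set_invalidate() runs for the shared invalidation namespace, the next reader's should_invalidate() returns True, the decorated value is invalidated, and the result is recomputed and re-cached, which is exactly the behaviour exercised by test_invalidation_context and test_push_invalidates_cache in the diff.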