- ${cache.get_prefix() or '-'} |
- ${cache.cache_key} |
+ ${cache.cache_key} |
+ ${cache.cache_args} |
${h.bool2icon(cache.cache_active)} |
%endfor
diff --git a/rhodecode/tests/lib/test_caches.py b/rhodecode/tests/lib/test_caches.py
--- a/rhodecode/tests/lib/test_caches.py
+++ b/rhodecode/tests/lib/test_caches.py
@@ -25,7 +25,7 @@ import pytest
from rhodecode.lib import rc_cache
-@pytest.mark.usefixtures( 'app')
+@pytest.mark.usefixtures('app')
class TestCaches(object):
def test_cache_decorator_init_not_configured(self):
diff --git a/rhodecode/tests/lib/test_libs.py b/rhodecode/tests/lib/test_libs.py
--- a/rhodecode/tests/lib/test_libs.py
+++ b/rhodecode/tests/lib/test_libs.py
@@ -30,10 +30,12 @@ import pytest
from rhodecode.tests import no_newline_id_generator
from rhodecode.tests.utils import run_test_concurrently
-from rhodecode.lib.helpers import InitialsGravatar
+from rhodecode.lib import rc_cache
+from rhodecode.lib.helpers import InitialsGravatar
from rhodecode.lib.utils2 import AttributeDict
-from rhodecode.model.db import Repository
+
+from rhodecode.model.db import Repository, CacheKey
def _urls_for_proto(proto):
@@ -558,87 +560,124 @@ def test_get_repo_by_id(test, expected):
assert _test == expected
-@pytest.mark.parametrize("test_repo_name, repo_type", [
- ("test_repo_1", None),
- ("repo_group/foobar", None),
- ("test_non_asci_ąćę", None),
- (u"test_non_asci_unicode_ąćę", None),
-])
-def test_invalidation_context(baseapp, test_repo_name, repo_type):
- from beaker.cache import cache_region
- from rhodecode.lib import caches
- from rhodecode.model.db import CacheKey
+def test_invalidation_context(baseapp):
+ repo_id = 999
+
+ cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+ repo_id, CacheKey.CACHE_TYPE_README)
+ invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+ repo_id=repo_id)
+ region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
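+ # each real execution of _dummy_func pops one entry; a cache hit returns the memoized result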
+ calls = [1, 2]
- @cache_region('long_term')
+ @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
def _dummy_func(cache_key):
- return 'result'
+ val = calls.pop(0)
+ return 'result:{}'.format(val)
+
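+ # the context manager tracks CacheKey state; should_invalidate() reports whether the
+ # cached data for this namespace needs to be recomputed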
+ inv_context_manager = rc_cache.InvalidationContext(
+ uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+
+ # 1st call, fresh caches
+ with inv_context_manager as invalidation_context:
+ should_invalidate = invalidation_context.should_invalidate()
+ if should_invalidate:
+ _dummy_func.invalidate('some-key')
+ result = _dummy_func('some-key')
+
+ assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
+ assert should_invalidate is True
- invalidator_context = CacheKey.repo_context_cache(
- _dummy_func, test_repo_name, 'repo')
+ assert 'result:1' == result
+ # should be cached, so calling it twice gives the same result!
+ result = _dummy_func('some-key')
+ assert 'result:1' == result
- with invalidator_context as context:
- invalidated = context.invalidate()
- result = context.compute()
+ # 2nd call, re-enter the same context manager; it should now be key aware and
+ # return an active cache region
+ with inv_context_manager as invalidation_context:
+ should_invalidate = invalidation_context.should_invalidate()
+ assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
+ assert should_invalidate is False
+
+ # Mark invalidation
+ CacheKey.set_invalidate(invalidation_namespace)
- assert invalidated == True
- assert 'result' == result
- assert isinstance(context, caches.FreshRegionCache)
-
- assert 'InvalidationContext' in repr(invalidator_context)
+ # 3rd call, fresh caches after the explicit invalidation above
+ with inv_context_manager as invalidation_context:
+ should_invalidate = invalidation_context.should_invalidate()
+ if should_invalidate:
+ _dummy_func.invalidate('some-key')
+ result = _dummy_func('some-key')
- with invalidator_context as context:
- context.invalidate()
- result = context.compute()
+ assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
+ assert should_invalidate is True
- assert 'result' == result
- assert isinstance(context, caches.ActiveRegionCache)
+ assert 'result:2' == result
+
+ # cached again, same result
+ result = _dummy_func('some-key')
+ assert 'result:2' == result
def test_invalidation_context_exception_in_compute(baseapp):
- from rhodecode.model.db import CacheKey
- from beaker.cache import cache_region
+ repo_id = 888
- @cache_region('long_term')
+ cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+ repo_id, CacheKey.CACHE_TYPE_README)
+ invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+ repo_id=repo_id)
+ region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+ @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
def _dummy_func(cache_key):
- # this causes error since it doesn't get any params
- raise Exception('ups')
-
- invalidator_context = CacheKey.repo_context_cache(
- _dummy_func, 'test_repo_2', 'repo')
+ raise Exception('Error in cache func')
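+
+ # the decorated function's exception must propagate out of the invalidation context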
with pytest.raises(Exception):
- with invalidator_context as context:
- context.invalidate()
- context.compute()
+ inv_context_manager = rc_cache.InvalidationContext(
+ uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+
+ # 1st call, fresh caches
+ with inv_context_manager as invalidation_context:
+ should_invalidate = invalidation_context.should_invalidate()
+ if should_invalidate:
+ _dummy_func.invalidate('some-key-2')
+ _dummy_func('some-key-2')
@pytest.mark.parametrize('execution_number', range(5))
def test_cache_invalidation_race_condition(execution_number, baseapp):
import time
- from beaker.cache import cache_region
- from rhodecode.model.db import CacheKey
+
+ repo_id = 777
- if CacheKey.metadata.bind.url.get_backend_name() == "mysql":
- reason = (
- 'Fails on MariaDB due to some locking issues. Investigation'
- ' needed')
- pytest.xfail(reason=reason)
+ cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+ repo_id, CacheKey.CACHE_TYPE_README)
+ invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+ repo_id=repo_id)
+ region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
@run_test_concurrently(25)
def test_create_and_delete_cache_keys():
time.sleep(0.2)
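+ # small delay so the 25 concurrent runs are more likely to overlap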
- @cache_region('long_term')
+ @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
def _dummy_func(cache_key):
- return 'result'
+ val = 'async'
+ return 'result:{}'.format(val)
+
+ inv_context_manager = rc_cache.InvalidationContext(
+ uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
- invalidator_context = CacheKey.repo_context_cache(
- _dummy_func, 'test_repo_1', 'repo')
+ # 1st call, fresh caches
+ with inv_context_manager as invalidation_context:
+ should_invalidate = invalidation_context.should_invalidate()
+ if should_invalidate:
+ _dummy_func.invalidate('some-key-3')
+ _dummy_func('some-key-3')
- with invalidator_context as context:
- context.invalidate()
- context.compute()
-
- CacheKey.set_invalidate('test_repo_1', delete=True)
+ # Mark invalidation
+ CacheKey.set_invalidate(invalidation_namespace)
test_create_and_delete_cache_keys()
diff --git a/rhodecode/tests/pylons_plugin.py b/rhodecode/tests/pylons_plugin.py
--- a/rhodecode/tests/pylons_plugin.py
+++ b/rhodecode/tests/pylons_plugin.py
@@ -125,11 +125,6 @@ def vcsserver_factory(tmpdir_factory):
overrides = list(overrides)
overrides.append({'server:main': {'port': vcsserver_port}})
- if is_cygwin():
- platform_override = {'DEFAULT': {
- 'beaker.cache.repo_object.type': 'nocache'}}
- overrides.append(platform_override)
-
option_name = 'vcsserver_config_http'
override_option_name = 'vcsserver_config_override'
config_file = get_config(
diff --git a/rhodecode/tests/vcs_operations/test_vcs_operations.py b/rhodecode/tests/vcs_operations/test_vcs_operations.py
--- a/rhodecode/tests/vcs_operations/test_vcs_operations.py
+++ b/rhodecode/tests/vcs_operations/test_vcs_operations.py
@@ -32,6 +32,7 @@ import time
import pytest
+from rhodecode.lib import rc_cache
from rhodecode.model.auth_token import AuthTokenModel
from rhodecode.model.db import Repository, UserIpMap, CacheKey
from rhodecode.model.meta import Session
@@ -217,46 +218,44 @@ class TestVCSOperations(object):
_check_proper_git_push(stdout, stderr)
- def test_push_invalidates_cache_hg(self, rc_web_server, tmpdir):
- key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).scalar()
- if not key:
- key = CacheKey(HG_REPO, HG_REPO)
+ def test_push_invalidates_cache(self, rc_web_server, tmpdir):
+ hg_repo = Repository.get_by_repo_name(HG_REPO)
+
+ # init cache objects
+ CacheKey.delete_all_cache()
+ cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id)
+ invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+ repo_id=hg_repo.repo_id)
- key.cache_active = True
- Session().add(key)
- Session().commit()
+ inv_context_manager = rc_cache.InvalidationContext(
+ uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
- clone_url = rc_web_server.repo_clone_url(HG_REPO)
+ with inv_context_manager as invalidation_context:
+ # __enter__ will create and register cache objects
+ pass
+
+ # clone to init cache
+ clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
stdout, stderr = Command('/tmp').execute(
'hg clone', clone_url, tmpdir.strpath)
+ cache_keys = hg_repo.cache_keys
+ assert cache_keys != []
+ for key in cache_keys:
+ assert key.cache_active is True
+
+ # PUSH that should trigger cache invalidation
stdout, stderr = _add_files_and_push(
'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
- key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).one()
- assert key.cache_active is False
-
- def test_push_invalidates_cache_git(self, rc_web_server, tmpdir):
- key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).scalar()
- if not key:
- key = CacheKey(GIT_REPO, GIT_REPO)
-
- key.cache_active = True
- Session().add(key)
+ # flush...
Session().commit()
-
- clone_url = rc_web_server.repo_clone_url(GIT_REPO)
- stdout, stderr = Command('/tmp').execute(
- 'git clone', clone_url, tmpdir.strpath)
-
- # commit some stuff into this repo
- stdout, stderr = _add_files_and_push(
- 'git', tmpdir.strpath, clone_url=clone_url, files_no=1)
- _check_proper_git_push(stdout, stderr)
-
- key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).one()
-
- assert key.cache_active is False
+ hg_repo = Repository.get_by_repo_name(HG_REPO)
+ cache_keys = hg_repo.cache_keys
+ assert cache_keys != []
+ for key in cache_keys:
+ # keys should now be marked as inactive
+ assert key.cache_active is False
def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
clone_url = rc_web_server.repo_clone_url(HG_REPO)