##// END OF EJS Templates
caches: synced cache logic with vcsserver.
Author: marcink
Revision r3851:a5ece276 (default branch)
parent child Browse files
Show More
@@ -33,12 +33,16 b' register_backend('
33 33 "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
34 34 "RedisPickleBackend")
35 35
36 register_backend(
37 "dogpile.cache.rc.redis_msgpack", "rhodecode.lib.rc_cache.backends",
38 "RedisMsgPackBackend")
39
36 40
37 41 log = logging.getLogger(__name__)
38 42
39 43 from . import region_meta
40 44 from .utils import (
41 get_default_cache_settings, key_generator, get_or_create_region,
45 get_default_cache_settings, backend_key_generator, get_or_create_region,
42 46 clear_cache_namespace, make_region, InvalidationContext,
43 47 FreshRegionCache, ActiveRegionCache)
44 48
@@ -61,13 +65,12 b' def configure_dogpile_cache(settings):'
61 65 for region_name in avail_regions:
62 66 new_region = make_region(
63 67 name=region_name,
64 function_key_generator=key_generator
68 function_key_generator=None
65 69 )
66 70
67 71 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
68
69 log.debug('dogpile: registering a new region %s[%s]',
70 region_name, new_region.__dict__)
72 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
73 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
71 74 region_meta.dogpile_cache_regions[region_name] = new_region
72 75
73 76
@@ -22,8 +22,10 b' import time'
22 22 import errno
23 23 import logging
24 24
25 import msgpack
25 26 import gevent
26 27
28 from dogpile.cache.api import CachedValue
27 29 from dogpile.cache.backends import memory as memory_backend
28 30 from dogpile.cache.backends import file as file_backend
29 31 from dogpile.cache.backends import redis as redis_backend
@@ -39,6 +41,7 b' log = logging.getLogger(__name__)'
39 41
40 42
41 43 class LRUMemoryBackend(memory_backend.MemoryBackend):
44 key_prefix = 'lru_mem_backend'
42 45 pickle_values = False
43 46
44 47 def __init__(self, arguments):
@@ -63,7 +66,8 b' class LRUMemoryBackend(memory_backend.Me'
63 66 self.delete(key)
64 67
65 68
66 class Serializer(object):
69 class PickleSerializer(object):
70
67 71 def _dumps(self, value, safe=False):
68 72 try:
69 73 return compat.pickle.dumps(value)
@@ -83,6 +87,32 b' class Serializer(object):'
83 87 raise
84 88
85 89
90 class MsgPackSerializer(object):
91
92 def _dumps(self, value, safe=False):
93 try:
94 return msgpack.packb(value)
95 except Exception:
96 if safe:
97 return NO_VALUE
98 else:
99 raise
100
101 def _loads(self, value, safe=True):
102 """
103 pickle maintained the `CachedValue` wrapper of the tuple
104 msgpack does not, so it must be added back in.
105 """
106 try:
107 value = msgpack.unpackb(value, use_list=False)
108 return CachedValue(*value)
109 except Exception:
110 if safe:
111 return NO_VALUE
112 else:
113 raise
114
115
86 116 import fcntl
87 117 flock_org = fcntl.flock
88 118
@@ -123,13 +153,16 b' class CustomLockFactory(FileLock):'
123 153 return fcntl
124 154
125 155
126 class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
156 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
157 key_prefix = 'file_backend'
127 158
128 159 def __init__(self, arguments):
129 160 arguments['lock_factory'] = CustomLockFactory
130 161 super(FileNamespaceBackend, self).__init__(arguments)
131 162
132 163 def list_keys(self, prefix=''):
164 prefix = '{}:{}'.format(self.key_prefix, prefix)
165
133 166 def cond(v):
134 167 if not prefix:
135 168 return True
@@ -169,10 +202,9 b' class FileNamespaceBackend(Serializer, f'
169 202 dbm[key] = self._dumps(value)
170 203
171 204
172 class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
205 class BaseRedisBackend(redis_backend.RedisBackend):
173 206 def list_keys(self, prefix=''):
174 if prefix:
175 prefix = prefix + '*'
207 prefix = '{}:{}*'.format(self.key_prefix, prefix)
176 208 return self.client.keys(prefix)
177 209
178 210 def get_store(self):
@@ -184,6 +216,15 b' class RedisPickleBackend(Serializer, red'
184 216 return NO_VALUE
185 217 return self._loads(value)
186 218
219 def get_multi(self, keys):
220 if not keys:
221 return []
222 values = self.client.mget(keys)
223 loads = self._loads
224 return [
225 loads(v) if v is not None else NO_VALUE
226 for v in values]
227
187 228 def set(self, key, value):
188 229 if self.redis_expiration_time:
189 230 self.client.setex(key, self.redis_expiration_time,
@@ -192,8 +233,9 b' class RedisPickleBackend(Serializer, red'
192 233 self.client.set(key, self._dumps(value))
193 234
194 235 def set_multi(self, mapping):
236 dumps = self._dumps
195 237 mapping = dict(
196 (k, self._dumps(v))
238 (k, dumps(v))
197 239 for k, v in mapping.items()
198 240 )
199 241
@@ -213,3 +255,13 b' class RedisPickleBackend(Serializer, red'
213 255 return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
214 256 else:
215 257 return None
258
259
260 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
261 key_prefix = 'redis_pickle_backend'
262 pass
263
264
265 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
266 key_prefix = 'redis_msgpack_backend'
267 pass
@@ -134,13 +134,23 b' def compute_key_from_params(*args):'
134 134 return sha1("_".join(map(safe_str, args)))
135 135
136 136
137 def key_generator(namespace, fn):
137 def backend_key_generator(backend):
138 """
139 Special wrapper that also sends over the backend to the key generator
140 """
141 def wrapper(namespace, fn):
142 return key_generator(backend, namespace, fn)
143 return wrapper
144
145
146 def key_generator(backend, namespace, fn):
138 147 fname = fn.__name__
139 148
140 149 def generate_key(*args):
141 namespace_pref = namespace or 'default'
150 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
151 namespace_pref = namespace or 'default_namespace'
142 152 arg_key = compute_key_from_params(*args)
143 final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
153 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
144 154
145 155 return final_key
146 156
@@ -167,7 +177,8 b' def get_or_create_region(region_name, re'
167 177 if not os.path.isdir(cache_dir):
168 178 os.makedirs(cache_dir)
169 179 new_region = make_region(
170 name=region_uid_name, function_key_generator=key_generator
180 name=region_uid_name,
181 function_key_generator=backend_key_generator(region_obj.actual_backend)
171 182 )
172 183 namespace_filename = os.path.join(
173 184 cache_dir, "{}.cache.dbm".format(region_namespace))
@@ -179,7 +190,7 b' def get_or_create_region(region_name, re'
179 190 )
180 191
181 192 # create and save in region caches
182 log.debug('configuring new region: %s',region_uid_name)
193 log.debug('configuring new region: %s', region_uid_name)
183 194 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
184 195
185 196 return region_obj
@@ -67,7 +67,7 b' class TestCaches(object):'
67 67 def test_cache_keygen(self, example_input, example_namespace):
68 68 def func_wrapped():
69 69 return 1
70 func = rc_cache.utils.key_generator(example_namespace, func_wrapped)
70 func = rc_cache.utils.key_generator(None, example_namespace, func_wrapped)
71 71 key = func(*example_input)
72 72 assert key
73 73
General Comments 0
You need to be logged in to leave comments. Login now