caches: synced code with ce changes
super-admin
r1062:837924f7 python3
@@ -17,21 +17,22 @@
 
 import logging
 from dogpile.cache import register_backend
+module_name = 'vcsserver'
 
 register_backend(
-    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
     "LRUMemoryBackend")
 
 register_backend(
-    "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
     "FileNamespaceBackend")
 
 register_backend(
-    "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
     "RedisPickleBackend")
 
 register_backend(
-    "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
     "RedisMsgPackBackend")
 
 
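Note (illustrative, not part of the diff): the string names registered above are what callers hand to dogpile.cache when configuring a region, and the remaining hunks below change the backends module those names point to. A minimal sketch of wiring one of them up, assuming dogpile.cache's standard make_region() API; the expiration time and cache file path are made-up example values:

from dogpile.cache import make_region

# 'dogpile.cache.rc.file_namespace' is one of the names registered above.
file_region = make_region().configure(
    'dogpile.cache.rc.file_namespace',
    expiration_time=60,
    arguments={'filename': '/tmp/example_rc_cache.dbm'},
)

file_region.set('example_key', {'answer': 42})
assert file_region.get('example_key') == {'answer': 42}
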
@@ -18,17 +18,21 @@
 import time
 import errno
 import logging
+import functools
 
 import msgpack
 import redis
 import pickle
+import fcntl
+flock_org = fcntl.flock
+from typing import Union
 
-from dogpile.cache.api import CachedValue
 from dogpile.cache.backends import memory as memory_backend
 from dogpile.cache.backends import file as file_backend
 from dogpile.cache.backends import redis as redis_backend
-from dogpile.cache.backends.file import NO_VALUE, FileLock
+from dogpile.cache.backends.file import FileLock
 from dogpile.cache.util import memoized_property
+from dogpile.cache.api import Serializer, Deserializer
 
 from pyramid.settings import asbool
 
@@ -67,55 +71,22 @@ class LRUMemoryBackend(memory_backend.Me
             self.delete(key)
 
 
-class PickleSerializer(object):
-
-    def _dumps(self, value, safe=False):
-        try:
-            return pickle.dumps(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-    def _loads(self, value, safe=True):
-        try:
-            return pickle.loads(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
+class PickleSerializer:
+    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
+        functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
+    )
+    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
+        functools.partial(pickle.loads)
+    )
 
 
 class MsgPackSerializer(object):
-
-    def _dumps(self, value, safe=False):
-        try:
-            return msgpack.packb(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-    def _loads(self, value, safe=True):
-        """
-        pickle maintained the `CachedValue` wrapper of the tuple
-        msgpack does not, so it must be added back in.
-        """
-        try:
-            value = msgpack.unpackb(value, use_list=False)
-            return CachedValue(*value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-
-import fcntl
-flock_org = fcntl.flock
+    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
+        msgpack.packb
+    )
+    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
+        functools.partial(msgpack.unpackb, use_list=False)
+    )
 
 
 class CustomLockFactory(FileLock):
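Note (illustrative, not part of the diff): the rewritten mixins target dogpile.cache's region-level serialization API (the Serializer/Deserializer types imported above), where serializer and deserializer are plain callables converting values to and from bytes, replacing the old _dumps/_loads methods and the CachedValue handling. A self-contained sanity check of that contract using the same callables:

import functools
import pickle

import msgpack

pickle_ser = functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
pickle_deser = pickle.loads
msgpack_ser = msgpack.packb
msgpack_deser = functools.partial(msgpack.unpackb, use_list=False)

payload = {'key': 'value', 'nums': [1, 2, 3]}
assert pickle_deser(pickle_ser(payload)) == payload

# use_list=False means msgpack round-trips sequences back as tuples.
assert msgpack_deser(msgpack_ser([1, 2, 3])) == (1, 2, 3)
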
@@ -161,36 +132,6 @@ class FileNamespaceBackend(PickleSeriali
     def get_store(self):
         return self.filename
 
-    def _dbm_get(self, key):
-        with self._dbm_file(False) as dbm:
-            if hasattr(dbm, 'get'):
-                value = dbm.get(key, NO_VALUE)
-            else:
-                # gdbm objects lack a .get method
-                try:
-                    value = dbm[key]
-                except KeyError:
-                    value = NO_VALUE
-            if value is not NO_VALUE:
-                value = self._loads(value)
-            return value
-
-    def get(self, key):
-        try:
-            return self._dbm_get(key)
-        except Exception:
-            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
-            raise
-
-    def set(self, key, value):
-        with self._dbm_file(True) as dbm:
-            dbm[key] = self._dumps(value)
-
-    def set_multi(self, mapping):
-        with self._dbm_file(True) as dbm:
-            for key, value in mapping.items():
-                dbm[key] = self._dumps(value)
-
 
 class BaseRedisBackend(redis_backend.RedisBackend):
     key_prefix = ''
@@ -217,58 +158,26 @@ class BaseRedisBackend(redis_backend.Red
             )
 
         connection_pool = redis.ConnectionPool(**args)
-
-        return redis.StrictRedis(connection_pool=connection_pool)
+        self.writer_client = redis.StrictRedis(
+            connection_pool=connection_pool
+        )
+        self.reader_client = self.writer_client
 
     def list_keys(self, prefix=''):
         prefix = '{}:{}*'.format(self.key_prefix, prefix)
-        return self.client.keys(prefix)
+        return self.reader_client.keys(prefix)
 
     def get_store(self):
-        return self.client.connection_pool
-
-    def get(self, key):
-        value = self.client.get(key)
-        if value is None:
-            return NO_VALUE
-        return self._loads(value)
-
-    def get_multi(self, keys):
-        if not keys:
-            return []
-        values = self.client.mget(keys)
-        loads = self._loads
-        return [
-            loads(v) if v is not None else NO_VALUE
-            for v in values]
-
-    def set(self, key, value):
-        if self.redis_expiration_time:
-            self.client.setex(key, self.redis_expiration_time,
-                              self._dumps(value))
-        else:
-            self.client.set(key, self._dumps(value))
-
-    def set_multi(self, mapping):
-        dumps = self._dumps
-        mapping = dict(
-            (k, dumps(v))
-            for k, v in mapping.items()
-        )
-
-        if not self.redis_expiration_time:
-            self.client.mset(mapping)
-        else:
-            pipe = self.client.pipeline()
-            for key, value in mapping.items():
-                pipe.setex(key, self.redis_expiration_time, value)
-            pipe.execute()
+        return self.reader_client.connection_pool
 
     def get_mutex(self, key):
         if self.distributed_lock:
             lock_key = '_lock_{0}'.format(safe_str(key))
-            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
-                                  auto_renewal=self._lock_auto_renewal)
+            return get_mutex_lock(
+                self.writer_client, lock_key,
+                self._lock_timeout,
+                auto_renewal=self._lock_auto_renewal
+            )
         else:
             return None
 
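Note (illustrative, not part of the diff): the single self.client handle becomes a writer_client/reader_client pair sharing one connection pool, so behaviour is unchanged for now, while reads could presumably be pointed at a separate endpoint later. A stand-alone sketch of that split, assuming redis-py and a local Redis on the default port:

import redis

pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
writer_client = redis.StrictRedis(connection_pool=pool)
reader_client = writer_client  # same instance for now, mirroring the diff

writer_client.set('rc:example', b'cached-bytes')
assert reader_client.get('rc:example') == b'cached-bytes'
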
@@ -284,7 +193,7 @@ class RedisMsgPackBackend(MsgPackSeriali
 
 
 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
-    import redis_lock
+    from vcsserver.lib._vendor import redis_lock
 
     class _RedisLockWrapper(object):
         """LockWrapper for redis_lock"""