caches: synced code with ce changes
super-admin
r1062:837924f7 python3
@@ -17,21 +17,22 @@
 
 import logging
 from dogpile.cache import register_backend
+module_name = 'vcsserver'
 
 register_backend(
-    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.memory_lru", f"{module_name}.lib.rc_cache.backends",
     "LRUMemoryBackend")
 
 register_backend(
-    "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.file_namespace", f"{module_name}.lib.rc_cache.backends",
     "FileNamespaceBackend")
 
 register_backend(
-    "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.redis", f"{module_name}.lib.rc_cache.backends",
     "RedisPickleBackend")
 
 register_backend(
-    "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
+    "dogpile.cache.rc.redis_msgpack", f"{module_name}.lib.rc_cache.backends",
     "RedisMsgPackBackend")
 
 
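This first hunk only parametrizes the module path used when registering the custom dogpile.cache backends; the remaining hunks change the backend module those registrations point at. For orientation, a registered backend is consumed through dogpile's normal region API roughly as sketched below. The expiration time and the cached function are illustrative assumptions, not values taken from this commit, and the import only works where vcsserver (and its registration module) is importable.

from dogpile.cache import make_region

# Hypothetical consumer of one of the backends registered above; only the
# backend name 'dogpile.cache.rc.memory_lru' comes from the hunk.
region = make_region().configure(
    'dogpile.cache.rc.memory_lru',
    expiration_time=60,  # assumed value, not from the commit
)

@region.cache_on_arguments()
def expensive_lookup(repo_id):
    # stand-in for a real computation
    return {'repo_id': repo_id}

expensive_lookup(1)  # computed, then stored by the LRU memory backend
expensive_lookup(1)  # served from cache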
@@ -18,17 +18,21 @@
 import time
 import errno
 import logging
+import functools
 
 import msgpack
 import redis
 import pickle
+import fcntl
+flock_org = fcntl.flock
+from typing import Union
 
-from dogpile.cache.api import CachedValue
 from dogpile.cache.backends import memory as memory_backend
 from dogpile.cache.backends import file as file_backend
 from dogpile.cache.backends import redis as redis_backend
-from dogpile.cache.backends.file import NO_VALUE, FileLock
+from dogpile.cache.backends.file import FileLock
 from dogpile.cache.util import memoized_property
+from dogpile.cache.api import Serializer, Deserializer
 
 from pyramid.settings import asbool
 
@@ -67,55 +71,22 @@ class LRUMemoryBackend(memory_backend.Me
         self.delete(key)
 
 
-class PickleSerializer(object):
-
-    def _dumps(self, value, safe=False):
-        try:
-            return pickle.dumps(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-    def _loads(self, value, safe=True):
-        try:
-            return pickle.loads(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
+class PickleSerializer:
+    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
+        functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
+    )
+    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
+        functools.partial(pickle.loads)
+    )
 
 
 class MsgPackSerializer(object):
-
-    def _dumps(self, value, safe=False):
-        try:
-            return msgpack.packb(value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-    def _loads(self, value, safe=True):
-        """
-        pickle maintained the `CachedValue` wrapper of the tuple
-        msgpack does not, so it must be added back in.
-        """
-        try:
-            value = msgpack.unpackb(value, use_list=False)
-            return CachedValue(*value)
-        except Exception:
-            if safe:
-                return NO_VALUE
-            else:
-                raise
-
-
-import fcntl
-flock_org = fcntl.flock
+    serializer: Union[None, Serializer] = staticmethod(  # type: ignore
+        msgpack.packb
+    )
+    deserializer: Union[None, Deserializer] = staticmethod(  # type: ignore
+        functools.partial(msgpack.unpackb, use_list=False)
+    )
 
 
 class CustomLockFactory(FileLock):
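The old _dumps/_loads helpers, which wrapped tuples in CachedValue and silently returned NO_VALUE on errors, are replaced by plain bytes-in/bytes-out callables typed with dogpile.cache.api's Serializer/Deserializer. A standalone round-trip sketch of the same callables follows; this is assumed illustration code, not part of the commit.

import functools
import pickle

import msgpack

# Mirrors the serializer/deserializer pairs defined in the hunk above.
pickle_dumps = functools.partial(pickle.dumps, protocol=pickle.HIGHEST_PROTOCOL)
pickle_loads = pickle.loads
msgpack_dumps = msgpack.packb
msgpack_loads = functools.partial(msgpack.unpackb, use_list=False)

payload = {"answer": 42, "items": (1, 2, 3)}

assert pickle_loads(pickle_dumps(payload)) == payload    # bytes round-trip
assert msgpack_loads(msgpack_dumps(payload)) == payload  # tuples kept via use_list=False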
@@ -161,36 +132,6 @@ class FileNamespaceBackend(PickleSeriali
     def get_store(self):
         return self.filename
 
-    def _dbm_get(self, key):
-        with self._dbm_file(False) as dbm:
-            if hasattr(dbm, 'get'):
-                value = dbm.get(key, NO_VALUE)
-            else:
-                # gdbm objects lack a .get method
-                try:
-                    value = dbm[key]
-                except KeyError:
-                    value = NO_VALUE
-            if value is not NO_VALUE:
-                value = self._loads(value)
-            return value
-
-    def get(self, key):
-        try:
-            return self._dbm_get(key)
-        except Exception:
-            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
-            raise
-
-    def set(self, key, value):
-        with self._dbm_file(True) as dbm:
-            dbm[key] = self._dumps(value)
-
-    def set_multi(self, mapping):
-        with self._dbm_file(True) as dbm:
-            for key, value in mapping.items():
-                dbm[key] = self._dumps(value)
-
 
 class BaseRedisBackend(redis_backend.RedisBackend):
     key_prefix = ''
@@ -217,58 +158,26 @@ class BaseRedisBackend(redis_backend.Red
         )
 
         connection_pool = redis.ConnectionPool(**args)
-
-        return redis.StrictRedis(connection_pool=connection_pool)
+        self.writer_client = redis.StrictRedis(
+            connection_pool=connection_pool
+        )
+        self.reader_client = self.writer_client
 
     def list_keys(self, prefix=''):
         prefix = '{}:{}*'.format(self.key_prefix, prefix)
-        return self.client.keys(prefix)
+        return self.reader_client.keys(prefix)
 
     def get_store(self):
-        return self.client.connection_pool
-
-    def get(self, key):
-        value = self.client.get(key)
-        if value is None:
-            return NO_VALUE
-        return self._loads(value)
-
-    def get_multi(self, keys):
-        if not keys:
-            return []
-        values = self.client.mget(keys)
-        loads = self._loads
-        return [
-            loads(v) if v is not None else NO_VALUE
-            for v in values]
-
-    def set(self, key, value):
-        if self.redis_expiration_time:
-            self.client.setex(key, self.redis_expiration_time,
-                              self._dumps(value))
-        else:
-            self.client.set(key, self._dumps(value))
-
-    def set_multi(self, mapping):
-        dumps = self._dumps
-        mapping = dict(
-            (k, dumps(v))
-            for k, v in mapping.items()
-        )
-
-        if not self.redis_expiration_time:
-            self.client.mset(mapping)
-        else:
-            pipe = self.client.pipeline()
-            for key, value in mapping.items():
-                pipe.setex(key, self.redis_expiration_time, value)
-            pipe.execute()
+        return self.reader_client.connection_pool
 
     def get_mutex(self, key):
         if self.distributed_lock:
             lock_key = '_lock_{0}'.format(safe_str(key))
-            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
-                                  auto_renewal=self._lock_auto_renewal)
+            return get_mutex_lock(
+                self.writer_client, lock_key,
+                self._lock_timeout,
+                auto_renewal=self._lock_auto_renewal
+            )
         else:
             return None
 
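The Redis backend now keeps two named handles instead of a single implicit client: writes and the distributed mutex use writer_client, reads use reader_client, which at this point is simply the same StrictRedis instance. A rough standalone sketch of that split is below; the host/port values and the replica remark are assumptions for illustration, not something this commit configures.

import redis

# Assumed connection settings, for illustration only.
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)

writer_client = redis.StrictRedis(connection_pool=pool)
reader_client = writer_client  # same instance today; could later point at a read replica

writer_client.set('rc_cache:example', b'value')
print(reader_client.get('rc_cache:example'))  # reads go through the reader handle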
@@ -284,7 +193,7 @@ class RedisMsgPackBackend(MsgPackSeriali
 
 
 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
-    import redis_lock
+    from vcsserver.lib._vendor import redis_lock
 
     class _RedisLockWrapper(object):
         """LockWrapper for redis_lock"""