##// END OF EJS Templates
caches: optimized defaults for safer, more reliable behaviour
super-admin -
r951:fbffdae7 default
parent child Browse files
Show More
@@ -1,311 +1,313 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from pyramid.settings import asbool
33
32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33
35
34
36
35 _default_max_size = 1024
37 _default_max_size = 1024
36
38
37 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
38
40
39
41
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    In-memory dogpile backend whose store is an LRU dict, so the number of
    cached keys stays bounded by ``max_size`` instead of growing forever.
    """
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # The debug variant additionally logs key counts on access.
        if arguments.pop('log_key_count', None):
            dict_cls = LRUDictDebug
        else:
            dict_cls = LRUDict

        arguments['cache_dict'] = dict_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        """Remove ``key`` from the cache; missing keys are ignored."""
        try:
            del self._cache[key]
        except KeyError:
            # deletion is best-effort; absent keys are a no-op
            pass

    def delete_multi(self, keys):
        """Remove each key in ``keys``, ignoring any that are absent."""
        for cache_key in keys:
            self.delete(cache_key)
64
66
65
67
class PickleSerializer(object):
    """Mixin serializing cache values with pickle (dogpile's compat shim)."""

    def _dumps(self, value, safe=False):
        """Pickle ``value``; with ``safe`` return NO_VALUE instead of raising."""
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """Unpickle ``value``; with ``safe`` return NO_VALUE instead of raising."""
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
85
87
86
88
class MsgPackSerializer(object):
    """Mixin serializing cache values with msgpack."""

    def _dumps(self, value, safe=False):
        """Pack ``value``; with ``safe`` return NO_VALUE instead of raising."""
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
111
113
112
114
# NOTE(review): mid-file import — keep a module-level reference to the
# original flock so file-lock customizations can wrap or restore it.
import fcntl
flock_org = fcntl.flock
115
117
116
118
class CustomLockFactory(FileLock):
    # Pass-through subclass of dogpile's FileLock: currently identical in
    # behavior, but gives the project a hook to customize file locking.

    pass
120
122
121
123
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """
    File (DBM) based cache backend.

    Values are pickled before being written to the DBM file, and keys are
    namespaced with ``key_prefix``.
    """
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
            log.error('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        """Return all stored keys starting with ``key_prefix:prefix``."""
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def cond(v):
            if not prefix:
                return True

            if v.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                # Materialize while the DBM file is still open: a lazy
                # `filter` iterator would be consumed only after the context
                # manager has closed the file.
                return list(filter(cond, dbm.keys()))
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        return self.filename

    def _dbm_get(self, key):
        # Fetch + deserialize a single key, tolerating gdbm's reduced API.
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
189
191
190
192
class BaseRedisBackend(redis_backend.RedisBackend):
    """
    Shared logic for the redis-backed dogpile backends; concrete subclasses
    mix in a serializer (pickle or msgpack) and set ``key_prefix``.
    """
    key_prefix = ''

    def __init__(self, arguments):
        super(BaseRedisBackend, self).__init__(arguments)

        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = asbool(
            arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # set default timeout for auto_renewal
            self._lock_timeout = 30

    def _create_client(self):
        args = {}

        if self.url is not None:
            args.update(url=self.url)
        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        """Return all redis keys matching ``key_prefix:prefix*``."""
        pattern = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(pattern)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        raw = self.client.get(key)
        if raw is None:
            return NO_VALUE
        return self._loads(raw)

    def get_multi(self, keys):
        if not keys:
            return []
        loads = self._loads
        return [
            NO_VALUE if raw is None else loads(raw)
            for raw in self.client.mget(keys)
        ]

    def set(self, key, value):
        payload = self._dumps(value)
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time, payload)
        else:
            self.client.set(key, payload)

    def set_multi(self, mapping):
        dumps = self._dumps
        serialized = {k: dumps(v) for k, v in mapping.items()}

        if self.redis_expiration_time:
            # per-key TTL requires setex; pipeline to keep it one round-trip
            pipe = self.client.pipeline()
            for key, payload in serialized.items():
                pipe.setex(key, self.redis_expiration_time, payload)
            pipe.execute()
        else:
            self.client.mset(serialized)

    def get_mutex(self, key):
        """Return a distributed redis lock for ``key``, or None if disabled."""
        if not self.distributed_lock:
            return None

        lock_key = redis_backend.u('_lock_{0}').format(key)
        log.debug('Trying to acquire Redis lock for key %s', lock_key)
        return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                              auto_renewal=self._lock_auto_renewal)
271
273
272
274
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing values serialized with pickle."""
    # NOTE: dropped a redundant `pass` — the class body is already non-empty.
    key_prefix = 'redis_pickle_backend'
276
278
277
279
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing values serialized with msgpack."""
    # NOTE: dropped a redundant `pass` — the class body is already non-empty.
    key_prefix = 'redis_msgpack_backend'
281
283
282
284
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    """
    Create a dogpile-compatible mutex for ``lock_key`` backed by redis_lock.

    :param client: redis client instance used for the lock
    :param lock_key: name of the redis key holding the lock
    :param lock_timeout: lock expiry in seconds
    :param auto_renewal: keep extending the lock's expiry while it is held
    """
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __init__(self):
            # Cache ONE Lock instance per wrapper. The previous per-access
            # property created a brand-new Lock on every use, so release()
            # operated on a lock object that never acquired anything
            # (NotAcquired was silently swallowed) and auto-renewal threads
            # started by acquire() were never stopped.
            self.lock = self.get_lock()

        def acquire(self, wait=True):
            return self.lock.acquire(wait)

        def release(self):
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                # releasing a lock we do not hold is a best-effort no-op
                pass

    return _RedisLockWrapper()
General Comments 0
You need to be logged in to leave comments. Login now