##// END OF EJS Templates
caches: fixed issue with exception on handling non-ascii cache keys.
super-admin -
r963:8b44d37f default
parent child Browse files
Show More
@@ -1,328 +1,329 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33
33
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str
35
36
36
37
37 _default_max_size = 1024
38 _default_max_size = 1024
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-process memory backend backed by a size-bounded LRU dict."""
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        # The debug variant additionally logs key counts when requested.
        lru_class = LRUDictDebug if arguments.pop('log_key_count', None) else LRUDict

        arguments['cache_dict'] = lru_class(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # Deleting a key that is absent is deliberately a no-op.
        try:
            del self._cache[key]
        except KeyError:
            pass

    def delete_multi(self, keys):
        for cache_key in keys:
            self.delete(cache_key)
66
67
67
68
class PickleSerializer(object):
    """Mixin that (de)serializes cached values with pickle.

    `safe` controls whether serialization errors are swallowed
    (returning NO_VALUE) or re-raised.
    """

    def _dumps(self, value, safe=False):
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
87
88
88
89
class MsgPackSerializer(object):
    """Mixin that (de)serializes cached values with msgpack.

    `safe` controls whether serialization errors are swallowed
    (returning NO_VALUE) or re-raised.
    """

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
113
114
114
115
115 import fcntl
116 import fcntl
116 flock_org = fcntl.flock
117 flock_org = fcntl.flock
117
118
118
119
class CustomLockFactory(FileLock):
    # Placeholder subclass: currently behaves exactly like dogpile's
    # FileLock, but provides a project-local hook point for customization.

    pass
122
123
123
124
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file cache backend with pickle serialization.

    Keys are stored with a `file_backend:` prefix; failures while opening
    or reading the DBM file are logged before being re-raised.
    """
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        # Fixed typo in the log message: "initialing" -> "initializing".
        log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
            log.error('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        """Return an iterable of stored keys matching `prefix` (prefixed
        with `key_prefix`)."""
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def cond(v):
            if not prefix:
                return True

            if v.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                return filter(cond, dbm.keys())
            except Exception:
                log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        # Human-readable identifier of the store, used in log messages.
        return self.filename

    def _dbm_get(self, key):
        # Read a single raw value and deserialize it; NO_VALUE when absent.
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
191
192
192
193
class BaseRedisBackend(redis_backend.RedisBackend):
    """Shared base for the redis cache backends.

    Extends dogpile's stock RedisBackend with distributed-lock handling
    (optionally auto-renewed) for `get_mutex`.
    """
    key_prefix = ''

    def __init__(self, arguments):
        super(BaseRedisBackend, self).__init__(arguments)
        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # set default timeout for auto_renewal
            self._lock_timeout = 30

    def _create_client(self):
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # setex per key inside one pipeline so every entry gets a TTL.
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        if self.distributed_lock:
            # safe_str() guards against an exception when the cache key
            # contains non-ascii characters (previously the raw key was
            # formatted directly into the lock name).
            lock_key = redis_backend.u('_lock_{0}').format(safe_str(key))
            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                                  auto_renewal=self._lock_auto_renewal)
        else:
            return None
272
273
273
274
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend that stores pickle-serialized values."""
    key_prefix = 'redis_pickle_backend'
277
278
278
279
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend that stores msgpack-serialized values."""
    key_prefix = 'redis_msgpack_backend'
282
283
283
284
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    # Build a dogpile-compatible mutex (acquire/release interface) on top of
    # the redis_lock distributed lock. Imported lazily so redis_lock is only
    # required when distributed locking is actually enabled.
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __repr__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def __str__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def __init__(self):
            self.lock = self.get_lock()
            self.lock_key = lock_key

        def acquire(self, wait=True):
            # Returns True when the lock was obtained, False otherwise
            # (mirroring dogpile's mutex contract).
            log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
            try:
                acquired = self.lock.acquire(wait)
                log.debug('Got lock for key %s, %s', self.lock_key, acquired)
                return acquired
            except redis_lock.AlreadyAcquired:
                # This holder already owns the lock; report non-acquisition.
                return False
            except redis_lock.AlreadyStarted:
                # refresh thread exists, but it also means we acquired the lock
                return True

        def release(self):
            # Releasing a lock we never acquired is deliberately a no-op.
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                pass

    return _RedisLockWrapper()
General Comments 0
You need to be logged in to leave comments. Login now