##// END OF EJS Templates
caches: fixed unicode error on non-ascii cache key
super-admin -
r1015:5e5c2773 default
parent child Browse files
Show More
@@ -1,329 +1,329 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33
33
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str
35 from vcsserver.utils import safe_str, safe_unicode
36
36
37
37
38 _default_max_size = 1024
38 _default_max_size = 1024
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-process memory backend whose store is an LRU dict, keeping the
    number of cached entries bounded by ``max_size``."""
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        # Bound on the number of entries; least-recently-used are evicted.
        max_size = arguments.pop('max_size', _default_max_size)

        # The debug variant additionally logs key counts.
        dict_cls = LRUDictDebug if arguments.pop('log_key_count', None) else LRUDict

        arguments['cache_dict'] = dict_cls(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        """Remove ``key`` from the cache; missing keys are ignored."""
        try:
            del self._cache[key]
        except KeyError:
            # deleting an absent key is deliberately a no-op
            pass

    def delete_multi(self, keys):
        """Remove each key in ``keys``, ignoring missing ones."""
        for k in keys:
            self.delete(k)
67
67
68
68
class PickleSerializer(object):
    """Mixin that (de)serializes cached values with pickle."""

    def _dumps(self, value, safe=False):
        """Pickle ``value``; with ``safe=True`` return NO_VALUE on failure
        instead of raising."""
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """Unpickle ``value``; with ``safe=True`` return NO_VALUE on failure
        instead of raising."""
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
88
88
89
89
class MsgPackSerializer(object):
    """Mixin that (de)serializes cached values with msgpack."""

    def _dumps(self, value, safe=False):
        """Pack ``value`` with msgpack; with ``safe=True`` return NO_VALUE on
        failure instead of raising."""
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
114
114
115
115
116 import fcntl
116 import fcntl
117 flock_org = fcntl.flock
117 flock_org = fcntl.flock
118
118
119
119
class CustomLockFactory(FileLock):
    """Extension point for a customized file lock; currently identical in
    behavior to dogpile's FileLock."""
123
123
124
124
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file backed cache namespace; values are pickled via
    PickleSerializer and keys are namespaced with ``key_prefix``."""
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        db_file = arguments.get('filename')

        log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
        try:
            super(FileNamespaceBackend, self).__init__(arguments)
        except Exception:
            # log with traceback so broken/locked DB files are diagnosable
            log.exception('Failed to initialize db at: %s', db_file)
            raise

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        """Return all stored keys starting with the namespaced ``prefix``."""
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        def cond(v):
            if not prefix:
                return True

            if v.startswith(prefix):
                return True
            return False

        with self._dbm_file(True) as dbm:
            try:
                return filter(cond, dbm.keys())
            except Exception:
                # log.exception keeps the traceback of the underlying DBM error
                log.exception('Failed to fetch DBM keys from DB: %s', self.get_store())
                raise

    def get_store(self):
        """Return the DBM file path backing this cache."""
        return self.filename

    def _dbm_get(self, key):
        # Read a single raw value from the DBM file and deserialize it;
        # returns NO_VALUE on a miss.
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def get(self, key):
        try:
            return self._dbm_get(key)
        except Exception:
            # log.exception keeps the traceback of the underlying DBM error
            log.exception('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
            raise

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
192
192
193
193
class BaseRedisBackend(redis_backend.RedisBackend):
    """Shared redis-backed cache base with optional distributed locking."""
    key_prefix = ''

    def __init__(self, arguments):
        super(BaseRedisBackend, self).__init__(arguments)
        self._lock_timeout = self.lock_timeout
        self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))

        if self._lock_auto_renewal and not self._lock_timeout:
            # set default timeout for auto_renewal
            self._lock_timeout = 30

    def _create_client(self):
        """Build a StrictRedis client from either a URL or host/port args."""
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        """Return all redis keys matching the namespaced ``prefix``."""
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # mset cannot set TTLs; use a pipeline of setex calls instead
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        """Return a distributed lock for ``key``, or None when distributed
        locking is disabled."""
        if self.distributed_lock:
            # build the lock key as unicode to avoid a UnicodeDecodeError
            # when the cache key contains non-ascii characters
            lock_key = redis_backend.u(u'_lock_{0}'.format(safe_unicode(key)))
            return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                                  auto_renewal=self._lock_auto_renewal)
        else:
            return None
273
273
274
274
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing pickle-serialized values."""
    key_prefix = 'redis_pickle_backend'
278
278
279
279
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing msgpack-serialized values."""
    key_prefix = 'redis_msgpack_backend'
283
283
284
284
def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
    """Return a redis_lock-based mutex wrapper bound to ``lock_key``."""
    import redis_lock

    class _RedisLockWrapper(object):
        """LockWrapper for redis_lock"""

        @classmethod
        def get_lock(cls):
            # one redis_lock.Lock per wrapper instance, sharing the
            # enclosing function's client/key/timeout configuration
            return redis_lock.Lock(
                redis_client=client,
                name=lock_key,
                expire=lock_timeout,
                auto_renewal=auto_renewal,
                strict=True,
            )

        def __init__(self):
            self.lock = self.get_lock()
            self.lock_key = lock_key

        def __repr__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def __str__(self):
            return "{}:{}".format(self.__class__.__name__, lock_key)

        def acquire(self, wait=True):
            """Acquire the lock; returns True when held, False otherwise."""
            log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
            try:
                acquired = self.lock.acquire(wait)
                log.debug('Got lock for key %s, %s', self.lock_key, acquired)
                return acquired
            except redis_lock.AlreadyAcquired:
                return False
            except redis_lock.AlreadyStarted:
                # refresh thread exists, but it also means we acquired the lock
                return True

        def release(self):
            """Release the lock; releasing an unheld lock is a no-op."""
            try:
                self.lock.release()
            except redis_lock.NotAcquired:
                pass

    return _RedisLockWrapper()
General Comments 0
You need to be logged in to leave comments. Login now