caches: added redis pool for redis cache backend
marcink
r781:9979cb94 default
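
The gist of the change: instead of letting the inherited dogpile Redis backend construct its client directly, the new `BaseRedisBackend._create_client()` builds an explicit `redis.ConnectionPool` and hands it to `redis.StrictRedis`, so the backend's cache operations draw connections from one shared pool. A minimal standalone sketch of that pattern (host/port/db values are placeholders, not taken from this commit):

import redis

# One pool, shared by every client instance created from it, so TCP
# connections to Redis are reused rather than re-established per client.
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)

client_a = redis.StrictRedis(connection_pool=pool)
client_b = redis.StrictRedis(connection_pool=pool)

client_a.set('example-key', 'example-value')
print(client_b.get('example-key'))  # both clients read through the same pool
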
@@ -1,231 +1,253 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 import time
 import errno
 import logging
 
 import msgpack
+import redis
+
 from dogpile.cache.api import CachedValue
 from dogpile.cache.backends import memory as memory_backend
 from dogpile.cache.backends import file as file_backend
 from dogpile.cache.backends import redis as redis_backend
 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
 from dogpile.cache.util import memoized_property
 
 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
 
 
 _default_max_size = 1024
 
 log = logging.getLogger(__name__)
 
 
 class LRUMemoryBackend(memory_backend.MemoryBackend):
     key_prefix = 'lru_mem_backend'
     pickle_values = False
 
     def __init__(self, arguments):
         max_size = arguments.pop('max_size', _default_max_size)
 
         LRUDictClass = LRUDict
         if arguments.pop('log_key_count', None):
             LRUDictClass = LRUDictDebug
 
         arguments['cache_dict'] = LRUDictClass(max_size)
         super(LRUMemoryBackend, self).__init__(arguments)
 
     def delete(self, key):
         try:
             del self._cache[key]
         except KeyError:
             # we don't care if key isn't there at deletion
             pass
 
     def delete_multi(self, keys):
         for key in keys:
             self.delete(key)
 
 
 class PickleSerializer(object):
 
     def _dumps(self, value, safe=False):
         try:
             return compat.pickle.dumps(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise
 
     def _loads(self, value, safe=True):
         try:
             return compat.pickle.loads(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise
 
 
 class MsgPackSerializer(object):
 
     def _dumps(self, value, safe=False):
         try:
             return msgpack.packb(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise
 
     def _loads(self, value, safe=True):
         """
         pickle maintained the `CachedValue` wrapper of the tuple
         msgpack does not, so it must be added back in.
         """
         try:
             value = msgpack.unpackb(value, use_list=False)
             return CachedValue(*value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise
 
 
 import fcntl
 flock_org = fcntl.flock
 
 
 class CustomLockFactory(FileLock):
 
     pass
 
 
 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
     key_prefix = 'file_backend'
 
     def __init__(self, arguments):
         arguments['lock_factory'] = CustomLockFactory
         super(FileNamespaceBackend, self).__init__(arguments)
 
+    def __repr__(self):
+        return '{} `{}`'.format(self.__class__, self.filename)
+
     def list_keys(self, prefix=''):
         prefix = '{}:{}'.format(self.key_prefix, prefix)
 
         def cond(v):
             if not prefix:
                 return True
 
             if v.startswith(prefix):
                 return True
             return False
 
         with self._dbm_file(True) as dbm:
 
             return filter(cond, dbm.keys())
 
     def get_store(self):
         return self.filename
 
     def get(self, key):
         with self._dbm_file(False) as dbm:
             if hasattr(dbm, 'get'):
                 value = dbm.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
                     value = dbm[key]
                 except KeyError:
                     value = NO_VALUE
             if value is not NO_VALUE:
                 value = self._loads(value)
             return value
 
     def set(self, key, value):
         with self._dbm_file(True) as dbm:
             dbm[key] = self._dumps(value)
 
     def set_multi(self, mapping):
         with self._dbm_file(True) as dbm:
             for key, value in mapping.items():
                 dbm[key] = self._dumps(value)
 
 
 class BaseRedisBackend(redis_backend.RedisBackend):
+
+    def _create_client(self):
+        args = {}
+
+        if self.url is not None:
+            args.update(url=self.url)
+
+        else:
+            args.update(
+                host=self.host, password=self.password,
+                port=self.port, db=self.db
+            )
+
+        connection_pool = redis.ConnectionPool(**args)
+
+        return redis.StrictRedis(connection_pool=connection_pool)
+
     def list_keys(self, prefix=''):
         prefix = '{}:{}*'.format(self.key_prefix, prefix)
         return self.client.keys(prefix)
 
     def get_store(self):
         return self.client.connection_pool
 
     def get(self, key):
         value = self.client.get(key)
         if value is None:
             return NO_VALUE
         return self._loads(value)
 
     def get_multi(self, keys):
         if not keys:
             return []
         values = self.client.mget(keys)
         loads = self._loads
         return [
             loads(v) if v is not None else NO_VALUE
             for v in values]
 
     def set(self, key, value):
         if self.redis_expiration_time:
             self.client.setex(key, self.redis_expiration_time,
                               self._dumps(value))
         else:
             self.client.set(key, self._dumps(value))
 
     def set_multi(self, mapping):
         dumps = self._dumps
         mapping = dict(
             (k, dumps(v))
             for k, v in mapping.items()
         )
 
         if not self.redis_expiration_time:
             self.client.mset(mapping)
         else:
             pipe = self.client.pipeline()
             for key, value in mapping.items():
                 pipe.setex(key, self.redis_expiration_time, value)
             pipe.execute()
 
     def get_mutex(self, key):
         u = redis_backend.u
         if self.distributed_lock:
             lock_key = u('_lock_{0}').format(key)
             log.debug('Trying to acquire Redis lock for key %s', lock_key)
             return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
         else:
             return None
 
 
 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
     key_prefix = 'redis_pickle_backend'
     pass
 
 
 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
     key_prefix = 'redis_msgpack_backend'
     pass
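
For context, a hedged sketch of how one of the backends touched here might be wired into a dogpile.cache region. The registration name, module path, and argument values below are illustrative assumptions, not taken from this commit; the argument names (`host`, `port`, `db`, `redis_expiration_time`, `distributed_lock`) are the standard ones of dogpile's Redis backend that `BaseRedisBackend` inherits.

from dogpile.cache import make_region
from dogpile.cache.region import register_backend

# Hypothetical module path for the file shown in this diff; adjust it to
# wherever RedisMsgPackBackend actually lives in the codebase.
register_backend(
    'rc_cache.redis_msgpack', 'vcsserver.lib.rc_cache.backends',
    'RedisMsgPackBackend')

region = make_region().configure(
    'rc_cache.redis_msgpack',
    expiration_time=300,
    arguments={
        # forwarded by the backend to redis.ConnectionPool in _create_client()
        'host': 'localhost',
        'port': 6379,
        'db': 0,
        'redis_expiration_time': 300,  # per-key TTL used by set()/set_multi()
        'distributed_lock': True,      # hand out a Redis lock from get_mutex()
    },
)

@region.cache_on_arguments()
def cached_lookup(key):
    # placeholder for an expensive computation worth caching
    return 'computed-value-for-{}'.format(key)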