##// END OF EJS Templates
caches: added redis pool for redis cache backend
marcink -
r781:9979cb94 default
parent child Browse files
Show More
@@ -1,231 +1,253 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 import redis
24
23 25 from dogpile.cache.api import CachedValue
24 26 from dogpile.cache.backends import memory as memory_backend
25 27 from dogpile.cache.backends import file as file_backend
26 28 from dogpile.cache.backends import redis as redis_backend
27 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
28 30 from dogpile.cache.util import memoized_property
29 31
30 32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
31 33
32 34
33 35 _default_max_size = 1024
34 36
35 37 log = logging.getLogger(__name__)
36 38
37 39
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-process memory backend whose store is a bounded LRU dict.

    Accepts two extra backend arguments:
      ``max_size``      -- capacity of the LRU dict (default ``_default_max_size``)
      ``log_key_count`` -- when truthy, use the debug LRU dict that logs key counts
    """
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        capacity = arguments.pop('max_size', _default_max_size)
        debug_keys = arguments.pop('log_key_count', None)
        dict_cls = LRUDictDebug if debug_keys else LRUDict
        # dogpile's MemoryBackend stores into whatever dict-like object is
        # passed as `cache_dict`; hand it our bounded LRU implementation.
        arguments['cache_dict'] = dict_cls(capacity)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        """Remove `key` from the cache; missing keys are silently ignored."""
        try:
            del self._cache[key]
        except KeyError:
            pass  # deleting an absent key is a no-op by design

    def delete_multi(self, keys):
        """Remove every key in `keys`, ignoring those not present."""
        for one_key in keys:
            self.delete(one_key)
62 64
63 65
class PickleSerializer(object):
    """Mixin that (de)serializes cached values with pickle.

    The `safe` flag controls error handling: when set, serialization
    failures yield ``NO_VALUE`` instead of propagating the exception.
    Note the asymmetric defaults: dumps raises by default, loads is
    safe by default.
    """

    def _dumps(self, value, safe=False):
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
83 85
84 86
class MsgPackSerializer(object):
    """Mixin that (de)serializes cached values with msgpack.

    Mirrors :class:`PickleSerializer`'s interface and `safe` semantics
    (dumps raises by default, loads returns ``NO_VALUE`` by default).
    """

    def _dumps(self, value, safe=False):
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """Deserialize a msgpack payload back into a ``CachedValue``.

        pickle maintained the `CachedValue` wrapper of the tuple;
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
109 111
110 112
111 113 import fcntl
112 114 flock_org = fcntl.flock
113 115
114 116
class CustomLockFactory(FileLock):
    """Placeholder subclass of dogpile's ``FileLock``.

    Currently identical to ``FileLock``; exists as an extension point so
    custom locking behaviour can be added without touching backend wiring.
    """
118 120
119 121
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file backed cache that pickles values and namespaces keys.

    Uses :class:`CustomLockFactory` for file locking and prefixes all keys
    with ``key_prefix``.
    """
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        super(FileNamespaceBackend, self).__init__(arguments)

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        """Return the stored keys matching ``key_prefix:prefix``.

        Returns a ``filter`` object (lazy iterable) over the dbm's keys.
        """
        prefix = '{}:{}'.format(self.key_prefix, prefix)

        # NOTE: prefix always contains at least 'file_backend:', so the
        # previous `if not prefix: return True` branch was unreachable
        # dead code; a plain startswith test is equivalent.
        def cond(v):
            return v.startswith(prefix)

        with self._dbm_file(True) as dbm:
            return filter(cond, dbm.keys())

    def get_store(self):
        """Return the path of the backing dbm file."""
        return self.filename

    def get(self, key):
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                value = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    value = dbm[key]
                except KeyError:
                    value = NO_VALUE
            if value is not NO_VALUE:
                value = self._loads(value)
            return value

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
167 172
168 173
class BaseRedisBackend(redis_backend.RedisBackend):
    """Redis backend variant that builds its client on an explicit
    connection pool and adds namespace-aware helpers.
    """

    def _create_client(self):
        """Build a ``StrictRedis`` client backed by a ``ConnectionPool``.

        BUGFIX: ``redis.ConnectionPool(**{'url': ...})`` forwards ``url``
        as a connection kwarg, which ``redis.Connection.__init__`` does not
        accept and raises ``TypeError`` on first use. URL-style config must
        go through ``ConnectionPool.from_url()``.
        """
        if self.url is not None:
            connection_pool = redis.ConnectionPool.from_url(self.url)
        else:
            connection_pool = redis.ConnectionPool(
                host=self.host, password=self.password,
                port=self.port, db=self.db,
            )

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        """Return all redis keys matching ``key_prefix:prefix*``."""
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        """Expose the underlying connection pool (for introspection)."""
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        # use SETEX so the TTL is applied atomically with the write
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # MSET has no TTL support; pipeline individual SETEX calls
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        """Return a distributed redis lock for `key`, or None when
        distributed locking is disabled (dogpile falls back to a
        process-local lock in that case)."""
        u = redis_backend.u
        if self.distributed_lock:
            lock_key = u('_lock_{0}').format(key)
            log.debug('Trying to acquire Redis lock for key %s', lock_key)
            return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
        else:
            return None
222 244
223 245
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend serializing values with pickle."""
    key_prefix = 'redis_pickle_backend'
227 249
228 250
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend serializing values with msgpack."""
    key_prefix = 'redis_msgpack_backend'
General Comments 0
You need to be logged in to leave comments. Login now