##// END OF EJS Templates
caches: use safest possible pickle for cache backends.
marcink -
r2909:dfc57a11 default
parent child Browse files
Show More
@@ -1,181 +1,199 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import time
21 21 import errno
22 22 import logging
23 23
24 24 import gevent
25 25
26 26 from dogpile.cache.backends import memory as memory_backend
27 27 from dogpile.cache.backends import file as file_backend
28 28 from dogpile.cache.backends import redis as redis_backend
29 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 30 from dogpile.cache.util import memoized_property
31 31 from lru import LRU as LRUDict
32 32
33 33
34 34 _default_max_size = 1024
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    In-memory dogpile backend backed by a size-bounded LRU dict, so the
    cache cannot grow without limit.

    Arguments (popped from ``arguments`` before delegating to the base class):
      * ``max_size`` — maximum number of entries (default ``_default_max_size``)
      * ``log_max_size_reached`` — if truthy, log each LRU eviction at DEBUG
    """
    # values are stored as live objects, never pickled
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        callback = None
        if arguments.pop('log_max_size_reached', None):
            def evicted(key, value):
                # invoked by LRUDict whenever capacity forces an entry out
                log.debug(
                    'LRU: evicting key `%s` due to max size %s reach', key, max_size)
            callback = evicted

        arguments['cache_dict'] = LRUDict(max_size, callback=callback)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # `has_key` was removed in Python 3; the `in` operator is equivalent
        # on LRUDict and works on both Python 2 and 3.
        if key in self._cache:
            del self._cache[key]

    def delete_multi(self, keys):
        # reuse the single-key path so the membership check lives in one place
        for key in keys:
            self.delete(key)
62 62
63 63
class Serializer(object):
    """
    Mixin providing pickle (de)serialization for cache backends, with an
    optional ``safe`` mode that degrades errors into ``NO_VALUE`` (a cache
    miss) instead of propagating them.
    """

    def _dumps(self, value, safe=False):
        """Pickle ``value``; when ``safe``, return NO_VALUE on failure."""
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """Unpickle ``value``; by default failures become a cache miss.

        NOTE: ``pickle.loads`` is unsafe on untrusted data — cache contents
        are assumed to be written only by this application.
        """
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
70 82
71 83
class CustomLockFactory(FileLock):
    """
    FileLock variant whose ``flock`` cooperates with gevent: instead of
    blocking the whole process on the lock syscall, it polls in
    non-blocking mode and yields to the gevent hub between attempts.
    """

    @memoized_property
    def _module(self):
        import fcntl
        flock_org = fcntl.flock

        def gevent_flock(fd, operation):
            """
            Gevent compatible flock
            """
            # non-blocking mode makes flock raise instead of blocking,
            # so we can retry cooperatively
            operation |= fcntl.LOCK_NB
            started = time.time()
            timeout = 60 * 5  # 5min
            while True:
                try:
                    flock_org(fd, operation)
                    # lock acquired
                    break
                except (OSError, IOError) as e:
                    # anything other than "resource temporarily unavailable"
                    # is a real failure
                    if e.errno != errno.EAGAIN:
                        raise
                    if (time.time() - started) > timeout:
                        # waited too long on the lock; fail rather than
                        # loop forever
                        raise
                # contended: yield to the hub and retry shortly
                log.debug('Failed to acquire lock, retry in 0.1')
                gevent.sleep(0.1)

        fcntl.flock = gevent_flock
        return fcntl
105 117
106 118
class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
    """
    DBM file backend that pickles its values and installs the
    gevent-aware ``CustomLockFactory`` for file locking.
    """

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        super(FileNamespaceBackend, self).__init__(arguments)

    def list_keys(self, prefix=''):
        """Return stored keys, restricted to those starting with ``prefix``."""
        def matches(key):
            # empty prefix matches everything
            return not prefix or key.startswith(prefix)

        with self._dbm_file(True) as db:
            return filter(matches, db.keys())

    def get_store(self):
        # the DBM file path identifies this store
        return self.filename

    def get(self, key):
        with self._dbm_file(False) as db:
            if hasattr(db, 'get'):
                raw = db.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    raw = db[key]
                except KeyError:
                    raw = NO_VALUE
            return self._loads(raw) if raw is not NO_VALUE else raw

    def set(self, key, value):
        with self._dbm_file(True) as db:
            db[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as db:
            for key, value in mapping.items():
                db[key] = self._dumps(value)
151 163
152 164
class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
    """
    Redis backend storing pickled values; unreadable entries degrade to a
    cache miss via the ``Serializer`` mixin.
    """

    def list_keys(self, prefix=''):
        # NOTE(review): an empty prefix queries the literal pattern '' —
        # presumably callers always supply one; confirm whether "all keys"
        # was intended for the empty case.
        if prefix:
            prefix = prefix + '*'
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            # redis returns None for missing keys; translate to NO_VALUE
            return NO_VALUE
        return self._loads(value)

    def set(self, key, value):
        payload = self._dumps(value)
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time, payload)
        else:
            self.client.set(key, payload)

    def set_multi(self, mapping):
        serialized = dict(
            (k, self._dumps(v))
            for k, v in mapping.items()
        )

        if self.redis_expiration_time:
            # mset cannot attach TTLs; pipeline per-key setex calls instead
            pipe = self.client.pipeline()
            for key, value in serialized.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()
        else:
            self.client.mset(serialized)
General Comments 0
You need to be logged in to leave comments. Login now