##// END OF EJS Templates
caches: use safest possible pickle for cache backends....
marcink -
r2909:dfc57a11 default
parent child Browse files
Show More
@@ -1,181 +1,199 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2018 RhodeCode GmbH
3 # Copyright (C) 2015-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import time
20 import time
21 import errno
21 import errno
22 import logging
22 import logging
23
23
24 import gevent
24 import gevent
25
25
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31 from lru import LRU as LRUDict
31 from lru import LRU as LRUDict
32
32
33
33
34 _default_max_size = 1024
34 _default_max_size = 1024
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """
    In-process memory backend backed by a bounded LRU dictionary.

    Values are stored as live objects (``pickle_values = False``), so this
    backend is only meaningful within a single process.
    """
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        callback = None
        if arguments.pop('log_max_size_reached', None):
            def evicted(key, value):
                # invoked by LRUDict each time an entry is dropped to make room
                log.debug(
                    'LRU: evicting key `%s` due to max size %s reach', key, max_size)
            callback = evicted

        arguments['cache_dict'] = LRUDict(max_size, callback=callback)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # EAFP instead of the Python2-only ``has_key`` check: avoids the
        # check-then-act race and stays compatible with Python 3 mappings
        try:
            del self._cache[key]
        except KeyError:
            # deleting a missing key is a no-op, matching the old behavior
            pass

    def delete_multi(self, keys):
        # delegate so single- and multi-delete share one code path
        for key in keys:
            self.delete(key)
62
62
63
63
class Serializer(object):
    """
    Mixin adding pickle-based (de)serialization with optional error
    swallowing for cache backends.

    NOTE: unpickling data from an untrusted store is unsafe; these helpers
    assume the cache contents were written by this application.
    """

    def _dumps(self, value, safe=False):
        # serialize ``value``; with safe=True a failure yields NO_VALUE
        # instead of propagating, so callers can treat it as a cache miss
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        # deserialize ``value``; safe defaults to True so corrupt or
        # incompatible payloads degrade to a cache miss rather than an error
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
70
82
71
83
class CustomLockFactory(FileLock):
    """
    FileLock variant that patches ``fcntl.flock`` with a gevent-cooperative
    retry loop, so a blocked lock does not freeze the whole event loop.
    """

    @memoized_property
    def _module(self):
        # imported lazily; the patched module object is memoized
        import fcntl
        original_flock = fcntl.flock

        def gevent_flock(fd, operation):
            """
            Gevent compatible flock
            """
            # set non-blocking, this will cause an exception if we cannot acquire a lock
            operation |= fcntl.LOCK_NB
            started = time.time()
            max_wait = 60 * 5  # 5min
            while True:
                try:
                    original_flock(fd, operation)
                except (OSError, IOError) as e:
                    # raise on other errors than Resource temporarily unavailable
                    if e.errno != errno.EAGAIN:
                        raise
                    elif (time.time() - started) > max_wait:
                        # waited too much time on a lock, better fail than loop forever
                        raise
                else:
                    # lock has been acquired
                    return

                log.debug('Failed to acquire lock, retry in 0.1')
                gevent.sleep(0.1)

        fcntl.flock = gevent_flock
        return fcntl
105
117
106
118
class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
    """
    DBM-file backend using pickled values and a gevent-friendly lock factory.
    """

    def __init__(self, arguments):
        arguments['lock_factory'] = CustomLockFactory
        super(FileNamespaceBackend, self).__init__(arguments)

    def list_keys(self, prefix=''):
        # keep every key when no prefix is given, otherwise match by prefix
        def matches(key):
            return not prefix or key.startswith(prefix)

        with self._dbm_file(True) as dbm:

            return filter(matches, dbm.keys())

    def get_store(self):
        # for file backends the "store" is simply the dbm file path
        return self.filename

    def get(self, key):
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                raw = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    raw = dbm[key]
                except KeyError:
                    raw = NO_VALUE
            if raw is NO_VALUE:
                return raw
            # safe unpickle: corrupt payloads come back as NO_VALUE
            return self._loads(raw)

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for mapping_key, mapping_value in mapping.items():
                dbm[mapping_key] = self._dumps(mapping_value)
151
163
152
164
class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
    """
    Redis backend storing pickled values.

    Uses the ``Serializer`` mixin, so corrupt or unreadable cache entries
    deserialize to ``NO_VALUE`` (a cache miss) instead of raising.
    """

    def list_keys(self, prefix=''):
        """Return cache keys, optionally restricted to ``prefix``."""
        # redis KEYS needs a glob pattern; an empty pattern matches nothing,
        # so fall back to '*' to genuinely list all keys when no prefix given
        if prefix:
            pattern = prefix + '*'
        else:
            pattern = '*'
        return self.client.keys(pattern)

    def get_store(self):
        # expose the underlying connection pool, mainly for introspection
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            return NO_VALUE
        # safe unpickle: corrupt payloads degrade to NO_VALUE
        return self._loads(value)

    def set(self, key, value):
        if self.redis_expiration_time:
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        mapping = dict(
            (k, self._dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # no MSET-with-TTL in redis; emulate via a pipeline of SETEX calls
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()
General Comments 0
You need to be logged in to leave comments. Login now