caches: synced cache logic with vcsserver.

Author: marcink
Revision: r3851:a5ece276 (default branch)
@@ -1,75 +1,78 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2015-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import logging
 from dogpile.cache import register_backend

 register_backend(
     "dogpile.cache.rc.memory_lru", "rhodecode.lib.rc_cache.backends",
     "LRUMemoryBackend")

 register_backend(
     "dogpile.cache.rc.file_namespace", "rhodecode.lib.rc_cache.backends",
     "FileNamespaceBackend")

 register_backend(
     "dogpile.cache.rc.redis", "rhodecode.lib.rc_cache.backends",
     "RedisPickleBackend")

+register_backend(
+    "dogpile.cache.rc.redis_msgpack", "rhodecode.lib.rc_cache.backends",
+    "RedisMsgPackBackend")
+

 log = logging.getLogger(__name__)

 from . import region_meta
 from .utils import (
-    get_default_cache_settings, key_generator, get_or_create_region,
+    get_default_cache_settings, backend_key_generator, get_or_create_region,
     clear_cache_namespace, make_region, InvalidationContext,
     FreshRegionCache, ActiveRegionCache)


 def configure_dogpile_cache(settings):
     cache_dir = settings.get('cache_dir')
     if cache_dir:
         region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

     rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

     # inspect available namespaces
     avail_regions = set()
     for key in rc_cache_data.keys():
         namespace_name = key.split('.', 1)[0]
         avail_regions.add(namespace_name)
     log.debug('dogpile: found following cache regions: %s', avail_regions)

     # register them into namespace
     for region_name in avail_regions:
         new_region = make_region(
             name=region_name,
-            function_key_generator=key_generator
+            function_key_generator=None
         )

         new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
-
-        log.debug('dogpile: registering a new region %s[%s]',
-                  region_name, new_region.__dict__)
+        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
+        log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
         region_meta.dogpile_cache_regions[region_name] = new_region


 def includeme(config):
     configure_dogpile_cache(config.registry.settings)
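
Note the two-step wiring in the hunk above (the rc_cache package __init__, judging by the relative imports): `make_region` is now called with `function_key_generator=None`, and the real generator is attached only after `configure_from_config`, because `actual_backend` is resolved only once the region has been configured. A minimal sketch of the same late-binding pattern against plain dogpile — the region setup and names here are illustrative, not part of this commit::

    from dogpile.cache import make_region

    def backend_aware_key_generator(backend):
        # close over the backend so its key_prefix ends up in every key,
        # mirroring what backend_key_generator does in this commit
        def function_key_generator(namespace, fn):
            def generate_key(*args):
                prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
                return '{}:{}:{}_{}'.format(
                    prefix, namespace or 'default_namespace',
                    fn.__name__, '_'.join(map(str, args)))
            return generate_key
        return function_key_generator

    region = make_region(function_key_generator=None)
    region.configure('dogpile.cache.memory')
    # actual_backend only exists after configure(), hence the late binding
    region.function_key_generator = backend_aware_key_generator(region.actual_backend)

    @region.cache_on_arguments(namespace='demo')
    def expensive(x):
        # cached under e.g. 'backend_prefix:demo:expensive_<args>'
        return x * 2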
@@ -1,215 +1,267 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2015-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import time
 import errno
 import logging

+import msgpack
 import gevent

+from dogpile.cache.api import CachedValue
 from dogpile.cache.backends import memory as memory_backend
 from dogpile.cache.backends import file as file_backend
 from dogpile.cache.backends import redis as redis_backend
 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
 from dogpile.cache.util import memoized_property

 from rhodecode.lib.memory_lru_dict import LRUDict, LRUDictDebug


 _default_max_size = 1024

 log = logging.getLogger(__name__)


 class LRUMemoryBackend(memory_backend.MemoryBackend):
+    key_prefix = 'lru_mem_backend'
     pickle_values = False

     def __init__(self, arguments):
         max_size = arguments.pop('max_size', _default_max_size)

         LRUDictClass = LRUDict
         if arguments.pop('log_key_count', None):
             LRUDictClass = LRUDictDebug

         arguments['cache_dict'] = LRUDictClass(max_size)
         super(LRUMemoryBackend, self).__init__(arguments)

     def delete(self, key):
         try:
             del self._cache[key]
         except KeyError:
             # we don't care if key isn't there at deletion
             pass

     def delete_multi(self, keys):
         for key in keys:
             self.delete(key)


-class Serializer(object):
+class PickleSerializer(object):
+
     def _dumps(self, value, safe=False):
         try:
             return compat.pickle.dumps(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise

     def _loads(self, value, safe=True):
         try:
             return compat.pickle.loads(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise


+class MsgPackSerializer(object):
+
+    def _dumps(self, value, safe=False):
+        try:
+            return msgpack.packb(value)
+        except Exception:
+            if safe:
+                return NO_VALUE
+            else:
+                raise
+
+    def _loads(self, value, safe=True):
+        """
+        pickle maintained the `CachedValue` wrapper of the tuple
+        msgpack does not, so it must be added back in.
+        """
+        try:
+            value = msgpack.unpackb(value, use_list=False)
+            return CachedValue(*value)
+        except Exception:
+            if safe:
+                return NO_VALUE
+            else:
+                raise
+
+
 import fcntl
 flock_org = fcntl.flock


 class CustomLockFactory(FileLock):

     @memoized_property
     def _module(self):

         def gevent_flock(fd, operation):
             """
             Gevent compatible flock
             """
             # set non-blocking, this will cause an exception if we cannot acquire a lock
             operation |= fcntl.LOCK_NB
             start_lock_time = time.time()
             timeout = 60 * 15  # 15min
             while True:
                 try:
                     flock_org(fd, operation)
                     # lock has been acquired
                     break
                 except (OSError, IOError) as e:
                     # raise on other errors than Resource temporarily unavailable
                     if e.errno != errno.EAGAIN:
                         raise
                     elif (time.time() - start_lock_time) > timeout:
                         # waited to much time on a lock, better fail than loop for ever
                         log.error('Failed to acquire lock on `%s` after waiting %ss',
                                   self.filename, timeout)
                         raise
                     wait_timeout = 0.03
                     log.debug('Failed to acquire lock on `%s`, retry in %ss',
                               self.filename, wait_timeout)
                     gevent.sleep(wait_timeout)

         fcntl.flock = gevent_flock
         return fcntl


-class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
+class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
+    key_prefix = 'file_backend'

     def __init__(self, arguments):
         arguments['lock_factory'] = CustomLockFactory
         super(FileNamespaceBackend, self).__init__(arguments)

     def list_keys(self, prefix=''):
+        prefix = '{}:{}'.format(self.key_prefix, prefix)
+
         def cond(v):
             if not prefix:
                 return True

             if v.startswith(prefix):
                 return True
             return False

         with self._dbm_file(True) as dbm:

             return filter(cond, dbm.keys())

     def get_store(self):
         return self.filename

     def get(self, key):
         with self._dbm_file(False) as dbm:
             if hasattr(dbm, 'get'):
                 value = dbm.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
                     value = dbm[key]
                 except KeyError:
                     value = NO_VALUE
             if value is not NO_VALUE:
                 value = self._loads(value)
             return value

     def set(self, key, value):
         with self._dbm_file(True) as dbm:
             dbm[key] = self._dumps(value)

     def set_multi(self, mapping):
         with self._dbm_file(True) as dbm:
             for key, value in mapping.items():
                 dbm[key] = self._dumps(value)


-class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
+class BaseRedisBackend(redis_backend.RedisBackend):
     def list_keys(self, prefix=''):
-        if prefix:
-            prefix = prefix + '*'
+        prefix = '{}:{}*'.format(self.key_prefix, prefix)
         return self.client.keys(prefix)

     def get_store(self):
         return self.client.connection_pool

     def get(self, key):
         value = self.client.get(key)
         if value is None:
             return NO_VALUE
         return self._loads(value)

+    def get_multi(self, keys):
+        if not keys:
+            return []
+        values = self.client.mget(keys)
+        loads = self._loads
+        return [
+            loads(v) if v is not None else NO_VALUE
+            for v in values]
+
     def set(self, key, value):
         if self.redis_expiration_time:
             self.client.setex(key, self.redis_expiration_time,
                               self._dumps(value))
         else:
             self.client.set(key, self._dumps(value))

     def set_multi(self, mapping):
+        dumps = self._dumps
         mapping = dict(
-            (k, self._dumps(v))
+            (k, dumps(v))
             for k, v in mapping.items()
         )

         if not self.redis_expiration_time:
             self.client.mset(mapping)
         else:
             pipe = self.client.pipeline()
             for key, value in mapping.items():
                 pipe.setex(key, self.redis_expiration_time, value)
             pipe.execute()

     def get_mutex(self, key):
         u = redis_backend.u
         if self.distributed_lock:
             lock_key = u('_lock_{0}').format(key)
             log.debug('Trying to acquire Redis lock for key %s', lock_key)
             return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
         else:
             return None
+
+
+class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
+    key_prefix = 'redis_pickle_backend'
+    pass
+
+
+class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
+    key_prefix = 'redis_msgpack_backend'
+    pass
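
A note on the `CachedValue` handling in `MsgPackSerializer._loads` above: dogpile stores every cache entry as a `CachedValue(payload, metadata)` namedtuple. Pickle round-trips the namedtuple subclass intact, while msgpack flattens it to a plain tuple, which is why the wrapper has to be rebuilt on load. A standalone illustration of the asymmetry (assuming a msgpack version whose defaults round-trip the values used here; not part of this commit)::

    import pickle

    import msgpack
    from dogpile.cache.api import CachedValue

    value = CachedValue(42, {'ct': 1234567890.0, 'v': 1})

    # pickle preserves the CachedValue subclass through a round trip
    assert isinstance(pickle.loads(pickle.dumps(value)), CachedValue)

    # msgpack only sees a tuple, so the wrapper class is lost...
    raw = msgpack.unpackb(msgpack.packb(value), use_list=False)
    assert not isinstance(raw, CachedValue)

    # ...and must be re-applied, as MsgPackSerializer._loads does
    restored = CachedValue(*raw)
    assert isinstance(restored, CachedValue)
    assert restored.payload == 42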
@@ -1,327 +1,338 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2015-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 import os
 import time
 import logging
 import functools
 import threading

 from dogpile.cache import CacheRegion
 from dogpile.cache.util import compat

 import rhodecode
 from rhodecode.lib.utils import safe_str, sha1
 from rhodecode.lib.utils2 import safe_unicode, str2bool
 from rhodecode.model.db import Session, CacheKey, IntegrityError

 from . import region_meta

 log = logging.getLogger(__name__)


 class RhodeCodeCacheRegion(CacheRegion):

     def conditional_cache_on_arguments(
             self, namespace=None,
             expiration_time=None,
             should_cache_fn=None,
             to_str=compat.string_type,
             function_key_generator=None,
             condition=True):
         """
         Custom conditional decorator, that will not touch any dogpile internals if
         condition isn't meet. This works a bit different than should_cache_fn
         And it's faster in cases we don't ever want to compute cached values
         """
         expiration_time_is_callable = compat.callable(expiration_time)

         if function_key_generator is None:
             function_key_generator = self.function_key_generator

         def decorator(fn):
             if to_str is compat.string_type:
                 # backwards compatible
                 key_generator = function_key_generator(namespace, fn)
             else:
                 key_generator = function_key_generator(namespace, fn, to_str=to_str)

             @functools.wraps(fn)
             def decorate(*arg, **kw):
                 key = key_generator(*arg, **kw)

                 @functools.wraps(fn)
                 def creator():
                     return fn(*arg, **kw)

                 if not condition:
                     return creator()

                 timeout = expiration_time() if expiration_time_is_callable \
                     else expiration_time

                 return self.get_or_create(key, creator, timeout, should_cache_fn)

             def invalidate(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.delete(key)

             def set_(value, *arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.set(key, value)

             def get(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 return self.get(key)

             def refresh(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 value = fn(*arg, **kw)
                 self.set(key, value)
                 return value

             decorate.set = set_
             decorate.invalidate = invalidate
             decorate.refresh = refresh
             decorate.get = get
             decorate.original = fn
             decorate.key_generator = key_generator
             decorate.__wrapped__ = fn

             return decorate

         return decorator


 def make_region(*arg, **kw):
     return RhodeCodeCacheRegion(*arg, **kw)


 def get_default_cache_settings(settings, prefixes=None):
     prefixes = prefixes or []
     cache_settings = {}
     for key in settings.keys():
         for prefix in prefixes:
             if key.startswith(prefix):
                 name = key.split(prefix)[1].strip()
                 val = settings[key]
                 if isinstance(val, compat.string_types):
                     val = val.strip()
                 cache_settings[name] = val
     return cache_settings


 def compute_key_from_params(*args):
     """
     Helper to compute key from given params to be used in cache manager
     """
     return sha1("_".join(map(safe_str, args)))


-def key_generator(namespace, fn):
+def backend_key_generator(backend):
+    """
+    Special wrapper that also sends over the backend to the key generator
+    """
+    def wrapper(namespace, fn):
+        return key_generator(backend, namespace, fn)
+    return wrapper
+
+
+def key_generator(backend, namespace, fn):
     fname = fn.__name__

     def generate_key(*args):
-        namespace_pref = namespace or 'default'
+        backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
+        namespace_pref = namespace or 'default_namespace'
         arg_key = compute_key_from_params(*args)
-        final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
+        final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)

         return final_key

     return generate_key


 def get_or_create_region(region_name, region_namespace=None):
     from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
     region_obj = region_meta.dogpile_cache_regions.get(region_name)
     if not region_obj:
         raise EnvironmentError(
             'Region `{}` not in configured: {}.'.format(
                 region_name, region_meta.dogpile_cache_regions.keys()))

     region_uid_name = '{}:{}'.format(region_name, region_namespace)
     if isinstance(region_obj.actual_backend, FileNamespaceBackend):
         region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
         if region_exist:
             log.debug('Using already configured region: %s', region_namespace)
             return region_exist
         cache_dir = region_meta.dogpile_config_defaults['cache_dir']
         expiration_time = region_obj.expiration_time

         if not os.path.isdir(cache_dir):
             os.makedirs(cache_dir)
         new_region = make_region(
-            name=region_uid_name, function_key_generator=key_generator
+            name=region_uid_name,
+            function_key_generator=backend_key_generator(region_obj.actual_backend)
         )
         namespace_filename = os.path.join(
             cache_dir, "{}.cache.dbm".format(region_namespace))
         # special type that allows 1db per namespace
         new_region.configure(
             backend='dogpile.cache.rc.file_namespace',
             expiration_time=expiration_time,
             arguments={"filename": namespace_filename}
         )

         # create and save in region caches
-        log.debug('configuring new region: %s',region_uid_name)
+        log.debug('configuring new region: %s', region_uid_name)
         region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

     return region_obj


 def clear_cache_namespace(cache_region, cache_namespace_uid):
     region = get_or_create_region(cache_region, cache_namespace_uid)
     cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
     num_delete_keys = len(cache_keys)
     if num_delete_keys:
         region.delete_multi(cache_keys)
     return num_delete_keys


 class ActiveRegionCache(object):
     def __init__(self, context, cache_data):
         self.context = context
         self.cache_data = cache_data

     def should_invalidate(self):
         return False


 class FreshRegionCache(object):
     def __init__(self, context, cache_data):
         self.context = context
         self.cache_data = cache_data

     def should_invalidate(self):
         return True


 class InvalidationContext(object):
     """
     usage::

         from rhodecode.lib import rc_cache

         cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
         region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

         @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
         def heavy_compute(cache_name, param1, param2):
             print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

         # invalidation namespace is shared namespace key for all process caches
         # we use it to send a global signal
         invalidation_namespace = 'repo_cache:1'

         inv_context_manager = rc_cache.InvalidationContext(
             uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
         with inv_context_manager as invalidation_context:
             args = ('one', 'two')
             # re-compute and store cache if we get invalidate signal
             if invalidation_context.should_invalidate():
                 result = heavy_compute.refresh(*args)
             else:
                 result = heavy_compute(*args)

             compute_time = inv_context_manager.compute_time
             log.debug('result computed in %.3fs', compute_time)

         # To send global invalidation signal, simply run
         CacheKey.set_invalidate(invalidation_namespace)

     """

     def __repr__(self):
         return '<InvalidationContext:{}[{}]>'.format(
             safe_str(self.cache_key), safe_str(self.uid))

     def __init__(self, uid, invalidation_namespace='',
                  raise_exception=False, thread_scoped=None):
         self.uid = uid
         self.invalidation_namespace = invalidation_namespace
         self.raise_exception = raise_exception
         self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
         self.thread_id = 'global'

         if thread_scoped is None:
             # if we set "default" we can override this via .ini settings
             thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))

         # Append the thread id to the cache key if this invalidation context
         # should be scoped to the current thread.
         if thread_scoped is True:
             self.thread_id = threading.current_thread().ident

         self.cache_key = compute_key_from_params(uid)
         self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
             self.proc_id, self.thread_id, self.cache_key)
         self.compute_time = 0

     def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
         cache_obj = CacheKey.get_active_cache(self.cache_key)
         log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
         invalidation_namespace = invalidation_namespace or self.invalidation_namespace
         if not cache_obj:
             cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
         return cache_obj

     def __enter__(self):
         """
         Test if current object is valid, and return CacheRegion function
         that does invalidation and calculation
         """
         log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
         # register or get a new key based on uid
         self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
         cache_data = self.cache_obj.get_dict()
         self._start_time = time.time()
         if self.cache_obj.cache_active:
             # means our cache obj is existing and marked as it's
             # cache is not outdated, we return ActiveRegionCache
             self.skip_cache_active_change = True

             return ActiveRegionCache(context=self, cache_data=cache_data)

         # the key is either not existing or set to False, we return
         # the real invalidator which re-computes value. We additionally set
         # the flag to actually update the Database objects
         self.skip_cache_active_change = False
         return FreshRegionCache(context=self, cache_data=cache_data)

     def __exit__(self, exc_type, exc_val, exc_tb):
         # save compute time
         self.compute_time = time.time() - self._start_time

         if self.skip_cache_active_change:
             return

         try:
             self.cache_obj.cache_active = True
             Session().add(self.cache_obj)
             Session().commit()
         except IntegrityError:
             # if we catch integrity error, it means we inserted this object
             # assumption is that's really an edge race-condition case and
             # it's safe is to skip it
             Session().rollback()
         except Exception:
             log.exception('Failed to commit on cache key update')
             Session().rollback()
             if self.raise_exception:
                 raise
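
The upshot of `backend_key_generator` is the key layout: every generated key now carries the backend's `key_prefix` in front of the namespace, i.e. `<backend.key_prefix>:<namespace>:<func>_<sha1(args)>`. That is what lets the backends' `list_keys(prefix=...)` match on `'{key_prefix}:{prefix}'` and keeps keys written by different backends from colliding. A rough sketch of the resulting shape, using hashlib in place of rhodecode's `sha1` helper (names and values illustrative)::

    import hashlib

    def compute_key(*args):
        # stands in for utils.compute_key_from_params: sha1 over joined params
        return hashlib.sha1('_'.join(map(str, args)).encode('utf8')).hexdigest()

    def final_key(backend_prefix, namespace, fname, *args):
        # shape produced by key_generator in the hunk above
        return '{}:{}:{}_{}'.format(
            backend_prefix or 'backend_prefix',
            namespace or 'default_namespace',
            fname, compute_key(*args))

    # e.g. 'redis_msgpack_backend:cache_perms:compute_<40-hex-char sha1>'
    print(final_key('redis_msgpack_backend', 'cache_perms', 'compute', '/repo', 1))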
@@ -1,108 +1,108 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import time

 import pytest

 from rhodecode.lib import rc_cache


 @pytest.mark.usefixtures('app')
 class TestCaches(object):

     def test_cache_decorator_init_not_configured(self):
         with pytest.raises(EnvironmentError):
             rc_cache.get_or_create_region('dontexist')

     @pytest.mark.parametrize('region_name', [
         'cache_perms', u'cache_perms',
     ])
     def test_cache_decorator_init(self, region_name):
         namespace = region_name
         cache_region = rc_cache.get_or_create_region(
             region_name, region_namespace=namespace)
         assert cache_region

     @pytest.mark.parametrize('example_input', [
         ('',),
         (u'/ac',),
         (u'/ac', 1, 2, object()),
         (u'/Δ™Δ‡c', 1, 2, object()),
         ('/Δ…ac',),
         (u'/ac', ),
     ])
     def test_cache_manager_create_key(self, example_input):
         key = rc_cache.utils.compute_key_from_params(*example_input)
         assert key

     @pytest.mark.parametrize('example_namespace', [
         'namespace', None
     ])
     @pytest.mark.parametrize('example_input', [
         ('',),
         (u'/ac',),
         (u'/ac', 1, 2, object()),
         (u'/Δ™Δ‡c', 1, 2, object()),
         ('/Δ…ac',),
         (u'/ac', ),
     ])
     def test_cache_keygen(self, example_input, example_namespace):
         def func_wrapped():
             return 1
-        func = rc_cache.utils.key_generator(example_namespace, func_wrapped)
+        func = rc_cache.utils.key_generator(None, example_namespace, func_wrapped)
         key = func(*example_input)
         assert key

     def test_store_value_in_cache(self):
         cache_region = rc_cache.get_or_create_region('cache_perms')
         # make sure we empty the cache now
         cache_region.delete_multi(cache_region.backend.list_keys())

         assert cache_region.backend.list_keys() == []

         @cache_region.conditional_cache_on_arguments(expiration_time=5)
         def compute(key):
             return time.time()

         for x in range(10):
             compute(x)

         assert len(set(cache_region.backend.list_keys())) == 10

     def test_store_and_get_value_from_region(self):
         cache_region = rc_cache.get_or_create_region('cache_perms')
         # make sure we empty the cache now
         for key in cache_region.backend.list_keys():
             cache_region.delete(key)
         assert cache_region.backend.list_keys() == []

         @cache_region.conditional_cache_on_arguments(expiration_time=5)
         def compute(key):
             return time.time()

         result = set()
         for x in range(10):
             ret = compute('x')
             result.add(ret)

         # once computed we have only one value (the same from cache)
         # after executing it 10x
         assert len(result) == 1
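
For reference, a region would opt into the newly registered msgpack backend through the `rc_cache.<region>.` settings that `configure_dogpile_cache` consumes. Shown here as the parsed settings dict rather than .ini lines, with illustrative values; the `backend`/`expiration_time`/`arguments.*` key names follow dogpile's `configure_from_config` convention, and the exact options accepted are those of dogpile's redis backend::

    settings = {
        'cache_dir': '/tmp/rc_cache',  # hypothetical path
        'rc_cache.cache_perms.backend': 'dogpile.cache.rc.redis_msgpack',
        'rc_cache.cache_perms.expiration_time': 300,
        'rc_cache.cache_perms.arguments.host': 'localhost',
        'rc_cache.cache_perms.arguments.port': 6379,
        'rc_cache.cache_perms.arguments.db': 0,
    }
    configure_dogpile_cache(settings)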