##// END OF EJS Templates
fix(caches): removed cacheKey cleanup logic, as it's proven to fail and is not reliable.
super-admin -
r5287:486bcf43 default
parent child Browse files
Show More
@@ -1,404 +1,403 b''
1 # Copyright (C) 2015-2023 RhodeCode GmbH
1 # Copyright (C) 2015-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import functools
19 import functools
20 import logging
20 import logging
21 import os
21 import os
22 import threading
22 import threading
23 import time
23 import time
24
24
25 import decorator
25 import decorator
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.lib.hash_utils import sha1
29 from rhodecode.lib.hash_utils import sha1
30 from rhodecode.lib.str_utils import safe_bytes
30 from rhodecode.lib.str_utils import safe_bytes
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
31 from rhodecode.lib.type_utils import str2bool # noqa :required by imports from .utils
32
32
33 from . import region_meta, cache_key_meta
33 from . import region_meta
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
def isCython(func):
    """
    Private helper that checks if a function is a cython function.

    Cython-compiled callables expose the class name
    ``cython_function_or_method``; compare against that marker.
    """
    return type(func).__name__ == 'cython_function_or_method'
43
43
44
44
class RhodeCodeCacheRegion(CacheRegion):
    """
    dogpile ``CacheRegion`` subclass that adds a *conditional* variant of
    ``cache_on_arguments``: when the decorator's ``condition`` is falsy the
    wrapped function is called directly, bypassing dogpile entirely.
    """

    def __repr__(self):
        return f'{self.__class__}(name={self.name})'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        # expiration_time may be a callable producing the timeout lazily
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            # fall back to the namespace stamped on the region by
            # get_or_create_region (may be absent -> None)
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):
            # Fast path: condition is falsy -> no caching machinery at all,
            # just call the function and log timing.
            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)

            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # delete the cached value for these exact arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # explicitly store *value* under the key for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value (or dogpile's NO_VALUE sentinel)
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function,
            # mirroring dogpile's cache_on_arguments API
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
127
127
128
128
def make_region(*arg, **kw):
    """
    Factory for :class:`RhodeCodeCacheRegion`; drop-in replacement for
    dogpile's ``make_region`` that yields our conditional-caching region.
    """
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
131
131
132
132
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related entries from a settings mapping.

    For every key starting with one of ``prefixes``, the prefix is stripped
    and the remainder (whitespace-trimmed) becomes the key in the returned
    dict; string values are trimmed as well.

    :param settings: mapping of configuration keys to values (e.g. an .ini dict)
    :param prefixes: iterable of key prefixes to match; ``None`` means no matches
    :return: dict of stripped-key -> value
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # slice off the prefix instead of str.split(prefix)[1]:
                # split would truncate a key that contains the prefix twice
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
145
145
146
146
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
152
152
153
153
def custom_key_generator(backend, namespace, fn):
    """
    Return a key-generator bound to *backend*, *namespace* and function *fn*.

    Produced keys have the shape
    ``<backend_prefix>:<namespace>:<func_name>_<sha1-of-args>``.
    """
    func_name = fn.__name__

    def generate_key(*args):
        # fall back to placeholder segments when prefix/namespace are unset
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{func_name}_{arg_key}"

    return generate_key
166
166
167
167
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    # partial application: callers supply (namespace, fn) later
    return functools.partial(custom_key_generator, backend)
175
175
176
176
def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
    """
    Return an already-configured dogpile cache region, creating a
    namespace-specific one on the fly for file-based backends.

    :param region_name: name of a region registered in
        ``region_meta.dogpile_cache_regions``
    :param region_namespace: namespace key; required for
        ``FileNamespaceBackend`` regions (one cache-db file per namespace)
    :param use_async_runner: attach the package's async creation runner
        to the returned region
    :raises OSError: if ``region_name`` is not a configured region
    :raises ValueError: if a FileNamespaceBackend region is requested
        without a ``region_namespace``
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        # NOTE(review): lookup is by region_namespace alone (not region_uid_name),
        # so namespaces are assumed globally unique across regions — confirm
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however, if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        if not os.path.isdir(namespace_cache_dir):
            os.makedirs(namespace_cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    # stamp the default namespace so conditional_cache_on_arguments can use it
    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
233
233
234
234
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
    """
    Clear a cache namespace using the requested *method*.

    ``CLEAR_INVALIDATE`` soft-expires the region locally; ``CLEAR_DELETE``
    removes all backend keys under ``cache_namespace_uid``. Returns the
    number of deleted keys (0 for invalidate).
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    # allow passing either a region name or an already-resolved region object
    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s [prefix:%s] with method=%s',
              cache_region, cache_namespace_uid, method)

    affected = 0

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method’s default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)
    elif method == CLEAR_DELETE:
        affected = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)

    return affected
255
255
256
256
class ActiveRegionCache(object):
    """
    Invalidation-context result meaning the cached value is still valid;
    callers should serve the cache rather than recompute.
    """

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # active cache: no recomputation needed
        return False
264
264
265
265
class FreshRegionCache(object):
    """
    Invalidation-context result meaning the cache is missing or stale;
    callers should recompute (e.g. via ``.refresh()``).
    """

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # fresh/stale cache: force recomputation
        return True
273
273
274
274
class InvalidationContext(object):
    """
    usage::

        from rhodecode.lib import rc_cache

        cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
        region = rc_cache.get_or_create_region('some_region', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
        def heavy_compute(cache_name, param1, param2):
            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

        # invalidation namespace is shared namespace key for all process caches
        # we use it to send a global signal
        invalidation_namespace = 'repo_cache:1'

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = ('one', 'two')
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                result = heavy_compute.refresh(*args)
            else:
                result = heavy_compute(*args)

            compute_time = inv_context_manager.compute_time
            log.debug('result computed in %.4fs', compute_time)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(invalidation_namespace)

    """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        # uid: per-cache identifier; invalidation_namespace: shared signal key
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        # re-raise commit failures from __exit__ when True
        self.raise_exception = raise_exception
        self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        # final key shape: proc:<id>|thread:<id>|params:<sha1-of-uid>
        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
        self.proc_key = f'proc:{self.proc_id}'
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        """
        Fetch the ``CacheKey`` DB row for this context's cache key, creating
        an (unsaved) one when absent.  The new row reuses the namespace's
        existing ``cache_state_uid`` so all keys in a namespace stay consistent.
        """
        from rhodecode.model.db import CacheKey

        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map to find if we
        # have specific cache_key object registered. We do this because we want to have
        # all consistent cache_state_uid for newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            new_cache_args = invalidation_namespace
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)

        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        """
        Record compute time and, for fresh caches, persist the re-activated
        ``CacheKey`` row (tolerating the insert race via IntegrityError).
        """
        from rhodecode.model.db import IntegrityError, Session

        # save compute time
        self.compute_time = time.time() - self._start_time

        # active cache was served: nothing to persist
        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now