##// END OF EJS Templates
caches: fixed utils imports
super-admin -
r4922:905a9fec default
parent child Browse files
Show More
@@ -1,368 +1,369 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2015-2020 RhodeCode GmbH
3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
import os
import time
import logging
import functools
import decorator
import threading

from dogpile.cache import CacheRegion

import rhodecode
from rhodecode.lib.hash_utils import sha1
from rhodecode.lib.type_utils import str2bool
from rhodecode.lib.str_utils import safe_bytes, safe_str
from rhodecode.model.db import Session, CacheKey, IntegrityError

from rhodecode.lib.rc_cache import cache_key_meta
from rhodecode.lib.rc_cache import region_meta
36
37
37 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
38
39
39
40
def isCython(func):
    """
    Private helper that checks if a function is a cython function.

    Cython-compiled callables are not plain Python functions; they are
    recognized here purely by their class name.
    """
    cython_class_name = 'cython_function_or_method'
    return type(func).__name__ == cython_class_name
45
46
46
47
class RhodeCodeCacheRegion(CacheRegion):
    """
    dogpile CacheRegion subclass that adds a `conditional` variant of
    `cache_on_arguments`, allowing caching to be bypassed entirely per
    decorator instance.
    """

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't met. This works a bit differently than should_cache_fn
        and it's faster in cases we don't ever want to compute cached values.

        :param namespace: cache namespace passed to the key generator
        :param expiration_time: TTL in seconds, or a callable returning it
            (evaluated on every call)
        :param should_cache_fn: dogpile hook deciding whether a computed
            value should be stored
        :param to_str: argument stringifier for key generation; `str` keeps
            backwards-compatible key format
        :param condition: when falsy, the decorated function is called
            directly and dogpile is never consulted
        """
        expiration_time_is_callable = callable(expiration_time)

        if function_key_generator is None:
            # fall back to the region's configured key generator
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

            if not condition:
                # caching disabled for this decorator: call straight through,
                # logging only the wall-clock time of the computation
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                return result

            key = key_generator(*arg, **kw)

            # expiration_time may be a callable evaluated per call
            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # drop the cached entry for these exact arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store an explicit value under the key for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch whatever is cached (may be NO_VALUE) without computing
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function,
            # mirroring dogpile's cache_on_arguments API
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
124
125
125
126
def make_region(*arg, **kw):
    """Factory returning a :class:`RhodeCodeCacheRegion` (dogpile region subclass)."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
128
129
129
130
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related options from a flat settings mapping.

    Every key starting with one of `prefixes` is copied into the result with
    the prefix removed; string values are stripped of surrounding whitespace.
    If a key matches several prefixes, the last matching prefix wins
    (preserved behavior of the original loop).

    :param settings: mapping of configuration keys to values
    :param prefixes: list of key prefixes to select and strip; empty/None
        selects nothing
    :return: dict of prefix-stripped keys to (stripped) values
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # BUGFIX: strip only the *leading* prefix. The previous
                # `key.split(prefix)[1]` truncated names that contained the
                # prefix substring again later in the key.
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
142
143
143
144
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
149
150
150
151
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator

    The returned callable matches dogpile's ``function_key_generator``
    signature ``(namespace, fn)`` with the backend pre-bound.
    """
    return functools.partial(key_generator, backend)
158
159
159
160
def key_generator(backend, namespace, fn):
    """
    Build a dogpile key function for `fn` producing keys of the form
    ``<backend_prefix>:<namespace>:<fname>_<hash-of-args>``.
    """
    func_name = fn.__name__

    def generate_key(*args):
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        namespace_pref = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        return f'{prefix}:{namespace_pref}:{func_name}_{arg_key}'

    return generate_key
172
173
173
174
def get_or_create_region(region_name, region_namespace=None):
    """
    Look up a configured dogpile cache region by name.

    For file-namespace backends a dedicated region per `region_namespace` is
    lazily created and memoized in ``region_meta.dogpile_cache_regions`` so
    every namespace gets its own DBM file.

    :param region_name: name of a region registered in
        ``region_meta.dogpile_cache_regions``
    :param region_namespace: optional namespace; only meaningful for
        FileNamespaceBackend regions
    :raises EnvironmentError: when `region_name` was never configured
    :return: a configured dogpile region object
    """
    # imported here to avoid a circular import with the backends module
    from rhodecode.lib.rc_cache.backends import FileNamespaceBackend
    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(
                region_name, list(region_meta.dogpile_cache_regions.keys())))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        # per-namespace regions are memoized under the namespace key itself
        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist
        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        # inherit TTL from the parent region's configuration
        expiration_time = region_obj.expiration_time

        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
211
212
212
213
def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
    """
    Remove (or soft-invalidate) all keys in a cache namespace.

    :param cache_region: region name passed to :func:`get_or_create_region`
    :param cache_namespace_uid: namespace whose keys are targeted
    :param invalidate: when True, soft-invalidate the whole region instead of
        deleting individual keys
    :return: number of keys that were found under the namespace
    """
    region = get_or_create_region(cache_region, cache_namespace_uid)
    matched_keys = region.backend.list_keys(prefix=cache_namespace_uid)
    removed_count = len(matched_keys)
    if invalidate:
        region.invalidate(hard=False)
    elif removed_count:
        region.delete_multi(matched_keys)
    return removed_count
223
224
224
225
class ActiveRegionCache(object):
    """
    Context result representing a still-valid cache entry: callers should
    read the cached value rather than recompute it.
    """

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # an active cache never requests re-computation
        return False
232
233
233
234
class FreshRegionCache(object):
    """
    Context result representing a missing/expired cache entry: callers
    should recompute and store the value.
    """

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # a fresh (stale) cache always requests re-computation
        return True
241
242
242
243
class InvalidationContext(object):
    """
    usage::

        from rhodecode.lib import rc_cache

        cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
        def heavy_compute(cache_name, param1, param2):
            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

        # invalidation namespace is shared namespace key for all process caches
        # we use it to send a global signal
        invalidation_namespace = 'repo_cache:1'

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = ('one', 'two')
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                result = heavy_compute.refresh(*args)
            else:
                result = heavy_compute(*args)

            compute_time = inv_context_manager.compute_time
            log.debug('result computed in %.4fs', compute_time)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(invalidation_namespace)

    """

    def __repr__(self):
        return '<InvalidationContext:{}[{}]>'.format(
            safe_str(self.cache_key), safe_str(self.uid))

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        """
        :param uid: unique id of the cache consumer, mixed into the cache key
        :param invalidation_namespace: shared namespace used to signal
            invalidation across processes
        :param raise_exception: re-raise commit errors from __exit__ instead
            of only logging them
        :param thread_scoped: scope the cache key to the current thread;
            None defers to the `cache_thread_scoped` .ini setting
        """
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        self.raise_exception = raise_exception
        # BUGFIX(py3): `safe_unicode` is no longer imported in this module;
        # `safe_str` performs the equivalent str coercion on Python 3.
        self.proc_id = safe_str(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
        self.proc_key = 'proc:{}'.format(self.proc_id)
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        """
        Return the CacheKey DB object for this context's cache key, creating
        a new (uncommitted) one when none is registered yet.

        :param cache_type: kept for interface compatibility; not used here
        """
        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map to find if we
        # have specific cache_key object registered. We do this because we want to have
        # all consistent cache_state_uid for newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)
        if not cache_obj:
            new_cache_args = invalidation_namespace
            # BUGFIX(py3): dict.itervalues() does not exist on Python 3;
            # use iter(...values()) to peek at any existing entry.
            # NOTE(review): assumes get_namespace_map returns a dict-like —
            # confirm against the CacheKey model.
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                # reuse the namespace's existing state uid for consistency
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)
            cache_key_meta.cache_keys_by_pid.add(self.proc_key)

        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # save compute time
        self.compute_time = time.time() - self._start_time

        # __enter__ decided nothing needs persisting (cache was active)
        if self.skip_cache_active_change:
            return

        try:
            # mark the key active again after a successful re-computation
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
General Comments 0
You need to be logged in to leave comments. Login now