##// END OF EJS Templates
caches: improve logging
super-admin -
r5579:54bb9264 default
parent child Browse files
Show More
@@ -1,357 +1,355 b''
1 # Copyright (C) 2015-2024 RhodeCode GmbH
1 # Copyright (C) 2015-2024 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import functools
19 import functools
20 import logging
20 import logging
21 import os
21 import os
22 import threading
22 import threading
23 import time
23 import time
24
24
25 import decorator
25 import decorator
26 from dogpile.cache import CacheRegion
26 from dogpile.cache import CacheRegion
27
27
28 import rhodecode
28 import rhodecode
29 from ...lib.hash_utils import sha1
29 from ...lib.hash_utils import sha1
30 from ...lib.str_utils import safe_bytes
30 from ...lib.str_utils import safe_bytes
31 from ...lib.type_utils import str2bool # noqa :required by imports from .utils
31 from ...lib.type_utils import str2bool # noqa :required by imports from .utils
32
32
33 from . import region_meta
33 from . import region_meta
34
34
35 log = logging.getLogger(__name__)
35 log = logging.getLogger(__name__)
36
36
37
37
def isCython(func):
    """
    Private helper that checks if a function is a cython function.

    Cython-compiled callables don't behave like plain Python functions
    (e.g. for signature inspection), so callers use this to special-case them.
    """
    # Cython exposes compiled callables with this exact class name.
    return func.__class__.__name__ == 'cython_function_or_method'
43
43
44
44
class RhodeCodeCacheRegion(CacheRegion):
    """
    Dogpile ``CacheRegion`` subclass adding a *conditional* caching decorator.

    The extra ``condition`` flag lets callers bypass dogpile entirely (no key
    computation, no backend access) when caching is disabled for a call.
    """

    def __repr__(self):
        return f'`{self.__class__.__name__}(name={self.name}, backend={self.backend.__class__})`'

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't met. This works a bit different from should_cache_fn
        And it's faster in cases we don't ever want to compute cached values

        :param namespace: cache namespace; falls back to the region's
            ``_default_namespace`` when not given.
        :param expiration_time: TTL in seconds, or a callable returning it.
        :param should_cache_fn: dogpile hook deciding whether to store a result.
        :param to_str: converter used by the key generator (``str`` keeps the
            backwards-compatible key format).
        :param function_key_generator: override for the region's key generator.
        :param condition: when falsy, call the wrapped function directly,
            skipping the cache entirely.
        """
        expiration_time_is_callable = callable(expiration_time)
        if not namespace:
            namespace = getattr(self, '_default_namespace', None)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):

            if not condition:
                # caching disabled: run the function directly, but keep timing
                # information in the debug log for observability
                log.debug('Calling un-cached method:`%s`', user_func.__name__)
                start = time.time()
                result = user_func(*arg, **kw)
                total = time.time() - start
                log.debug('Call for un-cached method:`%s` took %.4fs', user_func.__name__, total)
                return result

            key = func_key_generator(*arg, **kw)
            timeout = expiration_time() if expiration_time_is_callable else expiration_time
            log.debug('Calling cached (timeout=%s) method:`%s`', timeout, user_func.__name__)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is str:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function,
            # mirroring dogpile's own cache_on_arguments API
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
127
125
128
126
def make_region(*arg, **kw):
    """
    Factory returning a :class:`RhodeCodeCacheRegion`; drop-in replacement
    for ``dogpile.cache.make_region`` (same positional/keyword arguments).
    """
    return RhodeCodeCacheRegion(*arg, **kw)
131
129
132
130
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract cache-related entries from a settings mapping.

    For every key that starts with one of *prefixes*, the prefix is stripped
    and the remainder (whitespace-trimmed) becomes the key in the returned
    dict. String values are also trimmed; other value types pass through.

    :param settings: mapping of configuration keys to values (e.g. .ini data)
    :param prefixes: list of key prefixes to match; no prefixes -> empty result
    :return: dict of de-prefixed settings
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # strip the prefix, keep everything after it as the name
                name = key.split(prefix)[1].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
145
143
146
144
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager

    All arguments are stringified, joined with ``_`` and hashed with sha1,
    producing a stable, fixed-length cache key component.
    """
    return sha1(safe_bytes("_".join(map(str, args))))
152
150
153
151
def custom_key_generator(backend, namespace, fn):
    """
    Build a dogpile key-generator for *fn* that prefixes keys with the
    backend's ``key_prefix`` and the cache *namespace*.

    Resulting key shape: ``<backend_prefix>:<namespace>:<func_name>_<args_hash>``

    :param backend: cache backend; its ``key_prefix`` attribute is used when set
    :param namespace: cache namespace, or ``None`` for the default
    :param fn: the function being cached (its ``__name__`` goes into the key)
    :return: ``generate_key(*args)`` callable
    """
    func_name = fn.__name__

    def generate_key(*args):
        # fall back to literal placeholders so keys are always well-formed
        backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        namespace_pref = namespace or 'default_namespace'
        arg_key = compute_key_from_params(*args)
        final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"

        return final_key

    return generate_key
166
164
167
165
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator

    Adapts :func:`custom_key_generator` to dogpile's two-argument
    ``function_key_generator(namespace, fn)`` protocol by closing over *backend*.
    """
    def wrapper(namespace, fn):
        return custom_key_generator(backend, namespace, fn)
    return wrapper
175
173
176
174
def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False, force=False):
    """
    Look up a configured dogpile cache region, lazily creating a
    namespace-scoped region for file-based backends.

    :param region_name: name of a region registered in
        ``region_meta.dogpile_cache_regions``
    :param region_namespace: namespace; required (and used as the registry key
        and per-namespace db filename) for ``FileNamespaceBackend`` regions
    :param use_async_runner: attach the async creation runner to the region
    :param force: re-create the namespaced region even if one already exists
    :raises OSError: when *region_name* is not configured
    :raises ValueError: when a file backend is used without a namespace
    :return: the (possibly newly created) cache region
    """
    from .backends import FileNamespaceBackend
    from . import async_creation_runner

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        reg_keys = list(region_meta.dogpile_cache_regions.keys())
        raise OSError(f'Region `{region_name}` not in configured: {reg_keys}.')

    region_uid_name = f'{region_name}:{region_namespace}'

    # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

        region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
        if region_exist and not force:
            log.debug('Using already configured region: %s', region_namespace)
            return region_exist

        expiration_time = region_obj.expiration_time

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        namespace_cache_dir = cache_dir

        # we default the namespace_cache_dir to our default cache dir.
        # however, if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

        # exist_ok avoids the isdir/makedirs race between concurrent workers
        os.makedirs(namespace_cache_dir, exist_ok=True)
        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )

        namespace_filename = os.path.join(
            namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    region_obj._default_namespace = region_namespace
    if use_async_runner:
        region_obj.async_creation_runner = async_creation_runner
    return region_obj
233
231
234
232
def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str) -> int:
    """
    Clear a cache namespace in the given region.

    :param cache_region: region object, or a region name to resolve via
        :func:`get_or_create_region`
    :param cache_namespace_uid: namespace/key-prefix to clear
    :param method: ``CLEAR_INVALIDATE`` (soft, process-local timestamp) or
        ``CLEAR_DELETE`` (hard delete by key prefix in the backend)
    :return: number of keys affected (0 for the invalidate method)
    """
    from . import CLEAR_DELETE, CLEAR_INVALIDATE

    if not isinstance(cache_region, RhodeCodeCacheRegion):
        cache_region = get_or_create_region(cache_region, cache_namespace_uid)
    log.debug('clearing cache region: %s [prefix:%s] with method=%s',
              cache_region, cache_namespace_uid, method)

    num_affected_keys = 0

    if method == CLEAR_INVALIDATE:
        # NOTE: The CacheRegion.invalidate() method’s default mode of
        # operation is to set a timestamp local to this CacheRegion in this Python process only.
        # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
        cache_region.invalidate(hard=True)

    if method == CLEAR_DELETE:
        num_affected_keys = cache_region.backend.delete_multi_by_prefix(prefix=cache_namespace_uid)
    return num_affected_keys
254
252
255
253
class ActiveRegionCache(object):
    """
    Lightweight value object yielded by ``InvalidationContext.__enter__``.

    Wraps the invalidation context together with the cache-key row data so
    callers can read the current ``cache_state_uid``.
    """
    def __init__(self, context, cache_data: dict):
        # the InvalidationContext that produced this object
        self.context = context
        # dict form of the CacheKey DB row; must contain 'cache_state_uid'
        self.cache_data = cache_data

    @property
    def state_uid(self) -> str:
        return self.cache_data['cache_state_uid']
264
262
265
263
class InvalidationContext(object):
    """
    Context manager driving cache invalidation via a DB-backed state uid.

    usage::

        from rhodecode.lib import rc_cache

        repo_namespace_key = 'some-cache-for-repo-id-100'
        inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key)

        def cache_generator(_state_uid):

            @region.conditional_cache_on_arguments(namespace='some-common-namespace-100')
            def _dummy_func(*args):
                # compute heavy function
                return _state_uid, 'result'

            return _dummy_func

        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.state_uid
            cache_func = cache_generator(cache_state_uid)
            previous_state_uid, result = cache_func(*call_args)

            should_invalidate = previous_state_uid != cache_state_uid
            if should_invalidate:
                _, result = cache_func.refresh(*call_args)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(repo_namespace_key)

    """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}>'

    def __init__(self, key, raise_exception=False, thread_scoped=None):
        """
        :param key: logical cache key (namespace) this context guards
        :param raise_exception: re-raise unexpected DB errors instead of
            swallowing them after rollback
        :param thread_scoped: scope the context to the current thread;
            ``None`` defers to the 'cache_thread_scoped' .ini setting
        """
        self.cache_key = key

        self.raise_exception = raise_exception
        self.proc_id = rhodecode.ConfigGet().get_str('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = rhodecode.ConfigGet().get_bool('cache_thread_scoped')

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.proc_key = f'proc:{self.proc_id}|thread:{self.thread_id}|key:{self.cache_key}'
        # seconds spent inside the with-block; filled in by __exit__
        self.compute_time = 0

    def get_or_create_cache_obj(self):
        """
        Fetch the active CacheKey row for this key, creating one (with a fresh
        state uid) when none exists. Safe against concurrent inserts.
        """
        from rhodecode.model.db import CacheKey, Session, IntegrityError

        cache_obj = CacheKey.get_active_cache(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            # generate new UID for non-existing cache object
            cache_state_uid = CacheKey.generate_new_state_uid()
            cache_obj = CacheKey(self.cache_key, cache_args=f'repo_state:{self._start_time}',
                                 cache_state_uid=cache_state_uid, cache_active=True)
            try:
                Session().add(cache_obj)
                Session().commit()
            except IntegrityError:
                # if we catch integrity error, it means we inserted this object
                # assumption is that's really an edge race-condition case and
                # it's safe to skip it
                Session().rollback()
            except Exception:
                log.exception('Failed to commit on cache key update')
                Session().rollback()
                if self.raise_exception:
                    raise
        return cache_obj

    def __enter__(self):
        log.debug('Entering cache invalidation check context: %s', self)
        self._start_time = time.time()

        self.cache_obj = self.get_or_create_cache_obj()
        cache_data = self.cache_obj.get_dict()

        return ActiveRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # save compute time
        self.compute_time = time.time() - self._start_time
General Comments 0
You need to be logged in to leave comments. Login now