##// END OF EJS Templates
python3: fix itervalues
super-admin -
r4963:4bd6e5a3 default
parent child Browse files
Show More
@@ -1,369 +1,369 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import os
21 21 import time
22 22 import logging
23 23 import functools
24 24 import decorator
25 25 import threading
26 26
27 27 from dogpile.cache import CacheRegion
28 28
29 29 import rhodecode
30 30 from rhodecode.lib.hash_utils import sha1
31 31 from rhodecode.lib.type_utils import str2bool
32 32 from rhodecode.lib.str_utils import safe_bytes
33 33 from rhodecode.model.db import Session, CacheKey, IntegrityError
34 34
35 35 from rhodecode.lib.rc_cache import cache_key_meta
36 36 from rhodecode.lib.rc_cache import region_meta
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
def isCython(func):
    """
    Private helper that checks if a function is a cython function.
    """
    cls_name = type(func).__name__
    return cls_name == 'cython_function_or_method'
46 46
47 47
class RhodeCodeCacheRegion(CacheRegion):

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=str,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different than should_cache_fn
        And it's faster in cases we don't ever want to compute cached values
        """
        expiration_is_callable = callable(expiration_time)

        key_gen_factory = function_key_generator
        if key_gen_factory is None:
            key_gen_factory = self.function_key_generator

        def creator(key_generator, user_func, *arg, **kw):
            # Fast path: when caching is disabled, call straight through
            # without touching any dogpile machinery at all.
            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                started = time.time()
                result = user_func(*arg, **kw)
                elapsed = time.time() - started
                log.debug('un-cached method:%s took %.4fs', user_func.__name__, elapsed)
                return result

            cache_key = key_generator(*arg, **kw)

            if expiration_is_callable:
                timeout = expiration_time()
            else:
                timeout = expiration_time

            log.debug('Calling cached method:`%s`', user_func.__name__)
            return self.get_or_create(cache_key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            # backwards compatible: legacy key generators take no `to_str`
            if to_str is str:
                key_generator = key_gen_factory(namespace, user_func)
            else:
                key_generator = key_gen_factory(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                cache_key = key_generator(*arg, **kw)
                fresh_value = user_func(*arg, **kw)
                self.set(cache_key, fresh_value)
                return fresh_value

            def invalidate(*arg, **kw):
                self.delete(key_generator(*arg, **kw))

            def set_(value, *arg, **kw):
                self.set(key_generator(*arg, **kw), value)

            def get(*arg, **kw):
                return self.get(key_generator(*arg, **kw))

            # expose cache-management helpers on the decorated function
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.
            return decorator.decorate(user_func, functools.partial(
                creator, key_generator))

        return cache_decorator
125 125
126 126
def make_region(*arg, **kw):
    """Factory for our CacheRegion subclass; mirrors dogpile's make_region."""
    region = RhodeCodeCacheRegion(*arg, **kw)
    return region
129 129
130 130
def get_default_cache_settings(settings, prefixes=None):
    """
    Pick out of *settings* every entry whose key starts with one of
    *prefixes*, strip the matched prefix from the key, and return the
    result as a plain dict. String values are whitespace-stripped.

    :param settings: mapping of config keys to values
    :param prefixes: list of key prefixes to match; None/empty matches nothing
    :return: dict of prefix-stripped keys to (possibly stripped) values
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # Strip only the leading prefix. The previous
                # `key.split(prefix)[1]` split on EVERY occurrence of the
                # prefix text, corrupting keys that contain it twice.
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, str):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
143 143
144 144
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    joined = "_".join(str(arg) for arg in args)
    return sha1(safe_bytes(joined))
150 150
151 151
def backend_key_generator(backend):
    """
    Special wrapper that also sends over the backend to the key generator
    """
    def _with_backend(namespace, fn):
        return key_generator(backend, namespace, fn)

    return _with_backend
159 159
160 160
def key_generator(backend, namespace, fn):
    """
    Build a key-generating function for *fn*, prefixed by the backend's
    key_prefix and the given namespace (with defaults for either missing).
    """
    func_name = fn.__name__

    def generate_key(*args):
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        ns = namespace or 'default_namespace'
        params_key = compute_key_from_params(*args)
        return f"{prefix}:{ns}:{func_name}_{params_key}"

    return generate_key
173 173
174 174
def get_or_create_region(region_name, region_namespace=None):
    """
    Return the configured dogpile region for *region_name*. For
    file-namespace backends a dedicated region (one dbm file per
    namespace) is lazily created, cached and reused.
    """
    from rhodecode.lib.rc_cache.backends import FileNamespaceBackend

    region_obj = region_meta.dogpile_cache_regions.get(region_name)
    if not region_obj:
        configured = list(region_meta.dogpile_cache_regions.keys())
        raise EnvironmentError(
            'Region `{}` not in configured: {}.'.format(region_name, configured))

    region_uid_name = '{}:{}'.format(region_name, region_namespace)
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        existing = region_meta.dogpile_cache_regions.get(region_namespace)
        if existing:
            log.debug('Using already configured region: %s', region_namespace)
            return existing

        cache_dir = region_meta.dogpile_config_defaults['cache_dir']
        expiration_time = region_obj.expiration_time
        if not os.path.isdir(cache_dir):
            os.makedirs(cache_dir)

        new_region = make_region(
            name=region_uid_name,
            function_key_generator=backend_key_generator(region_obj.actual_backend)
        )
        namespace_filename = os.path.join(
            cache_dir, "{}.cache.dbm".format(region_namespace))
        # special type that allows 1db per namespace
        new_region.configure(
            backend='dogpile.cache.rc.file_namespace',
            expiration_time=expiration_time,
            arguments={"filename": namespace_filename}
        )

        # create and save in region caches
        log.debug('configuring new region: %s', region_uid_name)
        region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

    return region_obj
212 212
213 213
def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
    """
    Remove (or, with invalidate=True, soft-invalidate) every cache entry
    under *cache_namespace_uid*; return the number of keys found.
    """
    region = get_or_create_region(cache_region, cache_namespace_uid)
    matching_keys = region.backend.list_keys(prefix=cache_namespace_uid)
    found = len(matching_keys)
    if invalidate:
        # soft invalidation marks the whole region stale instead of deleting
        region.invalidate(hard=False)
    elif matching_keys:
        region.delete_multi(matching_keys)
    return found
224 224
225 225
class ActiveRegionCache(object):
    """Returned from InvalidationContext when the cached value is still valid."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # an active cache never requests re-computation
        return False
233 233
234 234
class FreshRegionCache(object):
    """Returned from InvalidationContext when the value must be re-computed."""

    def __init__(self, context, cache_data):
        self.context = context
        self.cache_data = cache_data

    def should_invalidate(self):
        # a fresh (missing/stale) cache always requests re-computation
        return True
242 242
243 243
class InvalidationContext(object):
    """
    usage::

        from rhodecode.lib import rc_cache

        cache_namespace_uid = CacheKey.SOME_NAMESPACE.format(1)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
        def heavy_compute(cache_name, param1, param2):
            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))

        # invalidation namespace is shared namespace key for all process caches
        # we use it to send a global signal
        invalidation_namespace = 'repo_cache:1'

        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
        with inv_context_manager as invalidation_context:
            args = ('one', 'two')
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                result = heavy_compute.refresh(*args)
            else:
                result = heavy_compute(*args)

            compute_time = inv_context_manager.compute_time
            log.debug('result computed in %.4fs', compute_time)

        # To send global invalidation signal, simply run
        CacheKey.set_invalidate(invalidation_namespace)

    """

    def __repr__(self):
        return f'<InvalidationContext:{self.cache_key}[{self.uid}]>'

    def __init__(self, uid, invalidation_namespace='',
                 raise_exception=False, thread_scoped=None):
        self.uid = uid
        self.invalidation_namespace = invalidation_namespace
        self.raise_exception = raise_exception
        self.proc_id = rhodecode.CONFIG.get('instance_id') or 'DEFAULT'
        self.thread_id = 'global'

        if thread_scoped is None:
            # if we set "default" we can override this via .ini settings
            thread_scoped = str2bool(rhodecode.CONFIG.get('cache_thread_scoped'))

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped is True:
            self.thread_id = threading.current_thread().ident

        self.cache_key = compute_key_from_params(uid)
        self.cache_key = 'proc:{}|thread:{}|params:{}'.format(
            self.proc_id, self.thread_id, self.cache_key)
        self.proc_key = 'proc:{}'.format(self.proc_id)
        self.compute_time = 0

    def get_or_create_cache_obj(self, cache_type, invalidation_namespace=''):
        """
        Return the CacheKey DB object for our cache_key, creating it when
        missing (inheriting cache_state_uid from any sibling in the namespace).
        """
        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
        # fetch all cache keys for this namespace and convert them to a map to find if we
        # have specific cache_key object registered. We do this because we want to have
        # all consistent cache_state_uid for newly registered objects
        cache_obj_map = CacheKey.get_namespace_map(invalidation_namespace)
        cache_obj = cache_obj_map.get(self.cache_key)
        log.debug('Fetched cache obj %s using %s cache key.', cache_obj, self.cache_key)

        if not cache_obj:
            new_cache_args = invalidation_namespace
            # python3: dict views have no .itervalues(); iterate the view
            first_cache_obj = next(iter(cache_obj_map.values())) if cache_obj_map else None
            cache_state_uid = None
            if first_cache_obj:
                cache_state_uid = first_cache_obj.cache_state_uid
            cache_obj = CacheKey(self.cache_key, cache_args=new_cache_args,
                                 cache_state_uid=cache_state_uid)
            cache_key_meta.cache_keys_by_pid.add(self.proc_key)

        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """
        log.debug('Entering cache invalidation check context: %s', self.invalidation_namespace)
        # register or get a new key based on uid
        self.cache_obj = self.get_or_create_cache_obj(cache_type=self.uid)
        cache_data = self.cache_obj.get_dict()
        self._start_time = time.time()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return ActiveRegionCache
            self.skip_cache_active_change = True

            return ActiveRegionCache(context=self, cache_data=cache_data)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(context=self, cache_data=cache_data)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # save compute time
        self.compute_time = time.time() - self._start_time

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
General Comments 0
You need to be logged in to leave comments. Login now