caches: synced with CE changes
super-admin
r1127:367165a5 python3
@@ -1,72 +1,87 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2023 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import logging
 import os
 import diskcache
+from diskcache import RLock

 log = logging.getLogger(__name__)

 cache_meta = None


+class ReentrantLock(RLock):
+    def __enter__(self):
+        reentrant_lock_key = self._key
+
+        log.debug('Acquire ReentrantLock(key=%s) for archive cache generation...', reentrant_lock_key)
+        #self.acquire()
+        log.debug('Lock for key=%s acquired', reentrant_lock_key)
+
+    def __exit__(self, *exc_info):
+        #self.release()
+        pass
+
+
 def get_archival_config(config):
+
     final_config = {
         'archive_cache.eviction_policy': 'least-frequently-used'
     }

     for k, v in config.items():
         if k.startswith('archive_cache'):
             final_config[k] = v

     return final_config


 def get_archival_cache_store(config):

     global cache_meta
     if cache_meta is not None:
         return cache_meta

     config = get_archival_config(config)

     archive_cache_dir = config['archive_cache.store_dir']
     archive_cache_size_gb = config['archive_cache.cache_size_gb']
     archive_cache_shards = config['archive_cache.cache_shards']
     archive_cache_eviction_policy = config['archive_cache.eviction_policy']

     log.debug('Initializing archival cache instance under %s', archive_cache_dir)

     # check if it's ok to write, and re-create the archive cache
     if not os.path.isdir(archive_cache_dir):
         os.makedirs(archive_cache_dir, exist_ok=True)

     d_cache = diskcache.FanoutCache(
         archive_cache_dir, shards=archive_cache_shards,
         cull_limit=0,  # manual eviction required
         size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
         eviction_policy=archive_cache_eviction_policy,
         timeout=30
     )
     cache_meta = d_cache
     return cache_meta


 def includeme(config):
     # init our cache at start, for vcsserver we don't init at runtime
     # because our cache config is sent via wire on make archive call, this call just lazy-enables the client
     return
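
Editor's note: the file above only wires up a shared diskcache.FanoutCache driven by `archive_cache.*` settings. A rough caller-side sketch follows; the import path, config values and cache key are illustrative assumptions and are not part of this commit.

# Hypothetical usage sketch. All concrete values below are made up.
from vcsserver.lib.rc_cache.archive_cache import (  # assumed module path; not shown in the diff
    get_archival_cache_store, ReentrantLock)

config = {
    'archive_cache.store_dir': '/tmp/rc_archive_cache',   # assumed location
    'archive_cache.cache_size_gb': 1,
    'archive_cache.cache_shards': 8,
}

d_cache = get_archival_cache_store(config)   # returns the module-level singleton FanoutCache

archive_key = 'example-archive-key'          # illustrative key
with ReentrantLock(d_cache, archive_key):    # note: acquire()/release() are currently commented out above
    if archive_key not in d_cache:
        d_cache.set(archive_key, b'...archive bytes...')
payload = d_cache.get(archive_key)
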
@@ -1,242 +1,247 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2023 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import functools
 import logging
 import os
 import threading
 import time

 import decorator
 from dogpile.cache import CacheRegion

-from vcsserver.lib.rc_cache import region_meta
-from vcsserver.utils import sha1
+
 from vcsserver.str_utils import safe_bytes
 from vcsserver.type_utils import str2bool
+from vcsserver.utils import sha1

+from . import region_meta
+
 log = logging.getLogger(__name__)


 class RhodeCodeCacheRegion(CacheRegion):

     def __repr__(self):
         return f'{self.__class__}(name={self.name})'

     def conditional_cache_on_arguments(
             self, namespace=None,
             expiration_time=None,
             should_cache_fn=None,
             to_str=str,
             function_key_generator=None,
             condition=True):
         """
         Custom conditional decorator, that will not touch any dogpile internals if
         condition isn't meet. This works a bit different from should_cache_fn
         And it's faster in cases we don't ever want to compute cached values
         """
         expiration_time_is_callable = callable(expiration_time)
         if not namespace:
             namespace = getattr(self, '_default_namespace', None)

         if function_key_generator is None:
             function_key_generator = self.function_key_generator

         def get_or_create_for_user_func(func_key_generator, user_func, *arg, **kw):

             if not condition:
                 log.debug('Calling un-cached method:%s', user_func.__name__)
                 start = time.time()
                 result = user_func(*arg, **kw)
                 total = time.time() - start
                 log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                 return result

             key = func_key_generator(*arg, **kw)

             timeout = expiration_time() if expiration_time_is_callable \
                 else expiration_time

             log.debug('Calling cached method:`%s`', user_func.__name__)
             return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

         def cache_decorator(user_func):
             if to_str is str:
                 # backwards compatible
                 key_generator = function_key_generator(namespace, user_func)
             else:
                 key_generator = function_key_generator(namespace, user_func, to_str=to_str)

             def refresh(*arg, **kw):
                 """
                 Like invalidate, but regenerates the value instead
                 """
                 key = key_generator(*arg, **kw)
                 value = user_func(*arg, **kw)
                 self.set(key, value)
                 return value

             def invalidate(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.delete(key)

             def set_(value, *arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.set(key, value)

             def get(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 return self.get(key)

             user_func.set = set_
             user_func.invalidate = invalidate
             user_func.get = get
             user_func.refresh = refresh
             user_func.key_generator = key_generator
             user_func.original = user_func

             # Use `decorate` to preserve the signature of :param:`user_func`.
             return decorator.decorate(user_func, functools.partial(
                 get_or_create_for_user_func, key_generator))

         return cache_decorator


 def make_region(*arg, **kw):
     return RhodeCodeCacheRegion(*arg, **kw)


 def get_default_cache_settings(settings, prefixes=None):
     prefixes = prefixes or []
     cache_settings = {}
     for key in settings.keys():
         for prefix in prefixes:
             if key.startswith(prefix):
                 name = key.split(prefix)[1].strip()
                 val = settings[key]
                 if isinstance(val, str):
                     val = val.strip()
                 cache_settings[name] = val
     return cache_settings


 def compute_key_from_params(*args):
     """
     Helper to compute key from given params to be used in cache manager
     """
     return sha1(safe_bytes("_".join(map(str, args))))


 def custom_key_generator(backend, namespace, fn):
     func_name = fn.__name__

     def generate_key(*args):
         backend_pref = getattr(backend, 'key_prefix', None) or 'backend_prefix'
         namespace_pref = namespace or 'default_namespace'
         arg_key = compute_key_from_params(*args)
         final_key = f"{backend_pref}:{namespace_pref}:{func_name}_{arg_key}"

         return final_key

     return generate_key


 def backend_key_generator(backend):
     """
     Special wrapper that also sends over the backend to the key generator
     """
     def wrapper(namespace, fn):
         return custom_key_generator(backend, namespace, fn)
     return wrapper


-def get_or_create_region(region_name, region_namespace: str = None):
-    from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
+def get_or_create_region(region_name, region_namespace: str = None, use_async_runner=False):
+    from .backends import FileNamespaceBackend
+    from . import async_creation_runner

     region_obj = region_meta.dogpile_cache_regions.get(region_name)
     if not region_obj:
         reg_keys = list(region_meta.dogpile_cache_regions.keys())
         raise EnvironmentError(f'Region `{region_name}` not in configured: {reg_keys}.')

     region_uid_name = f'{region_name}:{region_namespace}'

     if isinstance(region_obj.actual_backend, FileNamespaceBackend):
         if not region_namespace:
             raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')

         region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
         if region_exist:
             log.debug('Using already configured region: %s', region_namespace)
             return region_exist

         expiration_time = region_obj.expiration_time

         cache_dir = region_meta.dogpile_config_defaults['cache_dir']
         namespace_cache_dir = cache_dir

         # we default the namespace_cache_dir to our default cache dir.
         # however if this backend is configured with filename= param, we prioritize that
         # so all caches within that particular region, even those namespaced end up in the same path
         if region_obj.actual_backend.filename:
             namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)

         if not os.path.isdir(namespace_cache_dir):
             os.makedirs(namespace_cache_dir)
         new_region = make_region(
             name=region_uid_name,
             function_key_generator=backend_key_generator(region_obj.actual_backend)
         )

         namespace_filename = os.path.join(
             namespace_cache_dir, f"{region_name}_{region_namespace}.cache_db")
         # special type that allows 1db per namespace
         new_region.configure(
             backend='dogpile.cache.rc.file_namespace',
             expiration_time=expiration_time,
             arguments={"filename": namespace_filename}
         )

         # create and save in region caches
         log.debug('configuring new region: %s', region_uid_name)
         region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region

     region_obj._default_namespace = region_namespace
+    if use_async_runner:
+        region_obj.async_creation_runner = async_creation_runner
     return region_obj


 def clear_cache_namespace(cache_region: str | RhodeCodeCacheRegion, cache_namespace_uid: str, method: str):
     from . import CLEAR_DELETE, CLEAR_INVALIDATE

     if not isinstance(cache_region, RhodeCodeCacheRegion):
         cache_region = get_or_create_region(cache_region, cache_namespace_uid)
     log.debug('clearing cache region: %s with method=%s', cache_region, method)

     num_affected_keys = None

     if method == CLEAR_INVALIDATE:
         # NOTE: The CacheRegion.invalidate() method's default mode of
         # operation is to set a timestamp local to this CacheRegion in this Python process only.
         # It does not impact other Python processes or regions as the timestamp is only stored locally in memory.
         cache_region.invalidate(hard=True)

     if method == CLEAR_DELETE:
         cache_keys = cache_region.backend.list_keys(prefix=cache_namespace_uid)
         num_affected_keys = len(cache_keys)
         if num_affected_keys:
             cache_region.delete_multi(cache_keys)

     return num_affected_keys
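
Editor's note: a rough call-site sketch for the dogpile helpers in the second file. The region name, namespace, decorated function and import paths below are illustrative assumptions, not taken from this commit; the region itself must already be configured in region_meta at startup.

# Hypothetical usage sketch; module paths are assumed, not shown in the diff.
import time

from vcsserver.lib.rc_cache import CLEAR_DELETE                  # package constant referenced by the module above
from vcsserver.lib.rc_cache.utils import (                       # assumed module name
    get_or_create_region, clear_cache_namespace)

# raises EnvironmentError if 'repo_object' (example name) was not configured at startup
region = get_or_create_region('repo_object', region_namespace='example_namespace', use_async_runner=False)

@region.conditional_cache_on_arguments(condition=True)
def cached_commit_data(_namespace_uid, commit_id):
    # a real caller would do an expensive VCS lookup here
    return {'commit_id': commit_id, 'computed_at': time.time()}

value = cached_commit_data('example_namespace', 'deadbeef')      # computed once, then served from cache
cached_commit_data.invalidate('example_namespace', 'deadbeef')   # drop this single cached key

# delete every key stored under the namespace; returns the number of affected keys
clear_cache_namespace('repo_object', 'example_namespace', method=CLEAR_DELETE)
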