caches: use global cache prefixes so we can keep compatibility when switching from the old RC to the new Python 3 based one
super-admin
r1135:7b7ca856 default
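
What the change does: every repo-object cache namespace now embeds a global version prefix, CACHE_OBJ_CACHE_VER = 'v2', so entries written by the old RC release are simply never looked up by the new Python 3 based one, and bumping the constant orphans all existing entries at once. A minimal sketch of the key layout, assuming a plain dogpile.cache memory region; only the namespace format mirrors the diff, while the region wiring and the commit-count helper are made up:

    from dogpile.cache import make_region

    CACHE_OBJ_CACHE_VER = 'v2'  # bump to orphan everything written by older releases

    def cache_namespace(cache_repo_id):
        # old layout was f'cache_repo.{cache_repo_id}'; embedding the version
        # means old and new processes address disjoint keys for the same repo
        return f'cache_repo.{CACHE_OBJ_CACHE_VER}.{cache_repo_id}'

    region = make_region().configure('dogpile.cache.memory')

    def commit_count(cache_repo_id):
        key = f'{cache_namespace(cache_repo_id)}:commit_count'
        # the creator runs only on a miss, i.e. at most once per version prefix
        return region.get_or_create(key, lambda: 0)  # 0 stands in for a real VCS call
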
@@ -22,7 +22,6 @@ import logging
 import urllib.parse
 
 from vcsserver.lib.rc_cache.archive_cache import get_archival_cache_store
-from vcsserver.lib.rc_cache import region_meta
 
 from vcsserver import exceptions
 from vcsserver.exceptions import NoContentException
@@ -42,7 +41,7 @@ class RepoFactory(object):
     repo_type = None
 
     def __init__(self):
-        self._cache_region = region_meta.dogpile_cache_regions['repo_object']
+        pass
 
     def _create_config(self, path, config):
         config = {}
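
With the eager region lookup gone from __init__, RepoFactory no longer requires dogpile regions to be configured before it is instantiated; the region is resolved on demand instead, in the style of the RemoteBase._region hunk near the end of this diff. A hedged sketch of that lazy style; the method name merely mirrors the removed attribute and is not part of the commit:

    class RepoFactory(object):
        repo_type = None

        def _cache_region(self, wire):
            # resolved per call, after configure_dogpile_cache() has run
            from vcsserver.lib import rc_cache
            uid = f"cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{wire.get('cache_repo_id', '')}"
            return rc_cache.get_or_create_region('repo_object', uid)
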
@@ -86,7 +85,6 @@ def raise_from_original(new_type, org_ex
     del exc_traceback
 
 
-
 class ArchiveNode(object):
     def __init__(self, path, mode, is_link, raw_bytes):
         self.path = path
@@ -98,21 +96,22 @@ class ArchiveNode(object):
 def store_archive_in_cache(node_walker, archive_key, kind, mtime, archive_at_path, archive_dir_name,
                            commit_id, write_metadata=True, extra_metadata=None, cache_config=None):
     """
-    Function that would store an generate archive and send it to a dedicated backend store
+    Function that would store a generated archive and send it to a dedicated backend store
     In here we use diskcache
 
     :param node_walker: a generator returning nodes to add to archive
     :param archive_key: key used to store the path
     :param kind: archive kind
     :param mtime: time of creation
-    :param archive_at_path: default '/' the path at archive was started. if this is not '/' it means it's a partial archive
+    :param archive_at_path: default '/' the path at which the archive was started.
+        If this is not '/' it means it's a partial archive
     :param archive_dir_name: inside dir name when creating an archive
     :param commit_id: commit sha of revision archive was created at
     :param write_metadata:
     :param extra_metadata:
     :param cache_config:
 
-    walker should be a file walker, for example:
+    walker should be a file walker, for example,
     def node_walker():
         for file_info in files:
             yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
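
For context, a hypothetical caller of store_archive_in_cache built from the docstring above: files and all argument values are invented, raw_bytes is passed as a callable because the function body invokes f.raw_bytes() (next hunk), and cache_config is left empty since its required keys are not visible in this diff:

    files = {b'README.md': b'# demo\n', b'src/app.py': b'print("hi")\n'}

    def node_walker():
        for fn, data in files.items():
            # mode 0o100644 = regular, non-executable file
            yield ArchiveNode(fn, 0o100644, False, lambda d=data: d)

    store_archive_in_cache(
        node_walker, archive_key='repo-42-abc123.tgz', kind='tgz',
        mtime=1700000000, archive_at_path='/', archive_dir_name='repo-42',
        commit_id='abc123', cache_config={})
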
@@ -145,7 +144,7 @@ def store_archive_in_cache(node_walker,
            archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
        except NoContentException:
            # NOTE(marcink): this is a special case for SVN so we can create "empty"
-           # directories which arent supported by archiver
+           # directories which are not supported by archiver
            archiver.addfile(os.path.join(f_path, b'.dir'), f.mode, f.is_link, b'')
 
    if write_metadata:
@@ -52,6 +52,8 @@ register_backend(
 log = logging.getLogger(__name__)
 
 
+CACHE_OBJ_CACHE_VER = 'v2'
+
 CLEAR_DELETE = 'delete'
 CLEAR_INVALIDATE = 'invalidate'
 
@@ -103,7 +105,7 @@ def configure_dogpile_cache(settings):
        if log.isEnabledFor(logging.DEBUG):
            region_args = dict(backend=new_region.actual_backend,
                               region_invalidator=new_region.region_invalidator.__class__)
-           log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
+           log.debug('dogpile: registering a new region key=`%s` args=%s', namespace_name, region_args)
 
        region_meta.dogpile_cache_regions[namespace_name] = new_region
 
@@ -15,7 +15,7 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
-import errno
+#import errno
 import fcntl
 import functools
 import logging
@@ -27,7 +27,7 @@ from dogpile.cache import CacheRegion
 
 from vcsserver.utils import sha1
 from vcsserver.str_utils import safe_bytes
-from vcsserver.type_utils import str2bool
+from vcsserver.type_utils import str2bool  # noqa :required by imports from .utils
 
 from . import region_meta
 
@@ -177,6 +177,7 @@ def get_or_create_region(region_name, re
 
    region_uid_name = f'{region_name}:{region_namespace}'
 
+   # Special case for ONLY the FileNamespaceBackend backend. We register one-file-per-region
    if isinstance(region_obj.actual_backend, FileNamespaceBackend):
        if not region_namespace:
            raise ValueError(f'{FileNamespaceBackend} used requires to specify region_namespace param')
@@ -192,7 +193,7 @@ def get_or_create_region(region_name, re
        namespace_cache_dir = cache_dir
 
        # we default the namespace_cache_dir to our default cache dir.
-       # however if this backend is configured with filename= param, we prioritize that
+       # however, if this backend is configured with filename= param, we prioritize that
        # so all caches within that particular region, even those namespaced end up in the same path
        if region_obj.actual_backend.filename:
            namespace_cache_dir = os.path.dirname(region_obj.actual_backend.filename)
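
The two hunks above document the file-backend special case: with FileNamespaceBackend each namespace normally gets its own cache file under the default cache directory, but an explicit filename= in the backend configuration wins, pulling every namespaced cache of that region into the same directory. A small sketch of that resolution rule, reconstructed from the comments; the helper name is illustrative:

    import os

    def resolve_namespace_cache_dir(backend, default_cache_dir):
        # default: one file per namespace under the default cache dir ...
        namespace_cache_dir = default_cache_dir
        # ... unless filename= was configured, then all caches of this
        # region, namespaced or not, live next to that file
        if backend.filename:
            namespace_cache_dir = os.path.dirname(backend.filename)
        return namespace_cache_dir
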
@@ -23,7 +23,7 @@ class RemoteBase(object):
 
    def _region(self, wire):
        cache_repo_id = wire.get('cache_repo_id', '')
-       cache_namespace_uid = f'cache_repo.{cache_repo_id}'
+       cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
        return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
 
    def _cache_on(self, wire):
@@ -35,13 +35,12 @@ class RemoteBase(object):
        return cache_on, context_uid, repo_id
 
    def vcsserver_invalidate_cache(self, wire, delete):
-       from vcsserver.lib import rc_cache
-       repo_id = wire.get('repo_id', '')
        cache_repo_id = wire.get('cache_repo_id', '')
-       cache_namespace_uid = f'cache_repo.{cache_repo_id}'
+       cache_namespace_uid = f'cache_repo.{rc_cache.CACHE_OBJ_CACHE_VER}.{cache_repo_id}'
 
        if delete:
            rc_cache.clear_cache_namespace(
                'repo_object', cache_namespace_uid, method=rc_cache.CLEAR_DELETE)
 
+       repo_id = wire.get('repo_id', '')
        return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
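
Note that vcsserver_invalidate_cache now clears the versioned namespace, so a CLEAR_DELETE removes only entries written under the current CACHE_OBJ_CACHE_VER; pre-upgrade entries are left behind but are never addressed again. A hypothetical call, with the wire contents made up:

    wire = {'repo_id': 42, 'cache_repo_id': 'repo-42'}
    # RemoteBase is normally subclassed by the per-VCS remote objects
    result = RemoteBase().vcsserver_invalidate_cache(wire, delete=True)
    assert result == {'invalidated': {'repo_id': 42, 'delete': True}}
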