caches: don't use key_manglers; instead, prefix keys based on the backend.
marcink
r734:2c6e72c0 default
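The change below drops dogpile key manglers in favour of per-backend key prefixes: each backend class now declares a key_prefix, and every region's function_key_generator is bound to its actual backend, so generated cache keys carry that prefix. A minimal sketch of the resulting key layout; the namespace, function name and arguments are hypothetical, only the format mirrors the new key_generator in the last hunk below.

import hashlib


def compute_key_from_params(*args):
    # same idea as the vcsserver helper: sha1 over the joined, stringified args
    return hashlib.sha1("_".join(map(str, args)).encode('utf8')).hexdigest()


backend_prefix = 'redis_pickle_backend'  # RedisPickleBackend.key_prefix
namespace_pref = 'repo_object'           # hypothetical region namespace
fname = 'get_commit'                     # hypothetical decorated function

final_key = "{}:{}:{}_{}".format(
    backend_prefix, namespace_pref, fname, compute_key_from_params('repo-id', 42))
print(final_key)
# -> redis_pickle_backend:repo_object:get_commit_<sha1 of "repo-id_42">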
@@ -1,69 +1,68 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import logging
 from dogpile.cache import register_backend

 register_backend(
     "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
     "LRUMemoryBackend")

 register_backend(
     "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
     "FileNamespaceBackend")

 register_backend(
     "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
     "RedisPickleBackend")


 log = logging.getLogger(__name__)

 from . import region_meta
-from .utils import (get_default_cache_settings, key_generator, make_region)
+from .utils import (get_default_cache_settings, backend_key_generator, make_region)


 def configure_dogpile_cache(settings):
     cache_dir = settings.get('cache_dir')
     if cache_dir:
         region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

     rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])

     # inspect available namespaces
     avail_regions = set()
     for key in rc_cache_data.keys():
         namespace_name = key.split('.', 1)[0]
         avail_regions.add(namespace_name)
     log.debug('dogpile: found following cache regions: %s', avail_regions)

     # register them into namespace
     for region_name in avail_regions:
         new_region = make_region(
             name=region_name,
-            function_key_generator=key_generator
+            function_key_generator=None
         )

         new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
-
-        log.debug('dogpile: registering a new region %s[%s]',
-                  region_name, new_region.__dict__)
+        new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
+        log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
         region_meta.dogpile_cache_regions[region_name] = new_region


 def includeme(config):
     configure_dogpile_cache(config.registry.settings)
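Regions are now created with function_key_generator=None and, only after configure_from_config() has run, the generator is bound to the region's actual backend. A hedged sketch of settings that configure_dogpile_cache() could consume; the region name and values are illustrative, only the cache_dir and rc_cache.<region>.* keys come from the code above.

# Illustrative settings dict; 'repo_object' and all values are assumptions.
settings = {
    'cache_dir': '/var/tmp/rc_cache',
    'rc_cache.repo_object.backend': 'dogpile.cache.rc.redis',
    'rc_cache.repo_object.expiration_time': '3600',
}

# configure_dogpile_cache(settings) would register a 'repo_object' region in
# region_meta.dogpile_cache_regions and bind its key generator via
# backend_key_generator(new_region.actual_backend), so every key produced for
# this region starts with the backend's key_prefix.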
@@ -1,179 +1,231 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import time
 import errno
 import logging

+import msgpack
+from dogpile.cache.api import CachedValue
 from dogpile.cache.backends import memory as memory_backend
 from dogpile.cache.backends import file as file_backend
 from dogpile.cache.backends import redis as redis_backend
 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
 from dogpile.cache.util import memoized_property

 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug


 _default_max_size = 1024

 log = logging.getLogger(__name__)


 class LRUMemoryBackend(memory_backend.MemoryBackend):
+    key_prefix = 'lru_mem_backend'
     pickle_values = False

     def __init__(self, arguments):
         max_size = arguments.pop('max_size', _default_max_size)

         LRUDictClass = LRUDict
         if arguments.pop('log_key_count', None):
             LRUDictClass = LRUDictDebug

         arguments['cache_dict'] = LRUDictClass(max_size)
         super(LRUMemoryBackend, self).__init__(arguments)

     def delete(self, key):
         try:
             del self._cache[key]
         except KeyError:
             # we don't care if key isn't there at deletion
             pass

     def delete_multi(self, keys):
         for key in keys:
             self.delete(key)


-class Serializer(object):
+class PickleSerializer(object):
+
     def _dumps(self, value, safe=False):
         try:
             return compat.pickle.dumps(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise

     def _loads(self, value, safe=True):
         try:
             return compat.pickle.loads(value)
         except Exception:
             if safe:
                 return NO_VALUE
             else:
                 raise


+class MsgPackSerializer(object):
+
+    def _dumps(self, value, safe=False):
+        try:
+            return msgpack.packb(value)
+        except Exception:
+            if safe:
+                return NO_VALUE
+            else:
+                raise
+
+    def _loads(self, value, safe=True):
+        """
+        pickle maintained the `CachedValue` wrapper of the tuple
+        msgpack does not, so it must be added back in.
+        """
+        try:
+            value = msgpack.unpackb(value, use_list=False)
+            return CachedValue(*value)
+        except Exception:
+            if safe:
+                return NO_VALUE
+            else:
+                raise
+
+
 import fcntl
 flock_org = fcntl.flock


 class CustomLockFactory(FileLock):

     pass


-class FileNamespaceBackend(Serializer, file_backend.DBMBackend):
+class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
+    key_prefix = 'file_backend'

     def __init__(self, arguments):
         arguments['lock_factory'] = CustomLockFactory
         super(FileNamespaceBackend, self).__init__(arguments)

     def list_keys(self, prefix=''):
+        prefix = '{}:{}'.format(self.key_prefix, prefix)
+
         def cond(v):
             if not prefix:
                 return True

             if v.startswith(prefix):
                 return True
             return False

         with self._dbm_file(True) as dbm:

             return filter(cond, dbm.keys())

     def get_store(self):
         return self.filename

     def get(self, key):
         with self._dbm_file(False) as dbm:
             if hasattr(dbm, 'get'):
                 value = dbm.get(key, NO_VALUE)
             else:
                 # gdbm objects lack a .get method
                 try:
                     value = dbm[key]
                 except KeyError:
                     value = NO_VALUE
             if value is not NO_VALUE:
                 value = self._loads(value)
             return value

     def set(self, key, value):
         with self._dbm_file(True) as dbm:
             dbm[key] = self._dumps(value)

     def set_multi(self, mapping):
         with self._dbm_file(True) as dbm:
             for key, value in mapping.items():
                 dbm[key] = self._dumps(value)


-class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
+class BaseRedisBackend(redis_backend.RedisBackend):
     def list_keys(self, prefix=''):
-        if prefix:
-            prefix = prefix + '*'
+        prefix = '{}:{}*'.format(self.key_prefix, prefix)
         return self.client.keys(prefix)

     def get_store(self):
         return self.client.connection_pool

     def get(self, key):
         value = self.client.get(key)
         if value is None:
             return NO_VALUE
         return self._loads(value)

+    def get_multi(self, keys):
+        if not keys:
+            return []
+        values = self.client.mget(keys)
+        loads = self._loads
+        return [
+            loads(v) if v is not None else NO_VALUE
+            for v in values]
+
     def set(self, key, value):
         if self.redis_expiration_time:
             self.client.setex(key, self.redis_expiration_time,
                               self._dumps(value))
         else:
             self.client.set(key, self._dumps(value))

     def set_multi(self, mapping):
+        dumps = self._dumps
         mapping = dict(
-            (k, self._dumps(v))
+            (k, dumps(v))
             for k, v in mapping.items()
         )

         if not self.redis_expiration_time:
             self.client.mset(mapping)
         else:
             pipe = self.client.pipeline()
             for key, value in mapping.items():
                 pipe.setex(key, self.redis_expiration_time, value)
             pipe.execute()

     def get_mutex(self, key):
         u = redis_backend.u
         if self.distributed_lock:
             lock_key = u('_lock_{0}').format(key)
             log.debug('Trying to acquire Redis lock for key %s', lock_key)
             return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
         else:
             return None
+
+
+class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
+    key_prefix = 'redis_pickle_backend'
+    pass
+
+
+class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
+    key_prefix = 'redis_msgpack_backend'
+    pass
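One subtlety of the new MsgPackSerializer: dogpile stores a (payload, metadata) pair, and while pickle round-trips the CachedValue wrapper itself, msgpack only returns a plain tuple, so _loads() re-wraps it. A small illustration; the payload and metadata values are made up.

import msgpack
from dogpile.cache.api import CachedValue

# what a dogpile region conceptually stores: (payload, metadata)
stored = ('some payload', {'ct': 1558000000.0, 'v': 1})

packed = msgpack.packb(stored)
unpacked = msgpack.unpackb(packed, use_list=False)  # plain tuple, wrapper lost
restored = CachedValue(*unpacked)                   # wrapper added back, as in _loads()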
@@ -1,139 +1,149 @@
 # RhodeCode VCSServer provides access to different vcs backends via network.
 # Copyright (C) 2014-2019 RhodeCode GmbH
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
 # the Free Software Foundation; either version 3 of the License, or
 # (at your option) any later version.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 import os
 import logging
 import functools

 from dogpile.cache import CacheRegion
 from dogpile.cache.util import compat

 from vcsserver.utils import safe_str, sha1


 log = logging.getLogger(__name__)


 class RhodeCodeCacheRegion(CacheRegion):

     def conditional_cache_on_arguments(
             self, namespace=None,
             expiration_time=None,
             should_cache_fn=None,
             to_str=compat.string_type,
             function_key_generator=None,
             condition=True):
         """
         Custom conditional decorator, that will not touch any dogpile internals if
         condition isn't meet. This works a bit different than should_cache_fn
         And it's faster in cases we don't ever want to compute cached values
         """
         expiration_time_is_callable = compat.callable(expiration_time)

         if function_key_generator is None:
             function_key_generator = self.function_key_generator

         def decorator(fn):
             if to_str is compat.string_type:
                 # backwards compatible
                 key_generator = function_key_generator(namespace, fn)
             else:
                 key_generator = function_key_generator(namespace, fn, to_str=to_str)

             @functools.wraps(fn)
             def decorate(*arg, **kw):
                 key = key_generator(*arg, **kw)

                 @functools.wraps(fn)
                 def creator():
                     return fn(*arg, **kw)

                 if not condition:
                     return creator()

                 timeout = expiration_time() if expiration_time_is_callable \
                     else expiration_time

                 return self.get_or_create(key, creator, timeout, should_cache_fn)

             def invalidate(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.delete(key)

             def set_(value, *arg, **kw):
                 key = key_generator(*arg, **kw)
                 self.set(key, value)

             def get(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 return self.get(key)

             def refresh(*arg, **kw):
                 key = key_generator(*arg, **kw)
                 value = fn(*arg, **kw)
                 self.set(key, value)
                 return value

             decorate.set = set_
             decorate.invalidate = invalidate
             decorate.refresh = refresh
             decorate.get = get
             decorate.original = fn
             decorate.key_generator = key_generator
             decorate.__wrapped__ = fn

             return decorate

         return decorator


 def make_region(*arg, **kw):
     return RhodeCodeCacheRegion(*arg, **kw)


 def get_default_cache_settings(settings, prefixes=None):
     prefixes = prefixes or []
     cache_settings = {}
     for key in settings.keys():
         for prefix in prefixes:
             if key.startswith(prefix):
                 name = key.split(prefix)[1].strip()
                 val = settings[key]
                 if isinstance(val, compat.string_types):
                     val = val.strip()
                 cache_settings[name] = val
     return cache_settings


 def compute_key_from_params(*args):
     """
     Helper to compute key from given params to be used in cache manager
     """
     return sha1("_".join(map(safe_str, args)))


-def key_generator(namespace, fn):
+def backend_key_generator(backend):
+    """
+    Special wrapper that also sends over the backend to the key generator
+    """
+    def wrapper(namespace, fn):
+        return key_generator(backend, namespace, fn)
+    return wrapper
+
+
+def key_generator(backend, namespace, fn):
     fname = fn.__name__

     def generate_key(*args):
-        namespace_pref = namespace or 'default'
+        backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
+        namespace_pref = namespace or 'default_namespace'
         arg_key = compute_key_from_params(*args)
-        final_key = "{}:{}_{}".format(namespace_pref, fname, arg_key)
+        final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)

         return final_key

     return generate_key
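A hedged usage sketch of how a decorated call ends up with a backend-prefixed key; it assumes configure_dogpile_cache() has already registered a region, and the region name and function below are hypothetical.

from vcsserver.lib.rc_cache import region_meta

# assumes configure_dogpile_cache() already registered this region
region = region_meta.dogpile_cache_regions['repo_object']

@region.conditional_cache_on_arguments(namespace='repo_object', condition=True)
def heavy_lookup(repo_id):
    # stand-in for an expensive VCS call
    return {'repo_id': repo_id}

# stored under a key shaped like:
#   <backend key_prefix>:repo_object:heavy_lookup_<sha1 of args>
result = heavy_lookup('some-repo')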