##// END OF EJS Templates
models: Remove unused imports.
Martin Bornhold -
r895:e970000e default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,234 +1,235 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2015-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import beaker
23 23 import logging
24 24 import threading
25 25
26 26 from beaker.cache import _cache_decorate, cache_regions, region_invalidate
27 from sqlalchemy.exc import IntegrityError
27 28
28 29 from rhodecode.lib.utils import safe_str, md5
29 from rhodecode.model.db import Session, CacheKey, IntegrityError
30 from rhodecode.model.db import Session, CacheKey
30 31
log = logging.getLogger(__name__)

# Cache namespace prefixes used to build per-repository cache keys.
FILE_TREE = 'cache_file_tree'
FILE_TREE_META = 'cache_file_tree_metadata'
FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
SUMMARY_STATS = 'cache_summary_stats'

# This list of caches gets purged when invalidation happens
# (see clear_repo_caches below).
USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META)

# Fallback Beaker manager settings used when the requested region is not
# configured in beaker.cache.cache_regions (see get_cache_manager).
DEFAULT_CACHE_MANAGER_CONFIG = {
    'type': 'memorylru_base',
    'max_items': 10240,
    'key_length': 256,
    'enabled': True
}
47 48
48 49
def configure_cache_region(
        region_name, region_kw, default_cache_kw, default_expire=60):
    """
    Register ``region_name`` with Beaker, filling in any settings missing
    from ``region_kw`` with fallbacks taken from ``default_cache_kw``.

    :param region_name: name under which the region is registered.
    :param region_kw: region settings; mutated in place with the resolved
        values before being stored.
    :param default_cache_kw: source of fallback ``type``/``lock_dir``/
        ``data_dir`` values.
    :param default_expire: expiry (seconds) used when the region does not
        define one.
    """
    fallbacks = {
        'lock_dir': default_cache_kw.get('lock_dir'),
        'data_dir': default_cache_kw.get('data_dir'),
        'type': default_cache_kw.get('type', 'memory'),
    }
    for setting, fallback in fallbacks.items():
        region_kw[setting] = region_kw.get(setting, fallback)
    # Expiry is always coerced to an integer number of seconds.
    region_kw['expire'] = int(region_kw.get('expire', default_expire))

    beaker.cache.cache_regions[region_name] = region_kw
61 62
62 63
def get_cache_manager(region_name, cache_name, custom_ttl=None):
    """
    Creates a Beaker cache manager. Such instance can be used like that::

        _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
        cache_manager = caches.get_cache_manager('repo_cache_long', _namespace)
        _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
        def heavy_compute():
            ...
        result = cache_manager.get(_cache_key, createfunc=heavy_compute)

    :param region_name: region from ini file
    :param cache_name: custom cache name, usually prefix+repo_name. eg
        file_switcher_repo1
    :param custom_ttl: override .ini file timeout on this cache
    :return: instance of cache manager
    """

    cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
    if custom_ttl:
        log.debug('Updating region %s with custom ttl: %s',
                  region_name, custom_ttl)
        # Work on a copy: updating the dict returned by cache_regions.get()
        # in place would leak the custom TTL into the shared region config
        # (or into DEFAULT_CACHE_MANAGER_CONFIG) and silently change the
        # expiry for every later caller of this region.
        cache_config = dict(cache_config)
        cache_config.update({'expire': custom_ttl})

    return beaker.cache.Cache._get_cache(cache_name, cache_config)
88 89
89 90
def clear_cache_manager(cache_manager):
    """
    Drop every value held by the given Beaker cache manager.

    Usage::

        namespace = 'foobar'
        cache_manager = get_cache_manager('repo_cache_long', namespace)
        clear_cache_manager(cache_manager)
    """
    log.debug('Clearing all values for cache manager %s', cache_manager)
    cache_manager.clear()
99 100
100 101
def clear_repo_caches(repo_name):
    """Purge every cache namespace that stores data for ``repo_name``."""
    namespaces = (
        get_repo_namespace_key(prefix, repo_name)
        for prefix in USED_REPO_CACHES)
    for namespace in namespaces:
        manager = get_cache_manager('repo_cache_long', namespace)
        clear_cache_manager(manager)
107 108
108 109
def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    raw_key = "_".join(safe_str(arg) for arg in args)
    return md5(raw_key)
114 115
115 116
def get_repo_namespace_key(prefix, repo_name):
    # Namespace is the prefix joined with an md5 digest of the repo name.
    repo_part = compute_key_from_params(repo_name)
    return '{0}_{1}'.format(prefix, repo_part)
118 119
119 120
def conditional_cache(region, prefix, condition, func):
    """
    Conditional caching function use like::
        def _c(arg):
            # heavy computation function
            return data

        # depending on the condition the compute is wrapped in cache or not
        compute = conditional_cache('short_term', 'cache_desc',
                                    condition=True, func=func)
        return compute(arg)

    :param region: name of cache region
    :param prefix: cache region prefix
    :param condition: condition for cache to be triggered, and
        return data cached
    :param func: wrapped heavy function to compute

    """
    wrapped = func
    if condition:
        # Fixed argument order: the message placeholders are "func: %s into
        # %s region cache", so the function must come first, then the region.
        log.debug('conditional_cache: True, wrapping call of '
                  'func: %s into %s region cache', func, region)
        cached_region = _cache_decorate((prefix,), None, None, region)
        wrapped = cached_region(func)
    return wrapped
146 147
147 148
class ActiveRegionCache(object):
    """
    Cache handle returned by InvalidationContext when the stored cache key
    is still marked active: it never invalidates and simply delegates the
    value lookup to the context's compute function.
    """

    def __init__(self, context):
        self.context = context

    def invalidate(self, *args, **kwargs):
        # An active cache entry never requires invalidation.
        return False

    def compute(self):
        ctx = self.context
        log.debug('Context cache: getting obj %s from cache', ctx)
        return ctx.compute_func(ctx.cache_key)
158 159
159 160
class FreshRegionCache(ActiveRegionCache):
    """
    Cache handle returned by InvalidationContext when the stored cache key
    is missing or inactive: invalidation drops the Beaker region entry so
    the value gets recomputed.
    """

    def invalidate(self):
        context = self.context
        log.debug('Context cache: invalidating cache for %s', context)
        region_invalidate(context.compute_func, None, context.cache_key)
        return True
166 167
167 168
class InvalidationContext(object):
    """
    Context manager pairing a compute function with a ``CacheKey`` database
    record so stale caches can be detected and recomputed.

    ``__enter__`` returns an :class:`ActiveRegionCache` when the CacheKey
    row is marked active (serve the cached value), otherwise a
    :class:`FreshRegionCache` whose ``invalidate`` drops the Beaker region
    entry so the value is recomputed.  On ``__exit__`` the row is marked
    active again, unless the cached (active) path was taken.
    """

    def __repr__(self):
        return '<InvalidationContext:{}[{}]>'.format(
            safe_str(self.repo_name), safe_str(self.cache_type))

    def __init__(self, compute_func, repo_name, cache_type,
                 raise_exception=False, thread_scoped=False):
        # :param compute_func: callable invoked with the cache key to
        #     produce the value (see ActiveRegionCache.compute).
        # :param repo_name: repository the cache entry belongs to.
        # :param cache_type: cache namespace, e.g. one of the module-level
        #     cache name constants.
        # :param raise_exception: if True, re-raise unexpected errors hit
        #     while committing the cache state in __exit__.
        # :param thread_scoped: if True, the key is unique per thread.
        self.compute_func = compute_func
        self.repo_name = repo_name
        self.cache_type = cache_type
        self.cache_key = compute_key_from_params(
            repo_name, cache_type)
        self.raise_exception = raise_exception

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped:
            thread_id = threading.current_thread().ident
            self.cache_key = '{cache_key}_{thread_id}'.format(
                cache_key=self.cache_key, thread_id=thread_id)

    def get_cache_obj(self):
        # Fetch the active CacheKey row for this repo/type; when none
        # exists yet, create a fresh (not yet persisted) one.
        cache_key = CacheKey.get_cache_key(
            self.repo_name, self.cache_type)
        cache_obj = CacheKey.get_active_cache(cache_key)
        if not cache_obj:
            cache_obj = CacheKey(cache_key, self.repo_name)
        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """

        self.cache_obj = self.get_cache_obj()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return BaseInvalidator
            self.skip_cache_active_change = True
            return ActiveRegionCache(self)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(self)

    def __exit__(self, exc_type, exc_val, exc_tb):

        # Cached path was taken in __enter__: nothing to persist.
        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now