models: Remove unused imports.
Author: Martin Bornhold
Changeset: r895:e970000e (branch: default)


@@ -1,234 +1,235 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2015-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import beaker
import logging
import threading

from beaker.cache import _cache_decorate, cache_regions, region_invalidate
+from sqlalchemy.exc import IntegrityError

from rhodecode.lib.utils import safe_str, md5
-from rhodecode.model.db import Session, CacheKey, IntegrityError
+from rhodecode.model.db import Session, CacheKey

log = logging.getLogger(__name__)

FILE_TREE = 'cache_file_tree'
FILE_TREE_META = 'cache_file_tree_metadata'
FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
SUMMARY_STATS = 'cache_summary_stats'

# This list of caches gets purged when invalidation happens
USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META)

DEFAULT_CACHE_MANAGER_CONFIG = {
    'type': 'memorylru_base',
    'max_items': 10240,
    'key_length': 256,
    'enabled': True
}


def configure_cache_region(
        region_name, region_kw, default_cache_kw, default_expire=60):
    default_type = default_cache_kw.get('type', 'memory')
    default_lock_dir = default_cache_kw.get('lock_dir')
    default_data_dir = default_cache_kw.get('data_dir')

    region_kw['lock_dir'] = region_kw.get('lock_dir', default_lock_dir)
    region_kw['data_dir'] = region_kw.get('data_dir', default_data_dir)
    region_kw['type'] = region_kw.get('type', default_type)
    region_kw['expire'] = int(region_kw.get('expire', default_expire))

    beaker.cache.cache_regions[region_name] = region_kw


def get_cache_manager(region_name, cache_name, custom_ttl=None):
    """
    Creates a Beaker cache manager. Such instance can be used like that::

        _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
        cache_manager = caches.get_cache_manager('repo_cache_long', _namespace)
        _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
        def heavy_compute():
            ...
        result = cache_manager.get(_cache_key, createfunc=heavy_compute)

    :param region_name: region from ini file
    :param cache_name: custom cache name, usually prefix+repo_name. eg
        file_switcher_repo1
    :param custom_ttl: override .ini file timeout on this cache
    :return: instance of cache manager
    """

    cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
    if custom_ttl:
        log.debug('Updating region %s with custom ttl: %s',
                  region_name, custom_ttl)
        cache_config.update({'expire': custom_ttl})

    return beaker.cache.Cache._get_cache(cache_name, cache_config)


def clear_cache_manager(cache_manager):
    """
    namespace = 'foobar'
    cache_manager = get_cache_manager('repo_cache_long', namespace)
    clear_cache_manager(cache_manager)
    """

    log.debug('Clearing all values for cache manager %s', cache_manager)
    cache_manager.clear()


def clear_repo_caches(repo_name):
    # invalidate cache manager for this repo
    for prefix in USED_REPO_CACHES:
        namespace = get_repo_namespace_key(prefix, repo_name)
        cache_manager = get_cache_manager('repo_cache_long', namespace)
        clear_cache_manager(cache_manager)


def compute_key_from_params(*args):
    """
    Helper to compute key from given params to be used in cache manager
    """
    return md5("_".join(map(safe_str, args)))


def get_repo_namespace_key(prefix, repo_name):
    return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))


def conditional_cache(region, prefix, condition, func):
    """
    Conditional caching function use like::

        def _c(arg):
            # heavy computation function
            return data

        # depending on the condition the compute is wrapped in cache or not
        compute = conditional_cache('short_term', 'cache_desc',
                                    condition=True, func=func)
        return compute(arg)

    :param region: name of cache region
    :param prefix: cache region prefix
    :param condition: condition for cache to be triggered, and
        return data cached
    :param func: wrapped heavy function to compute

    """
    wrapped = func
    if condition:
        log.debug('conditional_cache: True, wrapping call of '
                  'func: %s into %s region cache', region, func)
        cached_region = _cache_decorate((prefix,), None, None, region)
        wrapped = cached_region(func)
    return wrapped


class ActiveRegionCache(object):
    def __init__(self, context):
        self.context = context

    def invalidate(self, *args, **kwargs):
        return False

    def compute(self):
        log.debug('Context cache: getting obj %s from cache', self.context)
        return self.context.compute_func(self.context.cache_key)


class FreshRegionCache(ActiveRegionCache):
    def invalidate(self):
        log.debug('Context cache: invalidating cache for %s', self.context)
        region_invalidate(
            self.context.compute_func, None, self.context.cache_key)
        return True


class InvalidationContext(object):
    def __repr__(self):
        return '<InvalidationContext:{}[{}]>'.format(
            safe_str(self.repo_name), safe_str(self.cache_type))

    def __init__(self, compute_func, repo_name, cache_type,
                 raise_exception=False, thread_scoped=False):
        self.compute_func = compute_func
        self.repo_name = repo_name
        self.cache_type = cache_type
        self.cache_key = compute_key_from_params(
            repo_name, cache_type)
        self.raise_exception = raise_exception

        # Append the thread id to the cache key if this invalidation context
        # should be scoped to the current thread.
        if thread_scoped:
            thread_id = threading.current_thread().ident
            self.cache_key = '{cache_key}_{thread_id}'.format(
                cache_key=self.cache_key, thread_id=thread_id)

    def get_cache_obj(self):
        cache_key = CacheKey.get_cache_key(
            self.repo_name, self.cache_type)
        cache_obj = CacheKey.get_active_cache(cache_key)
        if not cache_obj:
            cache_obj = CacheKey(cache_key, self.repo_name)
        return cache_obj

    def __enter__(self):
        """
        Test if current object is valid, and return CacheRegion function
        that does invalidation and calculation
        """

        self.cache_obj = self.get_cache_obj()
        if self.cache_obj.cache_active:
            # means our cache obj is existing and marked as it's
            # cache is not outdated, we return BaseInvalidator
            self.skip_cache_active_change = True
            return ActiveRegionCache(self)

        # the key is either not existing or set to False, we return
        # the real invalidator which re-computes value. We additionally set
        # the flag to actually update the Database objects
        self.skip_cache_active_change = False
        return FreshRegionCache(self)

    def __exit__(self, exc_type, exc_val, exc_tb):

        if self.skip_cache_active_change:
            return

        try:
            self.cache_obj.cache_active = True
            Session().add(self.cache_obj)
            Session().commit()
        except IntegrityError:
            # if we catch integrity error, it means we inserted this object
            # assumption is that's really an edge race-condition case and
            # it's safe is to skip it
            Session().rollback()
        except Exception:
            log.exception('Failed to commit on cache key update')
            Session().rollback()
            if self.raise_exception:
                raise
[diff of an additional modified file in this changeset omitted: content truncated]
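
For orientation, the classes at the end of the diff form a small protocol: InvalidationContext looks up (or creates) a CacheKey row and, used as a context manager, yields an ActiveRegionCache while that key is still marked active, or a FreshRegionCache whose invalidate() purges the Beaker entry so compute() re-runs the computation; on exit the key is marked active again. The sketch below is illustrative only and not part of this changeset: it assumes a configured RhodeCode environment (database session plus cache settings) and that this module is importable as rhodecode.lib.caches; the region name 'sketch_region' and the returned payload are made-up placeholders.

# Illustrative sketch only, not part of this changeset. Assumes a working
# RhodeCode setup; 'sketch_region' and the returned payload are placeholders.
from beaker.cache import cache_region

from rhodecode.lib import caches


# Register a Beaker region so the cache_region decorator (and the
# region_invalidate call inside FreshRegionCache) have something to work on.
caches.configure_cache_region(
    'sketch_region', region_kw={}, default_cache_kw={'type': 'memory'},
    default_expire=60)


@cache_region('sketch_region')
def _cached_file_tree(cache_key):
    # Heavy computation; Beaker keeps the result under cache_key until the
    # region entry is invalidated.
    return {'tree_for': cache_key}  # placeholder payload


def get_file_tree(repo_name):
    inv_context = caches.InvalidationContext(
        compute_func=_cached_file_tree,
        repo_name=repo_name,
        cache_type=caches.FILE_TREE)

    # __enter__ returns ActiveRegionCache while the CacheKey row is active,
    # otherwise FreshRegionCache: invalidate() purges the Beaker entry so
    # compute() recomputes it, and __exit__ re-activates the key (the
    # IntegrityError branch in the diff covers two workers racing to insert
    # the same key).
    with inv_context as context_cache:
        context_cache.invalidate()
        return context_cache.compute()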