@@ -351,22 +351,6 @@ rc_cache.sql_cache_short.expiration_time
 
 
 ####################################
-### BEAKER CACHE ####
-####################################
-
-## locking and default file storage for Beaker. Putting this into a ramdisk
-## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
-beaker.cache.data_dir = %(here)s/data/cache/beaker_data
-beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
-
-beaker.cache.regions = long_term
-
-beaker.cache.long_term.type = memorylru_base
-beaker.cache.long_term.expire = 172800
-beaker.cache.long_term.key_length = 256
-
-
-####################################
 ### BEAKER SESSION ####
 ####################################
 
@@ -326,22 +326,6 @@ rc_cache.sql_cache_short.expiration_time
 
 
 ####################################
-### BEAKER CACHE ####
-####################################
-
-## locking and default file storage for Beaker. Putting this into a ramdisk
-## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
-beaker.cache.data_dir = %(here)s/data/cache/beaker_data
-beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
-
-beaker.cache.regions = long_term
-
-beaker.cache.long_term.type = memory
-beaker.cache.long_term.expire = 172800
-beaker.cache.long_term.key_length = 256
-
-
-####################################
 ### BEAKER SESSION ####
 ####################################
 
@@ -17,17 +17,17 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
-
+import time
 import pytz
 import logging
 
-from beaker.cache import cache_region
 from pyramid.view import view_config
 from pyramid.response import Response
 from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed
 
 from rhodecode.apps._base import RepoAppView
 from rhodecode.lib import audit_logger
+from rhodecode.lib import rc_cache
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import (
     LoginRequired, HasRepoPermissionAnyDecorator)
@@ -124,11 +124,23 @@ class RepoFeedView(RepoAppView):
         """
         self.load_default_context()
 
-        def _generate_feed():
+        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+            self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.db_repo.repo_id)
+
+        region = rc_cache.get_or_create_region('cache_repo_longterm',
+                                               cache_namespace_uid)
+
+        condition = not self.path_filter.is_enabled
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
+                                               condition=condition)
+        def generate_atom_feed(repo_id, _repo_name, _feed_type):
             feed = Atom1Feed(
-                title=self.title %
-                link=h.route_url('repo_summary', repo_name=
-                description=self.description %
+                title=self.title % _repo_name,
+                link=h.route_url('repo_summary', repo_name=_repo_name),
+                description=self.description % _repo_name,
                 language=self.language,
                 ttl=self.ttl
             )
@@ -136,30 +148,31 @@ class RepoFeedView(RepoAppView):
             for commit in reversed(self._get_commits()):
                 date = self._set_timezone(commit.date)
                 feed.add_item(
-                    unique_id=self.uid(
+                    unique_id=self.uid(repo_id, commit.raw_id),
                     title=self._get_title(commit),
                     author_name=commit.author,
                     description=self._get_description(commit),
                     link=h.route_url(
-                        'repo_commit', repo_name=
+                        'repo_commit', repo_name=_repo_name,
                         commit_id=commit.raw_id),
                     pubdate=date,)
 
             return feed.mime_type, feed.writeString('utf-8')
 
-        @cache_region('long_term')
-        def _generate_feed_and_cache(cache_key):
-            return _generate_feed()
+        start = time.time()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+        with inv_context_manager as invalidation_context:
+            # check for stored invalidation signal, and maybe purge the cache
+            # before computing it again
+            if invalidation_context.should_invalidate():
+                generate_atom_feed.invalidate(
+                    self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
 
-        if self.path_filter.is_enabled:
-            mime_type, feed = _generate_feed()
-        else:
-            invalidator_context = CacheKey.repo_context_cache(
-                _generate_feed_and_cache, self.db_repo_name,
-                CacheKey.CACHE_TYPE_ATOM)
-            with invalidator_context as context:
-                context.invalidate()
-                mime_type, feed = context.compute()
+            mime_type, feed = generate_atom_feed(
+                self.db_repo.repo_id, self.db_repo.repo_name, 'atom')
+            compute_time = time.time() - start
+            log.debug('Repo ATOM feed computed in %.3fs', compute_time)
 
         response = Response(feed)
         response.content_type = mime_type
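Note: the pattern the hunks above introduce can be read in isolation. Below is a minimal sketch under the rc_cache API added in this changeset; the repo id, repository name and feed payload are illustrative placeholders, not values from the change:

    import time

    from rhodecode.lib import rc_cache
    from rhodecode.model.db import CacheKey

    repo_id = 1  # illustrative repo id
    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
        repo_id, CacheKey.CACHE_TYPE_FEED)
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=True)
    def generate_feed(repo_id, repo_name, feed_type):
        # the expensive feed rendering would happen here; the result is
        # memoized per (repo_id, repo_name, feed_type) argument tuple
        return 'application/atom+xml', '<feed/>'

    start = time.time()
    inv_context_manager = rc_cache.InvalidationContext(
        uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
    with inv_context_manager as invalidation_context:
        # a stored invalidation signal purges the memoized value before re-use
        if invalidation_context.should_invalidate():
            generate_feed.invalidate(repo_id, 'some-repo', 'atom')

        mime_type, feed = generate_feed(repo_id, 'some-repo', 'atom')
    print('feed computed in %.3fs' % (time.time() - start))

In the views themselves, condition is set to not self.path_filter.is_enabled, so responses are only cached when no path-based ACL filter is active.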
@@ -177,11 +190,22 @@ class RepoFeedView(RepoAppView):
         """
         self.load_default_context()
 
-        def _generate_feed():
+        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+            self.db_repo.repo_id, CacheKey.CACHE_TYPE_FEED)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.db_repo.repo_id)
+        region = rc_cache.get_or_create_region('cache_repo_longterm',
+                                               cache_namespace_uid)
+
+        condition = not self.path_filter.is_enabled
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
+                                               condition=condition)
+        def generate_rss_feed(repo_id, _repo_name, _feed_type):
             feed = Rss201rev2Feed(
-                title=self.title %
-                link=h.route_url('repo_summary', repo_name=
-                description=self.description %
+                title=self.title % _repo_name,
+                link=h.route_url('repo_summary', repo_name=_repo_name),
+                description=self.description % _repo_name,
                 language=self.language,
                 ttl=self.ttl
             )
@@ -189,31 +213,31 @@ class RepoFeedView(RepoAppView):
             for commit in reversed(self._get_commits()):
                 date = self._set_timezone(commit.date)
                 feed.add_item(
-                    unique_id=self.uid(
+                    unique_id=self.uid(repo_id, commit.raw_id),
                     title=self._get_title(commit),
                     author_name=commit.author,
                     description=self._get_description(commit),
                     link=h.route_url(
-                        'repo_commit', repo_name=
+                        'repo_commit', repo_name=_repo_name,
                         commit_id=commit.raw_id),
                     pubdate=date,)
 
             return feed.mime_type, feed.writeString('utf-8')
 
-        @cache_region('long_term')
-        def _generate_feed_and_cache(cache_key):
-            return _generate_feed()
+        start = time.time()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+        with inv_context_manager as invalidation_context:
+            # check for stored invalidation signal, and maybe purge the cache
+            # before computing it again
+            if invalidation_context.should_invalidate():
+                generate_rss_feed.invalidate(
+                    self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
 
-        if self.path_filter.is_enabled:
-            mime_type, feed = _generate_feed()
-        else:
-            invalidator_context = CacheKey.repo_context_cache(
-                _generate_feed_and_cache, self.db_repo_name,
-                CacheKey.CACHE_TYPE_RSS)
-
-            with invalidator_context as context:
-                context.invalidate()
-                mime_type, feed = context.compute()
+            mime_type, feed = generate_rss_feed(
+                self.db_repo.repo_id, self.db_repo.repo_name, 'rss')
+            compute_time = time.time() - start
+            log.debug('Repo RSS feed computed in %.3fs', compute_time)
 
         response = Response(feed)
         response.content_type = mime_type
@@ -34,7 +34,7 @@ import rhodecode
 from rhodecode.apps._base import RepoAppView
 
 from rhodecode.controllers.utils import parse_path_ref
-from rhodecode.lib import diffs, helpers as h,
+from rhodecode.lib import diffs, helpers as h, rc_cache
 from rhodecode.lib import audit_logger
 from rhodecode.lib.exceptions import NonRelativePathError
 from rhodecode.lib.codeblocks import (
@@ -18,12 +18,12 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
+import time
 import logging
 import string
 import rhodecode
 
 from pyramid.view import view_config
-from beaker.cache import cache_region
 
 from rhodecode.controllers import utils
 from rhodecode.apps._base import RepoAppView
@@ -53,26 +53,32 @@ class RepoSummaryView(RepoAppView):
         c.rhodecode_repo = self.rhodecode_vcs_repo
         return c
 
-    def _get_readme_data(self, db_repo,
-        repo_name = db_repo.repo_name
+    def _get_readme_data(self, db_repo, renderer_type):
+
         log.debug('Looking for README file')
 
-        @cache_region('long_term')
-        def _generate_readme(cache_key):
+        cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+            db_repo.repo_id, CacheKey.CACHE_TYPE_README)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.db_repo.repo_id)
+        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
+        def generate_repo_readme(repo_id, _repo_name, _renderer_type):
             readme_data = None
             readme_node = None
             readme_filename = None
             commit = self._get_landing_commit_or_none(db_repo)
             if commit:
                 log.debug("Searching for a README file.")
-                readme_node = ReadmeFinder(
+                readme_node = ReadmeFinder(_renderer_type).search(commit)
                 if readme_node:
                     relative_urls = {
                         'raw': h.route_path(
-                            'repo_file_raw', repo_name=repo_name,
+                            'repo_file_raw', repo_name=_repo_name,
                             commit_id=commit.raw_id, f_path=readme_node.path),
                         'standard': h.route_path(
-                            'repo_files', repo_name=repo_name,
+                            'repo_files', repo_name=_repo_name,
                             commit_id=commit.raw_id, f_path=readme_node.path),
                     }
                     readme_data = self._render_readme_or_none(
@@ -80,14 +86,21 @@ class RepoSummaryView(RepoAppView):
                 readme_filename = readme_node.path
             return readme_data, readme_filename
 
-        invalidator_context = CacheKey.repo_context_cache(
-            _generate_readme, repo_name, CacheKey.CACHE_TYPE_README)
+        start = time.time()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+        with inv_context_manager as invalidation_context:
+            # check for stored invalidation signal, and maybe purge the cache
+            # before computing it again
+            if invalidation_context.should_invalidate():
+                generate_repo_readme.invalidate(
+                    db_repo.repo_id, db_repo.repo_name, renderer_type)
 
-        with invalidator_context as context:
-            context.invalidate()
-            compute
-
-        return
+            instance = generate_repo_readme(
+                db_repo.repo_id, db_repo.repo_name, renderer_type)
+            compute_time = time.time() - start
+            log.debug('Repo readme generated and computed in %.3fs', compute_time)
+            return instance
 
     def _get_landing_commit_or_none(self, db_repo):
         log.debug("Getting the landing commit.")
@@ -35,7 +35,7 @@ from pyramid.threadlocal import get_curr
 
 from rhodecode.authentication.interface import IAuthnPluginRegistry
 from rhodecode.authentication.schema import AuthnPluginSettingsSchemaBase
-from rhodecode.lib import
+from rhodecode.lib import rc_cache
 from rhodecode.lib.auth import PasswordGenerator, _RhodeCodeCryptoBCrypt
 from rhodecode.lib.utils2 import safe_int, safe_str
 from rhodecode.lib.exceptions import LdapConnectionError
@@ -232,7 +232,6 @@ def includeme(config):
     # Includes which are required. The application would fail without them.
     config.include('pyramid_mako')
     config.include('pyramid_beaker')
-    config.include('rhodecode.lib.caches')
    config.include('rhodecode.lib.rc_cache')
 
     config.include('rhodecode.authentication')
@@ -467,6 +466,20 @@ def _sanitize_cache_settings(settings):
         'rc_cache.cache_repo.arguments.filename',
         os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
 
+    # cache_repo_longterm memory, 96H
+    _string_setting(
+        settings,
+        'rc_cache.cache_repo_longterm.backend',
+        'dogpile.cache.rc.memory_lru')
+    _int_setting(
+        settings,
+        'rc_cache.cache_repo_longterm.expiration_time',
+        345600)
+    _int_setting(
+        settings,
+        'rc_cache.cache_repo_longterm.max_size',
+        10000)
+
     # sql_cache_short
     _string_setting(
         settings,
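Note: a hedged sketch of how these new defaults end up as a usable dogpile region. configure_dogpile_cache and get_or_create_region come from rhodecode.lib.rc_cache as shown elsewhere in this changeset; the literal settings dict and the namespace string are illustrative only:

    from rhodecode.lib import rc_cache

    # mirrors the defaults registered above; an .ini file could override them
    settings = {
        'rc_cache.cache_repo_longterm.backend': 'dogpile.cache.rc.memory_lru',
        'rc_cache.cache_repo_longterm.expiration_time': 345600,  # 96 hours
        'rc_cache.cache_repo_longterm.max_size': 10000,
    }
    rc_cache.configure_dogpile_cache(settings)  # registers regions from settings
    region = rc_cache.get_or_create_region(
        'cache_repo_longterm', 'cache_repo_instance.1')  # namespace is illustrative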
@@ -504,7 +504,6 @@ def bootstrap_config(request):
     # allow pyramid lookup in testing
     config.include('pyramid_mako')
     config.include('pyramid_beaker')
-    config.include('rhodecode.lib.caches')
     config.include('rhodecode.lib.rc_cache')
 
     add_events_routes(config)
@@ -648,17 +648,7 @@ class Repository(Base, BaseModel):
 
     @property
     def scm_instance_cached(self):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-
-        inv = self.invalidate
-        if inv is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(inv.cache_key)
-        return _c(rn)
+        return self.__get_instance()
 
     def __get_instance(self):
 
@@ -670,17 +670,7 @@ class Repository(Base, BaseModel):
 
     @property
     def scm_instance_cached(self):
-        @cache_region('long_term')
-        def _c(repo_name):
-            return self.__get_instance()
-        rn = self.repo_name
-        log.debug('Getting cached instance of repo')
-        inv = self.invalidate
-        if inv is not None:
-            region_invalidate(_c, None, rn)
-            # update our cache
-            CacheInvalidation.set_valid(inv.cache_key)
-        return _c(rn)
+        return self.__get_instance()
 
     def __get_instance(self):
         repo_full_path = self.repo_full_path
@@ -2262,18 +2262,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -3165,27 +3154,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -1963,18 +1963,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         repo_full_path = self.repo_full_path
@@ -2849,25 +2838,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(compute_func, repo_name, cache_type)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -1966,18 +1966,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2841,27 +2830,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -1966,18 +1966,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2841,26 +2830,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
 
 
 class ChangesetComment(Base, BaseModel):
@@ -1968,18 +1968,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2845,27 +2834,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -1968,18 +1968,7 @@ class Repository(Base, BaseModel):
        return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2845,27 +2834,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -2010,18 +2010,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2900,27 +2889,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -2011,18 +2011,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -2901,27 +2890,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -2199,18 +2199,7 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
-            return self._get_instance()
-
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with invalidator_context as context:
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        self._get_instance()
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -3101,27 +3090,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -26,7 +26,7 @@ from urlparse import urljoin
 import requests
 from pyramid.httpexceptions import HTTPNotAcceptable
 
-from rhodecode.lib import cache
+from rhodecode.lib import rc_cache
 from rhodecode.lib.middleware import simplevcs
 from rhodecode.lib.utils import is_valid_repo
 from rhodecode.lib.utils2 import str2bool, safe_int
@@ -86,7 +86,7 @@ class SimpleSvnApp(object):
 
         if response.headers.get('SVN-Txn-name'):
             svn_tx_id = response.headers.get('SVN-Txn-name')
-            txn_id = cache
+            txn_id = rc_cache.compute_key_from_params(
                 self.config['repository'], svn_tx_id)
             port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
             store_txn_id_data(txn_id, {'port': port})
@@ -40,7 +40,7 @@ from zope.cachedescriptors.property impo
 
 import rhodecode
 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
-from rhodecode.lib import
+from rhodecode.lib import rc_cache
 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
 from rhodecode.lib.base import (
     BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
@@ -77,7 +77,7 @@ def extract_svn_txn_id(acl_repo_name, da
             match = pat.search(sub_el.text)
             if match:
                 svn_tx_id = match.groupdict()['txn_id']
-                txn_id = cache
+                txn_id = rc_cache.compute_key_from_params(
                     acl_repo_name, svn_tx_id)
                 return txn_id
     except Exception:
@@ -39,7 +39,8 @@ log = logging.getLogger(__name__)
 from . import region_meta
 from .utils import (
     get_default_cache_settings, key_generator, get_or_create_region,
-    clear_cache_namespace, make_region
+    clear_cache_namespace, make_region, InvalidationContext,
+    FreshRegionCache, ActiveRegionCache)
 
 
 def configure_dogpile_cache(settings):
@@ -20,11 +20,16 @@
 import os
 import logging
 import functools
+import threading
 
 from dogpile.cache import CacheRegion
 from dogpile.cache.util import compat
 
+import rhodecode
 from rhodecode.lib.utils import safe_str, sha1
+from rhodecode.lib.utils2 import safe_unicode
+from rhodecode.model.db import Session, CacheKey, IntegrityError
+
 from . import region_meta
 
 log = logging.getLogger(__name__)
@@ -183,3 +188,127 @@ def clear_cache_namespace(cache_region,
     cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
     region.delete_multi(cache_keys)
     return len(cache_keys)
+
+
+class ActiveRegionCache(object):
+    def __init__(self, context):
+        self.context = context
+
+    def should_invalidate(self):
+        return False
+
+
+class FreshRegionCache(object):
+    def __init__(self, context):
+        self.context = context
+
+    def should_invalidate(self):
+        return True
+
+
+class InvalidationContext(object):
+    """
+    usage::
+
+        import time
+        from rhodecode.lib import rc_cache
+        my_id = 1
+        cache_namespace_uid = 'cache_demo.{}'.format(my_id)
+        invalidation_namespace = 'repo_cache:1'
+        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
+                                               expiration_time=30,
+                                               condition=True)
+        def heavy_compute(cache_name, param1, param2):
+            print('COMPUTE {}, {}, {}'.format(cache_name, param1, param2))
+            import time
+            time.sleep(30)
+            return True
+
+        start = time.time()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+        with inv_context_manager as invalidation_context:
+            # check for stored invalidation signal, and maybe purge the cache
+            # before computing it again
+            if invalidation_context.should_invalidate():
+                heavy_compute.invalidate('some_name', 'param1', 'param2')
+
+            result = heavy_compute('some_name', 'param1', 'param2')
+            compute_time = time.time() - start
+            print(compute_time)
+
+        # To send global invalidation signal, simply run
+        CacheKey.set_invalidate(invalidation_namespace)
+
+    """
+
+    def __repr__(self):
+        return '<InvalidationContext:{}[{}]>'.format(
+            safe_str(self.cache_key), safe_str(self.uid))
+
+    def __init__(self, uid, invalidation_namespace='',
+                 raise_exception=False, thread_scoped=True):
+        self.uid = uid
+        self.invalidation_namespace = invalidation_namespace
+        self.raise_exception = raise_exception
+        self.proc_id = safe_unicode(rhodecode.CONFIG.get('instance_id') or 'DEFAULT')
+        self.thread_id = 'global'
+
+        # Append the thread id to the cache key if this invalidation context
+        # should be scoped to the current thread.
+        if thread_scoped:
+            self.thread_id = threading.current_thread().ident
+
+        self.cache_key = compute_key_from_params(uid)
+        self.cache_key = 'proc:{}_thread:{}_{}'.format(
+            self.proc_id, self.thread_id, self.cache_key)
+
+    def get_or_create_cache_obj(self, uid, invalidation_namespace=''):
+        log.debug('Checking if %s cache key is present and active', self.cache_key)
+        cache_obj = CacheKey.get_active_cache(self.cache_key)
+        invalidation_namespace = invalidation_namespace or self.invalidation_namespace
+        if not cache_obj:
+            cache_obj = CacheKey(self.cache_key, cache_args=invalidation_namespace)
+        return cache_obj
+
+    def __enter__(self):
+        """
+        Test if current object is valid, and return CacheRegion function
+        that does invalidation and calculation
+        """
+        # register or get a new key based on uid
+        self.cache_obj = self.get_or_create_cache_obj(uid=self.uid)
+
+        if self.cache_obj.cache_active:
+            # means our cache obj is existing and marked as it's
+            # cache is not outdated, we return ActiveRegionCache
+            self.skip_cache_active_change = True
+            return ActiveRegionCache(context=self)
+
+        # the key is either not existing or set to False, we return
+        # the real invalidator which re-computes value. We additionally set
+        # the flag to actually update the Database objects
+        self.skip_cache_active_change = False
+        return FreshRegionCache(context=self)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+
+        if self.skip_cache_active_change:
+            return
+
+        try:
+            self.cache_obj.cache_active = True
+            Session().add(self.cache_obj)
+            Session().commit()
+        except IntegrityError:
+            # if we catch integrity error, it means we inserted this object
+            # assumption is that's really an edge race-condition case and
+            # it's safe is to skip it
+            Session().rollback()
+        except Exception:
+            log.exception('Failed to commit on cache key update')
+            Session().rollback()
+            if self.raise_exception:
+                raise
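Note: the docstring above covers the consumer side; the producer side of the invalidation signal is CacheKey.set_invalidate, which the model changes further down rework to key off the namespace string. A short sketch, with an illustrative repo id:

    from rhodecode.model.db import CacheKey

    repo_id = 1  # illustrative
    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)

    # flips cache_active to False for every CacheKey registered under this
    # namespace, so the next InvalidationContext enter returns FreshRegionCache
    CacheKey.set_invalidate(invalidation_namespace)
    # or drop the rows entirely:
    CacheKey.set_invalidate(invalidation_namespace, delete=True)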
@@ -47,7 +47,6 @@ from sqlalchemy.ext.declarative import d
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.exc import IntegrityError  # noqa
 from sqlalchemy.dialects.mysql import LONGTEXT
-from beaker.cache import cache_region
 from zope.cachedescriptors.property import Lazy as LazyProperty
 
 from pyramid.threadlocal import get_current_request
@@ -1845,8 +1844,10 @@ class Repository(Base, BaseModel):
         """
         Returns associated cache keys for that repo
         """
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.repo_id)
         return CacheKey.query()\
-            .filter(CacheKey.cache_args ==
+            .filter(CacheKey.cache_args == invalidation_namespace)\
             .order_by(CacheKey.cache_key)\
             .all()
 
@@ -2327,18 +2328,30 @@ class Repository(Base, BaseModel):
         return self._get_instance(cache=bool(cache), config=config)
 
     def _get_instance_cached(self):
-        @cache_region('long_term')
-        def _get_repo(cache_key):
+        from rhodecode.lib import rc_cache
+
+        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=self.repo_id)
+        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
+        def get_instance_cached(repo_id):
             return self._get_instance()
 
-        invalidator_context = CacheKey.repo_context_cache(
-            _get_repo, self.repo_name, None, thread_scoped=True)
-
-        with inv
-            context.invalidate()
-            repo = context.compute()
-
-        return repo
+        start = time.time()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+        with inv_context_manager as invalidation_context:
+            # check for stored invalidation signal, and maybe purge the cache
+            # before computing it again
+            if invalidation_context.should_invalidate():
+                get_instance_cached.invalidate(self.repo_id)
+
+            instance = get_instance_cached(self.repo_id)
+            compute_time = time.time() - start
+            log.debug('Repo instance fetched in %.3fs', compute_time)
+            return instance
 
     def _get_instance(self, cache=True, config=None):
         config = config or self._config
@@ -3128,9 +3141,10 @@ class CacheKey(Base, BaseModel):
         base_table_args,
     )
 
-    CACHE_TYPE_
-    CACHE_TYPE_RSS = 'RSS'
+    CACHE_TYPE_FEED = 'FEED'
     CACHE_TYPE_README = 'README'
+    # namespaces used to register process/thread aware caches
+    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
 
     cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
     cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
@@ -3179,44 +3193,27 @@ class CacheKey(Base, BaseModel):
         Session().commit()
 
     @classmethod
-    def get_cache_key(cls, repo_name, cache_type):
-        """
-
-        Generate a cache key for this process of RhodeCode instance.
-        Prefix most likely will be process id or maybe explicitly set
-        instance_id from .ini file.
-        """
-        import rhodecode
-        prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
-
-        repo_as_unicode = safe_unicode(repo_name)
-        key = u'{}_{}'.format(repo_as_unicode, cache_type) \
-            if cache_type else repo_as_unicode
-
-        return u'{}{}'.format(prefix, key)
-
-    @classmethod
-    def set_invalidate(cls, repo_name, delete=False):
+    def set_invalidate(cls, cache_uid, delete=False):
         """
         Mark all caches of a repo as invalid in the database.
         """
 
         try:
-            qry = Session().query(cls).filter(cls.cache_args == repo_name)
+            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
             if delete:
-                log.debug('cache objects deleted for repo %s',
-                          safe_str(repo_name))
                 qry.delete()
+                log.debug('cache objects deleted for cache args %s',
+                          safe_str(cache_uid))
             else:
-                log.debug('cache objects marked as invalid for repo %s',
-                          safe_str(repo_name))
                 qry.update({"cache_active": False})
+                log.debug('cache objects marked as invalid for cache args %s',
+                          safe_str(cache_uid))
 
             Session().commit()
         except Exception:
             log.exception(
-                'Cache key invalidation failed for repository %s',
-                safe_str(repo_name))
+                'Cache key invalidation failed for cache args %s',
+                safe_str(cache_uid))
             Session().rollback()
 
     @classmethod
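
With this change callers pass a namespace string instead of a bare repo name. A hedged usage sketch (requires a configured RhodeCode environment and an open database session; the repo_id value is made up, the names come from the hunk above):

# Hypothetical call site inside RhodeCode; repo_id=42 is illustrative.
from rhodecode.model.db import CacheKey

# build the namespace the same way the model code above does
invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=42)

# mark matching cache keys inactive so the next InvalidationContext run recomputes
CacheKey.set_invalidate(invalidation_namespace)

# or remove the rows entirely
CacheKey.set_invalidate(invalidation_namespace, delete=True)
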
@@ -3226,27 +3223,6 @@ class CacheKey(Base, BaseModel):
             return inv_obj
         return None
 
-    @classmethod
-    def repo_context_cache(cls, compute_func, repo_name, cache_type,
-                           thread_scoped=False):
-        """
-        @cache_region('long_term')
-        def _heavy_calculation(cache_key):
-            return 'result'
-
-        cache_context = CacheKey.repo_context_cache(
-            _heavy_calculation, repo_name, cache_type)
-
-        with cache_context as context:
-            context.invalidate()
-            computed = context.compute()
-
-        assert computed == 'result'
-        """
-        from rhodecode.lib import caches
-        return caches.InvalidationContext(
-            compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
-
-
 
 class ChangesetComment(Base, BaseModel):
     __tablename__ = 'changeset_comments'
@@ -43,7 +43,7 @@ from rhodecode.lib.auth import (
     HasRepoPermissionAny, HasRepoGroupPermissionAny,
     HasUserGroupPermissionAny)
 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
-from rhodecode.lib import hooks_utils, caches
+from rhodecode.lib import hooks_utils
 from rhodecode.lib.utils import (
     get_filesystem_repos, make_db_config)
 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
@@ -269,10 +269,13 @@ class ScmModel(BaseModel):
         :param delete: delete the entry keys instead of setting bool
             flag on them, and also purge caches used by the dogpile
         """
-        CacheKey.set_invalidate(repo_name, delete=delete)
         repo = Repository.get_by_repo_name(repo_name)
 
         if repo:
+            invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+                repo_id=repo.repo_id)
+            CacheKey.set_invalidate(invalidation_namespace, delete=delete)
+
             repo_id = repo.repo_id
             config = repo._config
             config.set('extensions', 'largefiles', '')
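
For context, invalidation is typically reached through ScmModel; assuming this hunk is the upstream mark_for_invalidation method (the method name is not visible in the hunk itself), a call site would look roughly like:

# Hypothetical call site (needs a configured RhodeCode environment);
# the repo name is made up, the delete flag mirrors the docstring above.
from rhodecode.model.scm import ScmModel

ScmModel().mark_for_invalidation('some-group/some-repo', delete=True)
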
@@ -26,7 +26,7 @@ from collections import namedtuple
 from functools import wraps
 import bleach
 
-from rhodecode.lib import caches
+from rhodecode.lib import rc_cache
 from rhodecode.lib.utils2 import (
     Optional, AttributeDict, safe_str, remove_prefix, str2bool)
 from rhodecode.lib.vcs.backends import base
@@ -36,14 +36,14 @@
     <div class="field" >
         <table class="rctable edit_cache">
         <tr>
-        <th>${_('Prefix')}</th>
         <th>${_('Key')}</th>
+        <th>${_('Namespace')}</th>
         <th>${_('Active')}</th>
         </tr>
         %for cache in c.rhodecode_db_repo.cache_keys:
         <tr>
-            <td class="td-prefix">${cache.get_prefix() or '-'}</td>
-            <td class="td-cachekey">${cache.cache_key}</td>
+            <td class="td-prefix"><code>${cache.cache_key}</code></td>
+            <td class="td-cachekey"><code>${cache.cache_args}</code></td>
             <td class="td-active">${h.bool2icon(cache.cache_active)}</td>
         </tr>
         %endfor
@@ -25,7 +25,7 @@ import pytest
 from rhodecode.lib import rc_cache
 
 
-@pytest.mark.usefixtures(
+@pytest.mark.usefixtures('app')
 class TestCaches(object):
 
     def test_cache_decorator_init_not_configured(self):
@@ -30,10 +30,12 @@ import pytest
 
 from rhodecode.tests import no_newline_id_generator
 from rhodecode.tests.utils import run_test_concurrently
-from rhodecode.lib.helpers import InitialsGravatar
 
+from rhodecode.lib import rc_cache
+from rhodecode.lib.helpers import InitialsGravatar
 from rhodecode.lib.utils2 import AttributeDict
-from rhodecode.model.db import Repository
+
+from rhodecode.model.db import Repository, CacheKey
 
 
 def _urls_for_proto(proto):
@@ -558,87 +560,124 @@ def test_get_repo_by_id(test, expected):
     assert _test == expected
 
 
-@pytest.mark.parametrize("test_repo_name, repo_type", [
-    ("test_repo_1", None),
-    ("repo_group/foobar", None),
-    ("test_non_asci_Δ ΔΔ", None),
-    (u"test_non_asci_unicode_Δ ΔΔ", None),
-])
-def test_invalidation_context(baseapp, test_repo_name, repo_type):
-    from beaker.cache import cache_region
-    from rhodecode.lib import caches
-    from rhodecode.model.db import CacheKey
-
-    @cache_region('long_term')
+def test_invalidation_context(baseapp):
+    repo_id = 999
+
+    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+        repo_id, CacheKey.CACHE_TYPE_README)
+    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+        repo_id=repo_id)
+    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+    calls = [1, 2]
+
+    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
     def _dummy_func(cache_key):
-        return 'result'
+        val = calls.pop(0)
+        return 'result:{}'.format(val)
+
+    inv_context_manager = rc_cache.InvalidationContext(
+        uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+
+    # 1st call, fresh caches
+    with inv_context_manager as invalidation_context:
+        should_invalidate = invalidation_context.should_invalidate()
+        if should_invalidate:
+            _dummy_func.invalidate('some-key')
+        result = _dummy_func('some-key')
+
+        assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
+        assert should_invalidate is True
 
-    invalidator_context = CacheKey.repo_context_cache(
-        _dummy_func, test_repo_name, 'repo')
+    assert 'result:1' == result
+    # should be cached so calling it twice will give the same result !
+    result = _dummy_func('some-key')
+    assert 'result:1' == result
 
-    with invalidator_context as context:
-        invalidated = context.invalidate()
-        result = context.compute()
+    # 2nd call, we create a new context manager, this should be now key aware, and
+    # return an active cache region
+    with inv_context_manager as invalidation_context:
+        should_invalidate = invalidation_context.should_invalidate()
+        assert isinstance(invalidation_context, rc_cache.ActiveRegionCache)
+        assert should_invalidate is False
+
+    # Mark invalidation
+    CacheKey.set_invalidate(invalidation_namespace)
 
-    assert invalidated == True
-    assert 'result' == result
-    assert isinstance(context, caches.FreshRegionCache)
-
-    assert 'InvalidationContext' in repr(invalidator_context)
+    # 3nd call, fresh caches
+    with inv_context_manager as invalidation_context:
+        should_invalidate = invalidation_context.should_invalidate()
+        if should_invalidate:
+            _dummy_func.invalidate('some-key')
+        result = _dummy_func('some-key')
 
-    with invalidator_context as context:
-        context.invalidate()
-        result = context.compute()
+        assert isinstance(invalidation_context, rc_cache.FreshRegionCache)
+        assert should_invalidate is True
 
-    assert 'result' == result
-    assert isinstance(context, caches.ActiveRegionCache)
+    assert 'result:2' == result
+
+    # cached again, same result
+    result = _dummy_func('some-key')
+    assert 'result:2' == result
 
 
 def test_invalidation_context_exception_in_compute(baseapp):
-    from rhodecode.model.db import CacheKey
-    from beaker.cache import cache_region
+    repo_id = 888
 
-    @cache_region('long_term')
+    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+        repo_id, CacheKey.CACHE_TYPE_README)
+    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+        repo_id=repo_id)
+    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
+
+    @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
     def _dummy_func(cache_key):
-        # this causes error since it doesn't get any params
-        raise Exception('ups')
-
-    invalidator_context = CacheKey.repo_context_cache(
-        _dummy_func, 'test_repo_2', 'repo')
+        raise Exception('Error in cache func')
 
     with pytest.raises(Exception):
-        with invalidator_context as context:
-            context.invalidate()
-            context.compute()
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+
+        # 1st call, fresh caches
+        with inv_context_manager as invalidation_context:
+            should_invalidate = invalidation_context.should_invalidate()
+            if should_invalidate:
+                _dummy_func.invalidate('some-key-2')
+            _dummy_func('some-key-2')
 
 
 @pytest.mark.parametrize('execution_number', range(5))
 def test_cache_invalidation_race_condition(execution_number, baseapp):
     import time
-    from beaker.cache import cache_region
-    from rhodecode.model.db import CacheKey
+
+    repo_id = 777
 
-    if CacheKey.metadata.bind.url.get_backend_name() == "mysql":
-        reason = (
-            'Fails on MariaDB due to some locking issues. Investigation'
-            ' needed')
-        pytest.xfail(reason=reason)
+    cache_namespace_uid = 'cache_repo_instance.{}_{}'.format(
+        repo_id, CacheKey.CACHE_TYPE_README)
+    invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+        repo_id=repo_id)
+    region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
 
     @run_test_concurrently(25)
     def test_create_and_delete_cache_keys():
         time.sleep(0.2)
 
-        @cache_region('long_term')
+        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
         def _dummy_func(cache_key):
-            return 'result'
+            val = 'async'
+            return 'result:{}'.format(val)
+
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
 
-        invalidator_context = CacheKey.repo_context_cache(
-            _dummy_func, 'test_repo_1', 'repo')
+        # 1st call, fresh caches
+        with inv_context_manager as invalidation_context:
+            should_invalidate = invalidation_context.should_invalidate()
+            if should_invalidate:
+                _dummy_func.invalidate('some-key-3')
+            _dummy_func('some-key-3')
 
-        with invalidator_context as context:
-            context.invalidate()
-            context.compute()
-
-        CacheKey.set_invalidate('test_repo_1', delete=True)
+        # Mark invalidation
+        CacheKey.set_invalidate(invalidation_namespace)
 
     test_create_and_delete_cache_keys()
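
The rewritten tests lean on dogpile's argument-based caching plus explicit invalidate() calls. A minimal, standalone sketch of that behaviour with plain dogpile.cache (not rc_cache; the namespace and key names are illustrative):

# Self-contained sketch mirroring the test logic above: the decorated function
# returns a new value only after an explicit invalidation, otherwise the
# cached result is served.
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')
calls = [1, 2]

@region.cache_on_arguments(namespace='test-ns')
def _dummy_func(cache_key):
    return 'result:{}'.format(calls.pop(0))

assert _dummy_func('some-key') == 'result:1'   # computed
assert _dummy_func('some-key') == 'result:1'   # cached, calls untouched

_dummy_func.invalidate('some-key')             # what should_invalidate() triggers
assert _dummy_func('some-key') == 'result:2'   # recomputed
assert _dummy_func('some-key') == 'result:2'   # cached again
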
@@ -125,11 +125,6 @@ def vcsserver_factory(tmpdir_factory):
         overrides = list(overrides)
         overrides.append({'server:main': {'port': vcsserver_port}})
 
-        if is_cygwin():
-            platform_override = {'DEFAULT': {
-                'beaker.cache.repo_object.type': 'nocache'}}
-            overrides.append(platform_override)
-
         option_name = 'vcsserver_config_http'
         override_option_name = 'vcsserver_config_override'
         config_file = get_config(
@@ -32,6 +32,7 @@ import time
 
 import pytest
 
+from rhodecode.lib import rc_cache
 from rhodecode.model.auth_token import AuthTokenModel
 from rhodecode.model.db import Repository, UserIpMap, CacheKey
 from rhodecode.model.meta import Session
@@ -217,46 +218,44 @@ class TestVCSOperations(object):
 
         _check_proper_git_push(stdout, stderr)
 
-    def test_push_invalidates_cache_hg(self, rc_web_server, tmpdir):
-        key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).scalar()
-        if not key:
-            key = CacheKey(HG_REPO, HG_REPO)
+    def test_push_invalidates_cache(self, rc_web_server, tmpdir):
+        hg_repo = Repository.get_by_repo_name(HG_REPO)
 
-        key.cache_active = True
-        Session().add(key)
-        Session().commit()
+        # init cache objects
+        CacheKey.delete_all_cache()
+        cache_namespace_uid = 'cache_push_test.{}'.format(hg_repo.repo_id)
+        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
+            repo_id=hg_repo.repo_id)
 
-        clone_url = rc_web_server.repo_clone_url(HG_REPO)
+        inv_context_manager = rc_cache.InvalidationContext(
+            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace)
+
+        with inv_context_manager as invalidation_context:
+            # __enter__ will create and register cache objects
+            pass
+
+        # clone to init cache
+        clone_url = rc_web_server.repo_clone_url(hg_repo.repo_name)
         stdout, stderr = Command('/tmp').execute(
             'hg clone', clone_url, tmpdir.strpath)
 
+        cache_keys = hg_repo.cache_keys
+        assert cache_keys != []
+        for key in cache_keys:
+            assert key.cache_active is True
+
+        # PUSH that should trigger invalidation cache
         stdout, stderr = _add_files_and_push(
             'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
 
-        key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).one()
-        assert key.cache_active is False
-
-    def test_push_invalidates_cache_git(self, rc_web_server, tmpdir):
-        key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).scalar()
-        if not key:
-            key = CacheKey(GIT_REPO, GIT_REPO)
-
-        key.cache_active = True
-        Session().add(key)
+        # flush...
         Session().commit()
-
-        clone_url = rc_web_server.repo_clone_url(GIT_REPO)
-        stdout, stderr = Command('/tmp').execute(
-            'git clone', clone_url, tmpdir.strpath)
-
-        # commit some stuff into this repo
-        stdout, stderr = _add_files_and_push(
-            'git', tmpdir.strpath, clone_url=clone_url, files_no=1)
-        _check_proper_git_push(stdout, stderr)
-
-        key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).one()
-
-        assert key.cache_active is False
+        hg_repo = Repository.get_by_repo_name(HG_REPO)
+        cache_keys = hg_repo.cache_keys
+        assert cache_keys != []
+        for key in cache_keys:
+            # keys should be marked as not active
+            assert key.cache_active is False
 
     def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
         clone_url = rc_web_server.repo_clone_url(HG_REPO)
NO CONTENT: file was removed