@@ -345,6 +345,10 @@ rc_cache.cache_repo.arguments.filename =
 #rc_cache.cache_repo.arguments.db = 1
 #rc_cache.cache_repo.arguments.distributed_lock = true
 
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 30
+
 
 ####################################
 ###       BEAKER CACHE          ####
@@ -355,16 +359,12 @@ rc_cache.cache_repo.arguments.filename =
 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memorylru_base
 beaker.cache.long_term.expire = 172800
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memorylru_base
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
-
 
 ####################################
 ###       BEAKER SESSION        ####
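Note: the two hunks above (and the matching pair below for the second .ini file) replace the Beaker-backed sql_cache_short region with a dogpile.cache region driven by the new rc_cache.sql_cache_short.* keys. A rough sketch of how such keys map onto a dogpile region follows; the stock 'dogpile.cache.memory' backend stands in for RhodeCode's 'dogpile.cache.rc.memory_lru' wrapper, which is assumed to behave similarly for this illustration::

    from dogpile.cache import make_region

    # values as they would arrive from the .ini file
    ini_backend = 'dogpile.cache.memory'      # stand-in for the rc backend
    ini_expiration_time = 30

    region = make_region().configure(
        ini_backend, expiration_time=ini_expiration_time)

    @region.cache_on_arguments()
    def heavy_query(param):
        # stands in for an expensive SQL query
        return param * 2

    heavy_query(21)   # computed and stored
    heavy_query(21)   # served from the region for the next 30 seconds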
@@ -320,6 +320,10 @@ rc_cache.cache_repo.arguments.filename =
 #rc_cache.cache_repo.arguments.db = 1
 #rc_cache.cache_repo.arguments.distributed_lock = true
 
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 30
+
 
 ####################################
 ###       BEAKER CACHE          ####
@@ -330,16 +334,12 @@ rc_cache.cache_repo.arguments.filename =
 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 172800
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
-
 
 ####################################
 ###       BEAKER SESSION        ####
@@ -25,7 +25,7 @@ from rhodecode.model.meta import Session
 from rhodecode.model.permission import PermissionModel
 from rhodecode.model.ssh_key import SshKeyModel
 from rhodecode.tests import (
-    TestController, clear_all_caches, assert_session_flash)
+    TestController, clear_cache_regions, assert_session_flash)
 
 
 def route_path(name, params=None, **kwargs):
@@ -221,22 +221,21 @@ class TestAdminPermissionsController(Tes
     def test_index_ips(self):
         self.log_user()
         response = self.app.get(route_path('admin_permissions_ips'))
-        # TODO: Test response...
         response.mustcontain('All IP addresses are allowed')
 
     def test_add_delete_ips(self):
+        clear_cache_regions(['sql_cache_short'])
         self.log_user()
-        clear_all_caches()
 
         # ADD
         default_user_id = User.get_default_user().user_id
         self.app.post(
             route_path('edit_user_ips_add', user_id=default_user_id),
-            params={'new_ip': '
+            params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token})
 
         response = self.app.get(route_path('admin_permissions_ips'))
-        response.mustcontain('
-        response.mustcontain('
+        response.mustcontain('0.0.0.0/24')
+        response.mustcontain('0.0.0.0 - 0.0.0.255')
 
         # DELETE
         default_user_id = User.get_default_user().user_id
@@ -249,11 +248,11 @@ class TestAdminPermissionsController(Tes
 
         assert_session_flash(response, 'Removed ip address from user whitelist')
 
-        clear_all_caches()
+        clear_cache_regions(['sql_cache_short'])
         response = self.app.get(route_path('admin_permissions_ips'))
         response.mustcontain('All IP addresses are allowed')
-        response.mustcontain(no=['
-        response.mustcontain(no=['
+        response.mustcontain(no=['0.0.0.0/24'])
+        response.mustcontain(no=['0.0.0.0 - 0.0.0.255'])
 
     def test_index_overview(self):
         self.log_user()
@@ -22,7 +22,7 @@ import pytest
 
 from rhodecode.lib import helpers as h
 from rhodecode.tests import (
-    TestController, clear_all_caches,
+    TestController, clear_cache_regions,
     TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
 from rhodecode.tests.fixture import Fixture
 from rhodecode.tests.utils import AssertResponse
@@ -64,7 +64,7 @@ class TestPasswordReset(TestController):
     ])
     def test_password_reset_settings(
             self, pwd_reset_setting, show_link, show_reset):
-        clear_all_caches()
+        clear_cache_regions()
         self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
         params = {
             'csrf_token': self.csrf_token,
@@ -31,7 +31,6 @@ from rhodecode.authentication.base impor
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import (
     LoginRequired, HasPermissionAllDecorator, CSRFRequired)
-from rhodecode.lib.caches import clear_cache_manager
 from rhodecode.model.forms import AuthSettingsForm
 from rhodecode.model.meta import Session
 from rhodecode.model.settings import SettingsModel
@@ -427,20 +427,47 @@ def _sanitize_vcs_settings(settings):
 def _sanitize_cache_settings(settings):
     _string_setting(settings, 'cache_dir',
                     os.path.join(tempfile.gettempdir(), 'rc_cache'))
+    # cache_perms
+    _string_setting(
+        settings,
+        'rc_cache.cache_perms.backend',
+        'dogpile.cache.rc.file_namespace')
+    _int_setting(
+        settings,
+        'rc_cache.cache_perms.expiration_time',
+        60)
+    _string_setting(
+        settings,
+        'rc_cache.cache_perms.arguments.filename',
+        os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
 
-    _string_setting(settings, 'rc_cache.cache_perms.backend',
-                    'dogpile.cache.rc.file_namespace')
-    _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
-                 60)
-    _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
-                    os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
+    # cache_repo
+    _string_setting(
+        settings,
+        'rc_cache.cache_repo.backend',
+        'dogpile.cache.rc.file_namespace')
+    _int_setting(
+        settings,
+        'rc_cache.cache_repo.expiration_time',
+        60)
+    _string_setting(
+        settings,
+        'rc_cache.cache_repo.arguments.filename',
+        os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
 
-    _string_setting(settings, 'rc_cache.cache_repo.backend',
-                    'dogpile.cache.rc.file_namespace')
-    _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
-                 60)
-    _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
-                    os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
+    # sql_cache_short
+    _string_setting(
+        settings,
+        'rc_cache.sql_cache_short.backend',
+        'dogpile.cache.rc.memory_lru')
+    _int_setting(
+        settings,
+        'rc_cache.sql_cache_short.expiration_time',
+        30)
+    _int_setting(
+        settings,
+        'rc_cache.sql_cache_short.max_size',
+        10000)
 
 
 def _int_setting(settings, name, default):
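The reflow above is behavior-neutral apart from the new sql_cache_short defaults. As a minimal sketch, assuming _string_setting/_int_setting simply coerce a value and fall back to the given default when the key is absent (the real helpers in this module may also lower-case strings or parse booleans)::

    def _string_setting(settings, name, default):
        settings[name] = str(settings.get(name, default))

    def _int_setting(settings, name, default):
        settings[name] = int(settings.get(name, default))

    settings = {'rc_cache.sql_cache_short.expiration_time': '15'}
    _int_setting(settings, 'rc_cache.sql_cache_short.expiration_time', 30)
    _string_setting(settings, 'rc_cache.sql_cache_short.backend',
                    'dogpile.cache.rc.memory_lru')
    # settings now holds a typed value plus the filled-in backend default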
@@ -23,7 +23,7 @@ import beaker
 import logging
 import threading
 
-from beaker.cache import _cache_decorate, cache_regions, region_invalidate
+from beaker.cache import _cache_decorate, region_invalidate
 from sqlalchemy.exc import IntegrityError
 
 from rhodecode.lib.utils import safe_str, sha1
@@ -84,44 +84,6 @@ def configure_cache_region(
     beaker.cache.cache_regions[region_name] = region_settings
 
 
-def get_cache_manager(region_name, cache_name, custom_ttl=None):
-    """
-    Creates a Beaker cache manager. Such instance can be used like that::
-
-        _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
-        cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
-        _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
-        def heavy_compute():
-            ...
-        result = cache_manager.get(_cache_key, createfunc=heavy_compute)
-
-    :param region_name: region from ini file
-    :param cache_name: custom cache name, usually prefix+repo_name. eg
-        file_switcher_repo1
-    :param custom_ttl: override .ini file timeout on this cache
-    :return: instance of cache manager
-    """
-
-    cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
-    if custom_ttl:
-        log.debug('Updating region %s with custom ttl: %s',
-                  region_name, custom_ttl)
-        cache_config.update({'expire': custom_ttl})
-
-    return beaker.cache.Cache._get_cache(cache_name, cache_config)
-
-
-def clear_cache_manager(cache_manager):
-    """
-    namespace = 'foobar'
-    cache_manager = get_cache_manager('some_namespace_name', namespace)
-    clear_cache_manager(cache_manager)
-    """
-
-    log.debug('Clearing all values for cache manager %s', cache_manager)
-    cache_manager.clear()
-
-
 def compute_key_from_params(*args):
     """
     Helper to compute key from given params to be used in cache manager
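With get_cache_manager()/clear_cache_manager() gone, callers move to dogpile regions. A hedged sketch of the equivalent dogpile.cache idiom for the pattern shown in the deleted docstring (stock memory backend used here instead of RhodeCode's ini-driven region setup)::

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=60)

    def heavy_compute():
        return 'expensive result'

    cache_key = 'repo1_deadbeef'
    # get_or_create() plays the role of cache_manager.get(key, createfunc=...)
    result = region.get_or_create(cache_key, heavy_compute)

    # invalidating the region plays the role of clear_cache_manager()
    region.invalidate()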
@@ -18,16 +18,16 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-"""
-caching_query.py
+"""caching_query.py
 
-Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.
+Represent functions and classes
+which allow the usage of Dogpile caching with SQLAlchemy.
+Introduces a query option called FromCache.
 
 The three new concepts introduced here are:
 
 * CachingQuery - a Query subclass that caches and
-  retrieves results in/from Beaker.
+  retrieves results in/from dogpile.cache.
 * FromCache - a query option that establishes caching
   parameters on a Query
 * RelationshipCache - a variant of FromCache which is specific
@@ -36,57 +36,44 @@ The three new concepts introduced here a
   a Query.
 
 The rest of what's here are standard SQLAlchemy and
-Beaker constructs.
+dogpile.cache constructs.
 
 """
-import beaker
-from beaker.exceptions import BeakerException
-
 from sqlalchemy.orm.interfaces import MapperOption
 from sqlalchemy.orm.query import Query
 from sqlalchemy.sql import visitors
+from dogpile.cache.api import NO_VALUE
 
 from rhodecode.lib.utils2 import safe_str
 
 
 class CachingQuery(Query):
-    """A Query subclass which optionally loads full results from a Beaker
+    """A Query subclass which optionally loads full results from a dogpile
     cache region.
 
-    The CachingQuery stores additional state that allows it to consult
-    a
-
-    * A "region", which is a cache region argument passed to a
-      Beaker CacheManager, specifies a particular cache configuration
-      (including backend implementation, expiration times, etc.)
-    * A "namespace", which is a qualifying name that identifies a
-      group of keys within the cache. A query that filters on a name
-      might use the name "by_name", a query that filters on a date range
-      to a joined table might use the name "related_date_range".
-
-    When the above state is present, a Beaker cache is retrieved.
-
-    The "namespace" name is first concatenated with
-    a string composed of the individual entities and columns the Query
-    requests, i.e. such as ``Query(User.id, User.name)``.
-
-    The Beaker cache is then loaded from the cache manager based
-    on the region and composed namespace. The key within the cache
-    itself is then constructed against the bind parameters specified
-    by this query, which are usually literals defined in the
-    WHERE clause.
+    The CachingQuery optionally stores additional state that allows it to consult
+    a dogpile.cache cache before accessing the database, in the form
+    of a FromCache or RelationshipCache object. Each of these objects
+    refer to the name of a :class:`dogpile.cache.Region` that's been configured
+    and stored in a lookup dictionary. When such an object has associated
+    itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
+    is used to locate a cached result. If none is present, then the
+    Query is invoked normally, the results being cached.
 
     The FromCache and RelationshipCache mapper options below represent
     the "public" method of configuring this state upon the CachingQuery.
 
     """
+    def _get_region(self):
+        from rhodecode.lib.rc_cache import region_meta
+        return region_meta.dogpile_cache_regions
 
-    def __init__(self, manager, *args, **kw):
-        self.cache_manager = manager
+    def __init__(self, regions, *args, **kw):
+        self.cache_regions = regions or self._get_region()
         Query.__init__(self, *args, **kw)
 
     def __iter__(self):
-        """override __iter__ to pull results from Beaker
+        """override __iter__ to pull results from dogpile
         if particular attributes have been configured.
 
         Note that this approach does *not* detach the loaded objects from
@@ -98,109 +85,121 @@ class CachingQuery(Query):
         in the cache are not the same ones in the current Session.
 
         """
-        if hasattr(self, '_cache_parameters'):
+        super_ = super(CachingQuery, self)
+
+        if hasattr(self, '_cache_region'):
+            return self.get_value(createfunc=lambda: list(super_.__iter__()))
+        else:
+            return super_.__iter__()
+
+    def _execute_and_instances(self, context):
+        """override _execute_and_instances to pull results from dogpile
+        if the query is invoked directly from an external context.
+
+        This method is necessary in order to maintain compatibility
+        with the "baked query" system now used by default in some
+        relationship loader scenarios. Note also the
+        RelationshipCache._generate_cache_key method which enables
+        the baked query to be used within lazy loads.
 
-            def caching_query():
-                return list(Query.__iter__(self))
+        .. versionadded:: 1.2.7
+        """
+        super_ = super(CachingQuery, self)
 
-            return self.get_value(createfunc=caching_query)
+        if context.query is not self and hasattr(self, '_cache_region'):
+            # special logic called when the Query._execute_and_instances()
+            # method is called directly from the baked query
+            return self.get_value(
+                createfunc=lambda: list(
+                    super_._execute_and_instances(context)
+                )
+            )
         else:
-            return Query.__iter__(self)
+            return super_._execute_and_instances(context)
+
+    def _get_cache_plus_key(self):
+        """Return a cache region plus key."""
+        dogpile_region = self.cache_regions[self._cache_region.region]
+        if self._cache_region.cache_key:
+            key = self._cache_region.cache_key
+        else:
+            key = _key_from_query(self)
+        return dogpile_region, key
 
     def invalidate(self):
-        """Invalidate the value represented by this Query."""
+        """Invalidate the cache value represented by this Query."""
 
-        cache, cache_key = _get_cache_parameters(self)
-        cache.remove_value(cache_key)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+        dogpile_region.delete(cache_key)
 
-    def get_value(self, merge=True, createfunc=None):
+    def get_value(self, merge=True, createfunc=None,
+                  expiration_time=None, ignore_expiration=False):
         """Return the value from the cache for this query.
 
         Raise KeyError if no value present and no
         createfunc specified.
 
         """
-        cache, cache_key = _get_cache_parameters(self)
-        ret = cache.get_value(cache_key, createfunc=createfunc)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+
+        # ignore_expiration means, if the value is in the cache
+        # but is expired, return it anyway. This doesn't make sense
+        # with createfunc, which says, if the value is expired, generate
+        # a new value.
+        assert not ignore_expiration or not createfunc, \
+            "Can't ignore expiration and also provide createfunc"
+
+        if ignore_expiration or not createfunc:
+            cached_value = dogpile_region.get(cache_key,
+                                              expiration_time=expiration_time,
+                                              ignore_expiration=ignore_expiration)
+        else:
+            cached_value = dogpile_region.get_or_create(
+                cache_key,
+                createfunc,
+                expiration_time=expiration_time
+            )
+        if cached_value is NO_VALUE:
+            raise KeyError(cache_key)
         if merge:
-            ret = self.merge_result(ret, load=False)
-        return ret
+            cached_value = self.merge_result(cached_value, load=False)
+        return cached_value
 
     def set_value(self, value):
         """Set the value in the cache for this query."""
 
-        cache, cache_key = _get_cache_parameters(self)
-        cache.put(cache_key, value)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+        dogpile_region.set(cache_key, value)
 
 
-def query_callable(manager, query_cls=CachingQuery):
+def query_callable(regions=None, query_cls=CachingQuery):
     def query(*arg, **kw):
-        return query_cls(manager, *arg, **kw)
+        return query_cls(regions, *arg, **kw)
     return query
 
 
-def get_cache_region(name, region):
-    if region not in beaker.cache.cache_regions:
-        raise BeakerException('Cache region `%s` not configured '
-            'Check if proper cache settings are in the .ini files' % region)
-    kw = beaker.cache.cache_regions[region]
-    return beaker.cache.Cache._get_cache(name, kw)
+def _key_from_query(query, qualifier=None):
+    """Given a Query, create a cache key.
 
-
-def _get_cache_parameters(query):
-    """For a query with cache_region and cache_namespace configured,
-    return the correspoinding Cache instance and cache key, based
-    on this query's current criterion and parameter values.
+    There are many approaches to this; here we use the simplest,
+    which is to create an md5 hash of the text of the SQL statement,
+    combined with stringified versions of all the bound parameters
+    within it. There's a bit of a performance hit with
+    compiling out "query.statement" here; other approaches include
+    setting up an explicit cache key with a particular Query,
+    then combining that with the bound parameter values.
 
     """
-    if not hasattr(query, '_cache_parameters'):
-        raise ValueError("This Query does not have caching "
-                         "parameters configured.")
 
-    region, namespace, cache_key = query._cache_parameters
-
-    namespace = _namespace_from_query(namespace, query)
-
-    if cache_key is None:
-        # cache key - the value arguments from this query's parameters.
-        args = [safe_str(x) for x in _params_from_query(query)]
-        args.extend(filter(lambda k: k not in ['None', None, u'None'],
-                           [str(query._limit), str(query._offset)]))
-
-        cache_key = " ".join(args)
-
-    if cache_key is None:
-        raise Exception('Cache key cannot be None')
+    stmt = query.with_labels().statement
+    compiled = stmt.compile()
+    params = compiled.params
 
-    # get cache
-    #cache = query.cache_manager.get_cache_region(namespace, region)
-    cache = get_cache_region(namespace, region)
-    # optional - hash the cache_key too for consistent length
-    # import uuid
-    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
-
-    return cache, cache_key
-
-
-def _namespace_from_query(namespace, query):
-    # cache namespace - the token handed in by the
-    # option + class we're querying against
-    namespace = " ".join([namespace] + [str(x) for x in query._entities])
-
-    # memcached wants this
-    namespace = namespace.replace(' ', '_')
-
-    return namespace
-
-
-def _set_cache_parameters(query, region, namespace, cache_key):
-
-    if hasattr(query, '_cache_parameters'):
-        region, namespace, cache_key = query._cache_parameters
-        raise ValueError("This query is already configured "
-                         "for region %r namespace %r" %
-                         (region, namespace))
-    query._cache_parameters = region, namespace, cache_key
+    # here we return the key as a long string. our "key mangler"
+    # set up with the region will boil it down to an md5.
+    return " ".join(
+        [safe_str(compiled)] +
+        [safe_str(params[k]) for k in sorted(params)])
 
 
 class FromCache(MapperOption):
@@ -208,15 +207,12 @@ class FromCache(MapperOption):
 
     propagate_to_loaders = False
 
-    def __init__(self, region, namespace, cache_key=None):
+    def __init__(self, region="sql_cache_short", cache_key=None):
         """Construct a new FromCache.
 
         :param region: the cache region. Should be a
-        region configured in the Beaker CacheManager.
-
-        :param namespace: the cache namespace. Should
-        be a name uniquely describing the target Query's
-        lexical structure.
+        region configured in the dictionary of dogpile
+        regions.
 
         :param cache_key: optional. A string cache key
         that will serve as the key to the query. Use this
@@ -226,14 +222,11 @@ class FromCache(MapperOption):
 
         """
         self.region = region
-        self.namespace = namespace
         self.cache_key = cache_key
 
     def process_query(self, query):
         """Process a Query during normal loading operation."""
-
-        _set_cache_parameters(query, self.region, self.namespace,
-                              self.cache_key)
+        query._cache_region = self
 
 
 class RelationshipCache(MapperOption):
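With the namespace argument dropped, FromCache now takes just a region name and an optional explicit key. A sketch of how a caller would use the reworked option; the module paths are taken from elsewhere in this change, and the query and key are illustrative only::

    from rhodecode.lib.caching_query import FromCache
    from rhodecode.model.db import User
    from rhodecode.model.meta import Session

    query = Session().query(User).options(
        FromCache("sql_cache_short", "get_all_users"))

    users = query.all()   # first call runs the SQL and stores the rows
    users = query.all()   # later calls are served from the region
    query.invalidate()    # drops the cached value for exactly this query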
@@ -242,27 +235,39 @@ class RelationshipCache(MapperOption):
 
     propagate_to_loaders = True
 
-    def __init__(self, region, namespace, attribute):
+    def __init__(self, attribute, region="sql_cache_short", cache_key=None):
         """Construct a new RelationshipCache.
 
-        :param region: the cache region. Should be a
-        region configured in the Beaker CacheManager.
-
-        :param namespace: the cache namespace. Should
-        be a name uniquely describing the target Query's
-        lexical structure.
-
         :param attribute: A Class.attribute which
         indicates a particular class relationship() whose
         lazy loader should be pulled from the cache.
 
+        :param region: name of the cache region.
+
+        :param cache_key: optional. A string cache key
+        that will serve as the key to the query, bypassing
+        the usual means of forming a key from the Query itself.
+
         """
         self.region = region
-        self.namespace = namespace
+        self.cache_key = cache_key
         self._relationship_options = {
             (attribute.property.parent.class_, attribute.property.key): self
         }
 
+    def _generate_cache_key(self, path):
+        """Indicate to the lazy-loader strategy that a "baked" query
+        may be used by returning ``None``.
+
+        If this method is omitted, the default implementation of
+        :class:`.MapperOption._generate_cache_key` takes place, which
+        returns ``False`` to disable the "baked" query from being used.
+
+        .. versionadded:: 1.2.7
+
+        """
+        return None
+
     def process_query_conditionally(self, query):
         """Process a Query that is used within a lazy loader.
 
@@ -271,17 +276,14 @@ class RelationshipCache(MapperOption):
 
         """
         if query._current_path:
-            mapper, key = query._current_path[-2:]
+            mapper, prop = query._current_path[-2:]
+            key = prop.key
 
             for cls in mapper.class_.__mro__:
                 if (cls, key) in self._relationship_options:
-                    relationship_option = \
-                        self._relationship_options[(cls, key)]
-                    _set_cache_parameters(
-                        query,
-                        relationship_option.region,
-                        relationship_option.namespace,
-                        None)
+                    relationship_option = self._relationship_options[(cls, key)]
+                    query._cache_region = relationship_option
+                    break
 
     def and_(self, option):
         """Chain another RelationshipCache option to this one.
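RelationshipCache now takes the attribute first and defaults to the sql_cache_short region. A sketch of intended usage, where Repository.followers is a hypothetical relationship used purely for illustration::

    from rhodecode.lib.caching_query import FromCache, RelationshipCache
    from rhodecode.model.db import Repository
    from rhodecode.model.meta import Session

    cache_followers = RelationshipCache(Repository.followers, "sql_cache_short")

    repo = Session().query(Repository).options(
        FromCache("sql_cache_short", "repo_by_id_1"),
        cache_followers).first()
    # lazy-loading repo.followers now consults the region; returning None from
    # _generate_cache_key keeps the baked-query lazy-load path usable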
@@ -294,32 +296,3 @@ class RelationshipCache(MapperOption):
         self._relationship_options.update(option._relationship_options)
         return self
 
-
-def _params_from_query(query):
-    """Pull the bind parameter values from a query.
-
-    This takes into account any scalar attribute bindparam set up.
-
-    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
-    would return [5, 7].
-
-    """
-    v = []
-    def visit_bindparam(bind):
-
-        if bind.key in query._params:
-            value = query._params[bind.key]
-        elif bind.callable:
-            # lazyloader may dig a callable in here, intended
-            # to late-evaluate params after autoflush is called.
-            # convert to a scalar value.
-            value = bind.callable()
-        else:
-            value = bind.value
-
-        v.append(value)
-    if query._criterion is not None:
-        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
-    for f in query._from_obj:
-        visitors.traverse(f, {}, {'bindparam':visit_bindparam})
-    return v
@@ -690,7 +690,7 @@ class User(Base, BaseModel):
             .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
         if cache:
             feed_tokens = feed_tokens.options(
-                FromCache("
+                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
 
         feed_tokens = feed_tokens.all()
         if feed_tokens:
@@ -24,27 +24,22 @@ SQLAlchemy Metadata and Session object
 
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
-from beaker import cache
 
 from rhodecode.lib import caching_query
 
-
-# Beaker CacheManager. A home base for cache configurations.
-cache_manager = cache.CacheManager()
+__all__ = ['Base', 'Session']
 
-__all__ = ['Base', 'Session']
-#
-# SQLAlchemy session manager. Updated by model.init_model()
-#
+# scoped_session. Apply our custom CachingQuery class to it,
+# using a callable that will associate the dictionary
+# of regions with the Query.
+# to use cache use this in query
+# .options(FromCache("sqlalchemy_cache_type", "cachekey"))
 Session = scoped_session(
     sessionmaker(
-        query_cls=caching_query.query_callable(cache_manager),
+        query_cls=caching_query.query_callable(),
         expire_on_commit=True,
     )
 )
 
 # The declarative Base
 Base = declarative_base()
-
-#to use cache use this in query
-#.options(FromCache("sqlalchemy_cache_type", "cachekey"))
@@ -21,11 +21,12 @@
 import os
 import hashlib
 import logging
+import time
 from collections import namedtuple
 from functools import wraps
 import bleach
 
-from rhodecode.lib import caches
+from rhodecode.lib import caches, rc_cache
 from rhodecode.lib.utils2 import (
     Optional, AttributeDict, safe_str, remove_prefix, str2bool)
 from rhodecode.lib.vcs.backends import base
@@ -206,13 +207,17 @@ class SettingsModel(BaseModel):
         return res
 
     def invalidate_settings_cache(self):
-        namespace = 'rhodecode_settings'
-        cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
-        caches.clear_cache_manager(cache_manager)
+        # NOTE:(marcink) we flush the whole sql_cache_short region, because it
+        # reads different settings etc. It's little too much but those caches are
+        # anyway very short lived and it's a safest way.
+        region = rc_cache.get_or_create_region('sql_cache_short')
+        region.invalidate()
 
     def get_all_settings(self, cache=False):
+        region = rc_cache.get_or_create_region('sql_cache_short')
 
-        def _compute():
+        @region.cache_on_arguments(should_cache_fn=lambda v: cache)
+        def _get_all_settings(name, key):
             q = self._get_settings_query()
             if not q:
                 raise Exception('Could not get application settings !')
@@ -223,20 +228,14 @@ class SettingsModel(BaseModel):
             }
             return settings
 
-        if cache:
-            log.debug('Fetching app settings using cache')
-            repo = self._get_repo(self.repo) if self.repo else None
-            namespace = 'rhodecode_settings'
-            cache_manager = caches.get_cache_manager(
-                'sql_cache_short', namespace)
-            _cache_key = (
-                "get_repo_{}_settings".format(repo.repo_id)
-                if repo else "get_app_settings")
+        repo = self._get_repo(self.repo) if self.repo else None
+        key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"
+        start = time.time()
+        result = _get_all_settings('rhodecode_settings', key)
+        total = time.time() - start
+        log.debug('Fetching app settings for key: %s took: %.3fs', key, total)
 
-            return cache_manager.get(_cache_key, createfunc=_compute)
-
-        else:
-            return _compute()
+        return result
 
     def get_auth_settings(self):
         q = self._get_settings_query()
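The rewritten get_all_settings() leans on dogpile's cache_on_arguments decorator: the cache key is derived from the decorated function's name and arguments, and should_cache_fn decides whether a freshly computed value is stored (the cache=False path). A standalone sketch of that idiom, using a stock memory backend rather than RhodeCode's region loader::

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=30)

    def get_all_settings(cache=False):
        @region.cache_on_arguments(should_cache_fn=lambda v: cache)
        def _get_all_settings(name, key):
            print('computing settings for %s' % key)
            return {'title': 'RhodeCode'}

        return _get_all_settings('rhodecode_settings', 'settings_app')

    get_all_settings(cache=True)   # computes and stores under the args-based key
    get_all_settings(cache=True)   # served from the region, no recompute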
@@ -41,7 +41,7 @@ log = logging.getLogger(__name__)
 
 __all__ = [
     'get_new_dir', 'TestController',
-    'link_to', 'clear_all_caches',
+    'link_to', 'clear_cache_regions',
     'assert_session_flash', 'login_user', 'no_newline_id_generator',
     'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
     'NEW_HG_REPO', 'NEW_GIT_REPO',
@@ -95,10 +95,12 @@ TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, '
 TEST_REPO_PREFIX = 'vcs-test'
 
 
-def clear_all_caches():
-    from beaker.cache import cache_managers
-    for _cache in cache_managers.values():
-        _cache.clear()
+def clear_cache_regions(regions=None):
+    # dogpile
+    from rhodecode.lib.rc_cache import region_meta
+    for region_name, region in region_meta.dogpile_cache_regions.items():
+        if not regions or region_name in regions:
+            region.invalidate()
 
 
 def get_new_dir(title):
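Inside the test suite the new helper is used exactly as in the hunks earlier in this change: flush a single region before asserting on a response, or all of them::

    from rhodecode.tests import clear_cache_regions

    clear_cache_regions(['sql_cache_short'])  # only the SQL query cache
    clear_cache_regions()                     # no argument: every dogpile region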
@@ -70,8 +70,7 @@ def disable_hooks(request, hooks):
     ui_settings.invalidate()
 
     ui_settings = session.query(db.RhodeCodeUi).options(
-        caching_query.FromCache(
-            'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
+        caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
     ui_settings.invalidate()
 
     @request.addfinalizer
@@ -292,15 +292,12 @@ cache_dir = %(here)s/data
 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 36000
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 1
-beaker.cache.sql_cache_short.key_length = 256
 
 #####################################
 ###       DOGPILE CACHE          ####
@@ -311,6 +308,12 @@ rc_cache.cache_perms.backend = dogpile.c
 rc_cache.cache_perms.expiration_time = 0
 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
 
+
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 0
+
+
 ####################################
 ###       BEAKER SESSION        ####
 ####################################