@@ -345,6 +345,10 @@ rc_cache.cache_repo.arguments.filename =
 #rc_cache.cache_repo.arguments.db = 1
 #rc_cache.cache_repo.arguments.distributed_lock = true
 
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 30
+
 
 ####################################
 ### BEAKER CACHE ####
@@ -355,16 +359,12 @@ rc_cache.cache_repo.arguments.filename =
 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memorylru_base
 beaker.cache.long_term.expire = 172800
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memorylru_base
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
-
 
 ####################################
 ### BEAKER SESSION ####
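For illustration, a minimal sketch (not part of this changeset) of how the new
rc_cache.sql_cache_short keys map onto a dogpile.cache region. The
dogpile.cache.rc.memory_lru backend is RhodeCode-specific, so the stock
dogpile.cache.memory backend stands in for it here; everything else is plain
dogpile.cache API:

    from dogpile.cache import make_region

    # keys mirror the new rc_cache.* section above; the RhodeCode-specific
    # backend is replaced by the stock memory backend for this standalone sketch
    config = {
        'rc_cache.sql_cache_short.backend': 'dogpile.cache.memory',
        'rc_cache.sql_cache_short.expiration_time': '30',
    }

    region = make_region().configure_from_config(
        config, 'rc_cache.sql_cache_short.')

    @region.cache_on_arguments()
    def expensive_lookup(key):
        # stands in for a slow SQL query
        return key.upper()

    expensive_lookup('abc')  # computed, then cached for 30 seconds
    expensive_lookup('abc')  # served from the region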
@@ -320,6 +320,10 @@ rc_cache.cache_repo.arguments.filename =
 #rc_cache.cache_repo.arguments.db = 1
 #rc_cache.cache_repo.arguments.distributed_lock = true
 
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 30
+
 
 ####################################
 ### BEAKER CACHE ####
@@ -330,16 +334,12 @@ rc_cache.cache_repo.arguments.filename =
 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 172800
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 10
-beaker.cache.sql_cache_short.key_length = 256
-
 
 ####################################
 ### BEAKER SESSION ####
@@ -25,7 +25,7 @@ from rhodecode.model.meta import Session
 from rhodecode.model.permission import PermissionModel
 from rhodecode.model.ssh_key import SshKeyModel
 from rhodecode.tests import (
-    TestController, clear_all_caches, assert_session_flash)
+    TestController, clear_cache_regions, assert_session_flash)
 
 
 def route_path(name, params=None, **kwargs):
@@ -221,22 +221,21 @@ class TestAdminPermissionsController(Tes
     def test_index_ips(self):
         self.log_user()
         response = self.app.get(route_path('admin_permissions_ips'))
-        # TODO: Test response...
         response.mustcontain('All IP addresses are allowed')
 
     def test_add_delete_ips(self):
+        clear_cache_regions(['sql_cache_short'])
         self.log_user()
-        clear_all_caches()
 
         # ADD
         default_user_id = User.get_default_user().user_id
         self.app.post(
             route_path('edit_user_ips_add', user_id=default_user_id),
-            params={'new_ip': '
+            params={'new_ip': '0.0.0.0/24', 'csrf_token': self.csrf_token})
 
         response = self.app.get(route_path('admin_permissions_ips'))
-        response.mustcontain('
-        response.mustcontain('
+        response.mustcontain('0.0.0.0/24')
+        response.mustcontain('0.0.0.0 - 0.0.0.255')
 
         # DELETE
         default_user_id = User.get_default_user().user_id
@@ -249,11 +248,11 @@ class TestAdminPermissionsController(Tes
 
         assert_session_flash(response, 'Removed ip address from user whitelist')
 
-        clear_all_caches()
+        clear_cache_regions(['sql_cache_short'])
         response = self.app.get(route_path('admin_permissions_ips'))
         response.mustcontain('All IP addresses are allowed')
-        response.mustcontain(no=['
-        response.mustcontain(no=['
+        response.mustcontain(no=['0.0.0.0/24'])
+        response.mustcontain(no=['0.0.0.0 - 0.0.0.255'])
 
     def test_index_overview(self):
         self.log_user()
@@ -22,7 +22,7 @@ import pytest
 
 from rhodecode.lib import helpers as h
 from rhodecode.tests import (
-    TestController, clear_all_caches,
+    TestController, clear_cache_regions,
     TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
 from rhodecode.tests.fixture import Fixture
 from rhodecode.tests.utils import AssertResponse
@@ -64,7 +64,7 @@ class TestPasswordReset(TestController):
     ])
     def test_password_reset_settings(
             self, pwd_reset_setting, show_link, show_reset):
-        clear_all_caches()
+        clear_cache_regions()
         self.log_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
         params = {
             'csrf_token': self.csrf_token,
@@ -31,7 +31,6 @@ from rhodecode.authentication.base impor
 from rhodecode.lib import helpers as h
 from rhodecode.lib.auth import (
     LoginRequired, HasPermissionAllDecorator, CSRFRequired)
-from rhodecode.lib.caches import clear_cache_manager
 from rhodecode.model.forms import AuthSettingsForm
 from rhodecode.model.meta import Session
 from rhodecode.model.settings import SettingsModel
@@ -427,20 +427,47 @@ def _sanitize_vcs_settings(settings):
 def _sanitize_cache_settings(settings):
     _string_setting(settings, 'cache_dir',
                     os.path.join(tempfile.gettempdir(), 'rc_cache'))
+    # cache_perms
+    _string_setting(
+        settings,
+        'rc_cache.cache_perms.backend',
+        'dogpile.cache.rc.file_namespace')
+    _int_setting(
+        settings,
+        'rc_cache.cache_perms.expiration_time',
+        60)
+    _string_setting(
+        settings,
+        'rc_cache.cache_perms.arguments.filename',
+        os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
 
-    _string_setting(settings, 'rc_cache.cache_perms.backend',
-                    'dogpile.cache.rc.file_namespace')
-    _int_setting(settings, 'rc_cache.cache_perms.expiration_time',
-                 60)
-    _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
-                    os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
+    # cache_repo
+    _string_setting(
+        settings,
+        'rc_cache.cache_repo.backend',
+        'dogpile.cache.rc.file_namespace')
+    _int_setting(
+        settings,
+        'rc_cache.cache_repo.expiration_time',
+        60)
+    _string_setting(
+        settings,
+        'rc_cache.cache_repo.arguments.filename',
+        os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
 
-    _string_setting(settings, 'rc_cache.cache_repo.backend',
-                    'dogpile.cache.rc.file_namespace')
-    _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
-                 60)
-    _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
-                    os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
+    # sql_cache_short
+    _string_setting(
+        settings,
+        'rc_cache.sql_cache_short.backend',
+        'dogpile.cache.rc.memory_lru')
+    _int_setting(
+        settings,
+        'rc_cache.sql_cache_short.expiration_time',
+        30)
+    _int_setting(
+        settings,
+        'rc_cache.sql_cache_short.max_size',
+        10000)
 
 
 def _int_setting(settings, name, default):
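The _string_setting/_int_setting helpers themselves are not part of this diff.
Purely to illustrate the default-plus-coercion idea (.ini values arrive as
strings, while dogpile expects an integer expiration_time), here is a
hypothetical minimal version; the real RhodeCode helpers may differ:

    # hypothetical stand-ins for the helpers used above
    def _int_setting(settings, name, default):
        settings[name] = int(settings.get(name, default))

    def _string_setting(settings, name, default):
        settings[name] = str(settings.get(name, default))

    settings = {'rc_cache.sql_cache_short.expiration_time': '30'}
    _int_setting(settings, 'rc_cache.sql_cache_short.expiration_time', 30)
    _string_setting(settings, 'rc_cache.sql_cache_short.backend',
                    'dogpile.cache.rc.memory_lru')
    assert settings['rc_cache.sql_cache_short.expiration_time'] == 30
    assert settings['rc_cache.sql_cache_short.backend'] == 'dogpile.cache.rc.memory_lru'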
@@ -23,7 +23,7 @@ import beaker
 import logging
 import threading
 
-from beaker.cache import _cache_decorate, cache_regions, region_invalidate
+from beaker.cache import _cache_decorate, region_invalidate
 from sqlalchemy.exc import IntegrityError
 
 from rhodecode.lib.utils import safe_str, sha1
@@ -84,44 +84,6 @@ def configure_cache_region(
     beaker.cache.cache_regions[region_name] = region_settings
 
 
-def get_cache_manager(region_name, cache_name, custom_ttl=None):
-    """
-    Creates a Beaker cache manager. Such instance can be used like that::
-
-        _namespace = caches.get_repo_namespace_key(caches.XXX, repo_name)
-        cache_manager = caches.get_cache_manager('some_namespace_name', _namespace)
-        _cache_key = caches.compute_key_from_params(repo_name, commit.raw_id)
-        def heavy_compute():
-            ...
-        result = cache_manager.get(_cache_key, createfunc=heavy_compute)
-
-    :param region_name: region from ini file
-    :param cache_name: custom cache name, usually prefix+repo_name. eg
-        file_switcher_repo1
-    :param custom_ttl: override .ini file timeout on this cache
-    :return: instance of cache manager
-    """
-
-    cache_config = cache_regions.get(region_name, DEFAULT_CACHE_MANAGER_CONFIG)
-    if custom_ttl:
-        log.debug('Updating region %s with custom ttl: %s',
-                  region_name, custom_ttl)
-        cache_config.update({'expire': custom_ttl})
-
-    return beaker.cache.Cache._get_cache(cache_name, cache_config)
-
-
-def clear_cache_manager(cache_manager):
-    """
-    namespace = 'foobar'
-    cache_manager = get_cache_manager('some_namespace_name', namespace)
-    clear_cache_manager(cache_manager)
-    """
-
-    log.debug('Clearing all values for cache manager %s', cache_manager)
-    cache_manager.clear()
-
-
 def compute_key_from_params(*args):
     """
     Helper to compute key from given params to be used in cache manager
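The removed get_cache_manager()/createfunc pattern maps onto dogpile's
get_or_create(). A standalone sketch of the equivalent flow is shown below;
the region setup and key layout are illustrative assumptions, not the
changeset's own helpers:

    import hashlib

    from dogpile.cache import make_region

    # illustrative region; RhodeCode configures its regions from the .ini instead
    region = make_region().configure('dogpile.cache.memory', expiration_time=60)

    def compute_key_from_params(*args):
        # simplified stand-in for the compute_key_from_params helper kept above
        return hashlib.sha1('_'.join(str(a) for a in args).encode('utf8')).hexdigest()

    def heavy_compute(repo_name, commit_id):
        return 'rendered readme for %s@%s' % (repo_name, commit_id)

    cache_key = compute_key_from_params('repo1', 'abcdef0')
    # replaces cache_manager.get(_cache_key, createfunc=heavy_compute)
    result = region.get_or_create(
        cache_key, lambda: heavy_compute('repo1', 'abcdef0'))
    # replaces clear_cache_manager(cache_manager)
    region.invalidate()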
@@ -18,16 +18,16 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 
-"""
-caching_query.py
+"""caching_query.py
 
-Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.
+Represent functions and classes
+which allow the usage of Dogpile caching with SQLAlchemy.
+Introduces a query option called FromCache.
 
 The three new concepts introduced here are:
 
 * CachingQuery - a Query subclass that caches and
-   retrieves results in/from Beaker.
+   retrieves results in/from dogpile.cache.
 * FromCache - a query option that establishes caching
    parameters on a Query
 * RelationshipCache - a variant of FromCache which is specific
@@ -36,57 +36,44 @@ The three new concepts introduced here a
    a Query.
 
 The rest of what's here are standard SQLAlchemy and
-Beaker constructs.
+dogpile.cache constructs.
 
 """
-import beaker
-from beaker.exceptions import BeakerException
-
 from sqlalchemy.orm.interfaces import MapperOption
 from sqlalchemy.orm.query import Query
 from sqlalchemy.sql import visitors
+from dogpile.cache.api import NO_VALUE
 
 from rhodecode.lib.utils2 import safe_str
 
 
 class CachingQuery(Query):
-    """A Query subclass which optionally loads full results from a Beaker
+    """A Query subclass which optionally loads full results from a dogpile
     cache region.
 
-    The CachingQuery stores additional state that allows it to consult
-    a Beaker cache before accessing the database, in the form
-
-    * A "region", which is a cache region argument passed to a
-      Beaker CacheManager, specifies a particular cache configuration
-      (including backend implementation, expiration times, etc.)
-    * A "namespace", which is a qualifying name that identifies a
-      group of keys within the cache. A query that filters on a name
-      might use the name "by_name", a query that filters on a date range
-      to a joined table might use the name "related_date_range".
-
-    When the above state is present, a Beaker cache is retrieved.
-
-    The "namespace" name is first concatenated with
-    a string composed of the individual entities and columns the Query
-    requests, i.e. such as ``Query(User.id, User.name)``.
-
-    The Beaker cache is then loaded from the cache manager based
-    on the region and composed namespace. The key within the cache
-    itself is then constructed against the bind parameters specified
-    by this query, which are usually literals defined in the
-    WHERE clause.
+    The CachingQuery optionally stores additional state that allows it to consult
+    a dogpile.cache cache before accessing the database, in the form
+    of a FromCache or RelationshipCache object. Each of these objects
+    refer to the name of a :class:`dogpile.cache.Region` that's been configured
+    and stored in a lookup dictionary. When such an object has associated
+    itself with the CachingQuery, the corresponding :class:`dogpile.cache.Region`
+    is used to locate a cached result. If none is present, then the
+    Query is invoked normally, the results being cached.
 
     The FromCache and RelationshipCache mapper options below represent
     the "public" method of configuring this state upon the CachingQuery.
 
     """
+    def _get_region(self):
+        from rhodecode.lib.rc_cache import region_meta
+        return region_meta.dogpile_cache_regions
 
-    def __init__(self, manager, *args, **kw):
-        self.cache_manager = manager
+    def __init__(self, regions, *args, **kw):
+        self.cache_regions = regions or self._get_region()
         Query.__init__(self, *args, **kw)
 
     def __iter__(self):
-        """override __iter__ to pull results from cache
+        """override __iter__ to pull results from dogpile
         if particular attributes have been configured.
 
         Note that this approach does *not* detach the loaded objects from
@@ -98,109 +85,121 @@ class CachingQuery(Query):
         in the cache are not the same ones in the current Session.
 
         """
-        if hasattr(self, '_cache_parameters'):
-
-            def caching_query():
-                return list(Query.__iter__(self))
+        super_ = super(CachingQuery, self)
+
+        if hasattr(self, '_cache_region'):
+            return self.get_value(createfunc=lambda: list(super_.__iter__()))
+        else:
+            return super_.__iter__()
+
+    def _execute_and_instances(self, context):
+        """override _execute_and_instances to pull results from dogpile
+        if the query is invoked directly from an external context.
+
+        This method is necessary in order to maintain compatibility
+        with the "baked query" system now used by default in some
+        relationship loader scenarios. Note also the
+        RelationshipCache._generate_cache_key method which enables
+        the baked query to be used within lazy loads.
+
+        .. versionadded:: 1.2.7
+        """
+        super_ = super(CachingQuery, self)
 
-            return self.get_value(createfunc=caching_query)
+        if context.query is not self and hasattr(self, '_cache_region'):
+            # special logic called when the Query._execute_and_instances()
+            # method is called directly from the baked query
+            return self.get_value(
+                createfunc=lambda: list(
+                    super_._execute_and_instances(context)
+                )
+            )
         else:
-            return Query.__iter__(self)
+            return super_._execute_and_instances(context)
+
+    def _get_cache_plus_key(self):
+        """Return a cache region plus key."""
+        dogpile_region = self.cache_regions[self._cache_region.region]
+        if self._cache_region.cache_key:
+            key = self._cache_region.cache_key
+        else:
+            key = _key_from_query(self)
+        return dogpile_region, key
 
     def invalidate(self):
-        """Invalidate the value represented by this Query."""
+        """Invalidate the cache value represented by this Query."""
 
-        cache, cache_key = _get_cache_parameters(self)
-        cache.remove_value(cache_key)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+        dogpile_region.delete(cache_key)
 
-    def get_value(self, merge=True, createfunc=None):
+    def get_value(self, merge=True, createfunc=None,
+                  expiration_time=None, ignore_expiration=False):
         """Return the value from the cache for this query.
 
         Raise KeyError if no value present and no
         createfunc specified.
 
         """
-        cache, cache_key = _get_cache_parameters(self)
-        ret = cache.get_value(cache_key, createfunc=createfunc)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+
+        # ignore_expiration means, if the value is in the cache
+        # but is expired, return it anyway. This doesn't make sense
+        # with createfunc, which says, if the value is expired, generate
+        # a new value.
+        assert not ignore_expiration or not createfunc, \
+            "Can't ignore expiration and also provide createfunc"
+
+        if ignore_expiration or not createfunc:
+            cached_value = dogpile_region.get(cache_key,
+                expiration_time=expiration_time,
+                ignore_expiration=ignore_expiration)
+        else:
+            cached_value = dogpile_region.get_or_create(
+                cache_key,
+                createfunc,
+                expiration_time=expiration_time
+            )
+        if cached_value is NO_VALUE:
+            raise KeyError(cache_key)
         if merge:
-            ret = self.merge_result(ret, load=False)
-        return ret
+            cached_value = self.merge_result(cached_value, load=False)
+        return cached_value
 
     def set_value(self, value):
         """Set the value in the cache for this query."""
 
-        cache, cache_key = _get_cache_parameters(self)
-        cache.put(cache_key, value)
+        dogpile_region, cache_key = self._get_cache_plus_key()
+        dogpile_region.set(cache_key, value)
 
 
-def query_callable(manager, query_cls=CachingQuery):
+def query_callable(regions=None, query_cls=CachingQuery):
     def query(*arg, **kw):
-        return query_cls(manager, *arg, **kw)
+        return query_cls(regions, *arg, **kw)
     return query
 
 
-def get_cache_region(name, region):
-    if region not in beaker.cache.cache_regions:
-        raise BeakerException('Cache region `%s` not configured '
-            'Check if proper cache settings are in the .ini files' % region)
-    kw = beaker.cache.cache_regions[region]
-    return beaker.cache.Cache._get_cache(name, kw)
-
-
-def _get_cache_parameters(query):
-    """For a query with cache_region and cache_namespace configured,
-    return the correspoinding Cache instance and cache key, based
-    on this query's current criterion and parameter values.
+def _key_from_query(query, qualifier=None):
+    """Given a Query, create a cache key.
+
+    There are many approaches to this; here we use the simplest,
+    which is to create an md5 hash of the text of the SQL statement,
+    combined with stringified versions of all the bound parameters
+    within it. There's a bit of a performance hit with
+    compiling out "query.statement" here; other approaches include
+    setting up an explicit cache key with a particular Query,
+    then combining that with the bound parameter values.
 
     """
-    if not hasattr(query, '_cache_parameters'):
-        raise ValueError("This Query does not have caching "
-                         "parameters configured.")
 
-    region, namespace, cache_key = query._cache_parameters
-
-    namespace = _namespace_from_query(namespace, query)
-
-    if cache_key is None:
-        # cache key - the value arguments from this query's parameters.
-        args = [safe_str(x) for x in _params_from_query(query)]
-        args.extend(filter(lambda k: k not in ['None', None, u'None'],
-                           [str(query._limit), str(query._offset)]))
-
-        cache_key = " ".join(args)
-
-    if cache_key is None:
-        raise Exception('Cache key cannot be None')
+    stmt = query.with_labels().statement
+    compiled = stmt.compile()
+    params = compiled.params
 
-    # get cache
-    #cache = query.cache_manager.get_cache_region(namespace, region)
-    cache = get_cache_region(namespace, region)
-    # optional - hash the cache_key too for consistent length
-    # import uuid
-    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))
-
-    return cache, cache_key
-
-
-def _namespace_from_query(namespace, query):
-    # cache namespace - the token handed in by the
-    # option + class we're querying against
-    namespace = " ".join([namespace] + [str(x) for x in query._entities])
-
-    # memcached wants this
-    namespace = namespace.replace(' ', '_')
-
-    return namespace
-
-
-def _set_cache_parameters(query, region, namespace, cache_key):
-
-    if hasattr(query, '_cache_parameters'):
-        region, namespace, cache_key = query._cache_parameters
-        raise ValueError("This query is already configured "
-                         "for region %r namespace %r" %
-                         (region, namespace))
-    query._cache_parameters = region, namespace, cache_key
+    # here we return the key as a long string. our "key mangler"
+    # set up with the region will boil it down to an md5.
+    return " ".join(
+        [safe_str(compiled)] +
+        [safe_str(params[k]) for k in sorted(params)])
 
 
 class FromCache(MapperOption):
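For reference, a runnable sketch of the dogpile calls the new get_value()
builds on: get() without a creator may return the NO_VALUE sentinel, while
get_or_create() runs the creator once and caches its result. Only
dogpile.cache is assumed here, not the RhodeCode modules:

    from dogpile.cache import make_region
    from dogpile.cache.api import NO_VALUE

    region = make_region().configure('dogpile.cache.memory', expiration_time=30)

    cache_key = 'demo_key'

    value = region.get(cache_key)    # no creator: may be the NO_VALUE sentinel
    if value is NO_VALUE:
        print('nothing cached yet')  # mirrors the KeyError branch in get_value()

    value = region.get_or_create(cache_key, lambda: ['row-1', 'row-2'])
    assert value == ['row-1', 'row-2']                  # creator ran, result stored
    assert region.get(cache_key) == ['row-1', 'row-2']  # later reads hit the cache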
@@ -208,15 +207,12 @@ class FromCache(MapperOption):
 
     propagate_to_loaders = False
 
-    def __init__(self, region, namespace, cache_key=None):
+    def __init__(self, region="sql_cache_short", cache_key=None):
         """Construct a new FromCache.
 
         :param region: the cache region. Should be a
-        region configured in the Beaker CacheManager.
-
-        :param namespace: the cache namespace. Should
-        be a name uniquely describing the target Query's
-        lexical structure.
+        region configured in the dictionary of dogpile
+        regions.
 
         :param cache_key: optional. A string cache key
         that will serve as the key to the query. Use this
@@ -226,14 +222,11 @@ class FromCache(MapperOption):
 
         """
         self.region = region
-        self.namespace = namespace
         self.cache_key = cache_key
 
     def process_query(self, query):
         """Process a Query during normal loading operation."""
-
-        _set_cache_parameters(query, self.region, self.namespace,
-                              self.cache_key)
+        query._cache_region = self
 
 
 class RelationshipCache(MapperOption):
@@ -242,27 +235,39 @@ class RelationshipCache(MapperOption):
 
     propagate_to_loaders = True
 
-    def __init__(self, region, namespace, attribute):
+    def __init__(self, attribute, region="sql_cache_short", cache_key=None):
         """Construct a new RelationshipCache.
 
-        :param region: the cache region. Should be a
-        region configured in the Beaker CacheManager.
-
-        :param namespace: the cache namespace. Should
-        be a name uniquely describing the target Query's
-        lexical structure.
-
         :param attribute: A Class.attribute which
         indicates a particular class relationship() whose
         lazy loader should be pulled from the cache.
 
+        :param region: name of the cache region.
+
+        :param cache_key: optional. A string cache key
+        that will serve as the key to the query, bypassing
+        the usual means of forming a key from the Query itself.
+
         """
         self.region = region
-        self.namespace = namespace
+        self.cache_key = cache_key
         self._relationship_options = {
             (attribute.property.parent.class_, attribute.property.key): self
         }
 
+    def _generate_cache_key(self, path):
+        """Indicate to the lazy-loader strategy that a "baked" query
+        may be used by returning ``None``.
+
+        If this method is omitted, the default implementation of
+        :class:`.MapperOption._generate_cache_key` takes place, which
+        returns ``False`` to disable the "baked" query from being used.
+
+        .. versionadded:: 1.2.7
+
+        """
+        return None
+
     def process_query_conditionally(self, query):
         """Process a Query that is used within a lazy loader.
 
@@ -271,17 +276,14 @@ class RelationshipCache(MapperOption):
 
         """
         if query._current_path:
-            mapper, key = query._current_path[-2:]
+            mapper, prop = query._current_path[-2:]
+            key = prop.key
 
             for cls in mapper.class_.__mro__:
                 if (cls, key) in self._relationship_options:
-                    relationship_option = \
-                        self._relationship_options[(cls, key)]
-                    _set_cache_parameters(
-                        query,
-                        relationship_option.region,
-                        relationship_option.namespace,
-                        None)
+                    relationship_option = self._relationship_options[(cls, key)]
+                    query._cache_region = relationship_option
+                    break
 
     def and_(self, option):
         """Chain another RelationshipCache option to this one.
@@ -294,32 +296,3 @@ class RelationshipCache(MapperOption):
         self._relationship_options.update(option._relationship_options)
         return self
 
-
-def _params_from_query(query):
-    """Pull the bind parameter values from a query.
-
-    This takes into account any scalar attribute bindparam set up.
-
-    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
-    would return [5, 7].
-
-    """
-    v = []
-    def visit_bindparam(bind):
-
-        if bind.key in query._params:
-            value = query._params[bind.key]
-        elif bind.callable:
-            # lazyloader may dig a callable in here, intended
-            # to late-evaluate params after autoflush is called.
-            # convert to a scalar value.
-            value = bind.callable()
-        else:
-            value = bind.value
-
-        v.append(value)
-    if query._criterion is not None:
-        visitors.traverse(query._criterion, {}, {'bindparam':visit_bindparam})
-    for f in query._from_obj:
-        visitors.traverse(f, {}, {'bindparam':visit_bindparam})
-    return v
@@ -690,7 +690,7 @@ class User(Base, BaseModel):
             .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
         if cache:
             feed_tokens = feed_tokens.options(
-                FromCache("
+                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
 
         feed_tokens = feed_tokens.all()
         if feed_tokens:
@@ -24,27 +24,22 @@ SQLAlchemy Metadata and Session object
 
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import scoped_session, sessionmaker
-from beaker import cache
 
 from rhodecode.lib import caching_query
 
-
-# Beaker CacheManager. A home base for cache configurations.
-cache_manager = cache.CacheManager()
+__all__ = ['Base', 'Session']
 
-__all__ = ['Base', 'Session']
-#
-# SQLAlchemy session manager. Updated by model.init_model()
-#
+# scoped_session. Apply our custom CachingQuery class to it,
+# using a callable that will associate the dictionary
+# of regions with the Query.
+# to use cache use this in query
+# .options(FromCache("sqlalchemy_cache_type", "cachekey"))
 Session = scoped_session(
     sessionmaker(
-        query_cls=caching_query.query_callable(cache_manager),
+        query_cls=caching_query.query_callable(),
         expire_on_commit=True,
     )
 )
 
 # The declarative Base
 Base = declarative_base()
-
-#to use cache use this in query
-#.options(FromCache("sqlalchemy_cache_type", "cachekey"))
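A hypothetical usage sketch of the pattern the new comment refers to, modelled
on the FromCache('sql_cache_short', ...) call sites elsewhere in this
changeset; it only works inside a configured RhodeCode environment where that
dogpile region and the model below exist:

    from rhodecode.lib.caching_query import FromCache
    from rhodecode.model.db import RhodeCodeUi
    from rhodecode.model.meta import Session

    # cache the query result in the 'sql_cache_short' region under a fixed key
    ui_settings = Session().query(RhodeCodeUi).options(
        FromCache('sql_cache_short', 'get_hook_settings')).all()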
@@ -21,11 +21,12 @@
 import os
 import hashlib
 import logging
+import time
 from collections import namedtuple
 from functools import wraps
 import bleach
 
-from rhodecode.lib import caches
+from rhodecode.lib import caches, rc_cache
 from rhodecode.lib.utils2 import (
     Optional, AttributeDict, safe_str, remove_prefix, str2bool)
 from rhodecode.lib.vcs.backends import base
@@ -206,13 +207,17 @@ class SettingsModel(BaseModel):
         return res
 
     def invalidate_settings_cache(self):
-        namespace = 'rhodecode_settings'
-        cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
-        caches.clear_cache_manager(cache_manager)
+        # NOTE:(marcink) we flush the whole sql_cache_short region, because it
+        # reads different settings etc. It's little too much but those caches are
+        # anyway very short lived and it's a safest way.
+        region = rc_cache.get_or_create_region('sql_cache_short')
+        region.invalidate()
 
     def get_all_settings(self, cache=False):
+        region = rc_cache.get_or_create_region('sql_cache_short')
 
-        def _compute():
+        @region.cache_on_arguments(should_cache_fn=lambda v: cache)
+        def _get_all_settings(name, key):
             q = self._get_settings_query()
             if not q:
                 raise Exception('Could not get application settings !')
@@ -223,20 +228,14 @@ class SettingsModel(BaseModel):
             }
             return settings
 
-        if cache:
-            log.debug('Fetching app settings using cache')
-            repo = self._get_repo(self.repo) if self.repo else None
-            namespace = 'rhodecode_settings'
-            cache_manager = caches.get_cache_manager(
-                'sql_cache_short', namespace)
-            _cache_key = (
-                "get_repo_{}_settings".format(repo.repo_id)
-                if repo else "get_app_settings")
+        repo = self._get_repo(self.repo) if self.repo else None
+        key = "settings_repo.{}".format(repo.repo_id) if repo else "settings_app"
+        start = time.time()
+        result = _get_all_settings('rhodecode_settings', key)
+        total = time.time() - start
+        log.debug('Fetching app settings for key: %s took: %.3fs', key, total)
 
-            return cache_manager.get(_cache_key, createfunc=_compute)
-
-        else:
-            return _compute()
+        return result
 
     def get_auth_settings(self):
         q = self._get_settings_query()
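A standalone sketch of the should_cache_fn mechanism used by the new
_get_all_settings above: the decorated function always consults the region,
but a freshly computed value is only stored when the callable returns True.
Only dogpile.cache is assumed; the settings payload is made up:

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=30)

    def get_all_settings(cache=False):
        # should_cache_fn decides, per computed value, whether it gets stored;
        # this mirrors how _get_all_settings honours the `cache` flag
        @region.cache_on_arguments(should_cache_fn=lambda v: cache)
        def _get_all_settings(name, key):
            print('computing settings for', key)
            return {'rhodecode_title': 'demo'}

        return _get_all_settings('rhodecode_settings', 'settings_app')

    get_all_settings(cache=False)  # computes, result is not stored
    get_all_settings(cache=False)  # nothing stored yet, so it computes again
    get_all_settings(cache=True)   # computes once more and stores the value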
@@ -41,7 +41,7 @@ log = logging.getLogger(__name__)
 
 __all__ = [
     'get_new_dir', 'TestController',
-    'link_to', 'clear_all_caches',
+    'link_to', 'clear_cache_regions',
     'assert_session_flash', 'login_user', 'no_newline_id_generator',
     'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
     'NEW_HG_REPO', 'NEW_GIT_REPO',
@@ -95,10 +95,12 @@ TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, '
 TEST_REPO_PREFIX = 'vcs-test'
 
 
-def clear_all_caches():
-    from beaker.cache import cache_managers
-    for _cache in cache_managers.values():
-        _cache.clear()
+def clear_cache_regions(regions=None):
+    # dogpile
+    from rhodecode.lib.rc_cache import region_meta
+    for region_name, region in region_meta.dogpile_cache_regions.items():
+        if not regions or region_name in regions:
+            region.invalidate()
 
 
 def get_new_dir(title):
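region.invalidate(), as used by the new clear_cache_regions() helper, does not
delete backend keys; it marks values written before the call as stale so the
next get_or_create() recomputes them. A minimal standalone illustration:

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory', expiration_time=300)

    calls = []

    def creator():
        calls.append(1)
        return 'value'

    region.get_or_create('key', creator)   # computes and caches
    region.get_or_create('key', creator)   # cached, creator not called again
    region.invalidate()                    # existing entries are now stale
    region.get_or_create('key', creator)   # recomputed after invalidation
    assert len(calls) == 2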
@@ -70,8 +70,7 @@ def disable_hooks(request, hooks):
     ui_settings.invalidate()
 
     ui_settings = session.query(db.RhodeCodeUi).options(
-        caching_query.FromCache(
-            'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
+        caching_query.FromCache('sql_cache_short', 'get_hook_settings'))
     ui_settings.invalidate()
 
     @request.addfinalizer
@@ -292,15 +292,12 @@ cache_dir = %(here)s/data
 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
 
-beaker.cache.regions = long_term, sql_cache_short
+beaker.cache.regions = long_term
 
 beaker.cache.long_term.type = memory
 beaker.cache.long_term.expire = 36000
 beaker.cache.long_term.key_length = 256
 
-beaker.cache.sql_cache_short.type = memory
-beaker.cache.sql_cache_short.expire = 1
-beaker.cache.sql_cache_short.key_length = 256
 
 #####################################
 ### DOGPILE CACHE ####
@@ -311,6 +308,12 @@ rc_cache.cache_perms.backend = dogpile.c
 rc_cache.cache_perms.expiration_time = 0
 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
 
+
+## cache settings for SQL queries
+rc_cache.sql_cache_short.backend = dogpile.cache.rc.memory_lru
+rc_cache.sql_cache_short.expiration_time = 0
+
+
 ####################################
 ### BEAKER SESSION ####
 ####################################