caches: don't use beaker for file caches anymore
marcink
r2846:bbc96602 default
@@ -308,47 +308,72 b' celery.max_tasks_per_child = 100'
308 308 ## tasks will never be sent to the queue, but executed locally instead.
309 309 celery.task_always_eager = false
310 310
311 #####################################
312 ### DOGPILE CACHE ####
313 #####################################
314 ## Default cache dir for caches. Putting this into a ramdisk
315 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
316 ## of space
317 cache_dir = /tmp/rcdev/data
318
319 ## cache settings for permission tree, auth TTL.
320 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
321 rc_cache.cache_perms.expiration_time = 300
322 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
323
324 ## redis backend with distributed locks
325 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
326 #rc_cache.cache_perms.expiration_time = 300
327 #rc_cache.cache_perms.arguments.host = localhost
328 #rc_cache.cache_perms.arguments.port = 6379
329 #rc_cache.cache_perms.arguments.db = 0
330 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
331 #rc_cache.cache_perms.arguments.distributed_lock = true
332
333
334 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
335 rc_cache.cache_repo.expiration_time = 2592000
336 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
337
338 ## redis backend with distributed locks
339 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
340 #rc_cache.cache_repo.expiration_time = 2592000
341 ## this needs to be greater than expiration_time
342 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
343 #rc_cache.cache_repo.arguments.host = localhost
344 #rc_cache.cache_repo.arguments.port = 6379
345 #rc_cache.cache_repo.arguments.db = 1
346 #rc_cache.cache_repo.arguments.distributed_lock = true
347
348
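
A minimal sketch (not part of this changeset) of how the rc_cache.* keys above map onto a dogpile.cache region. The RhodeCode-specific backend `dogpile.cache.rc.file_namespace` is swapped for the stock `dogpile.cache.dbm` backend here so the snippet runs standalone; the key names follow the settings shown.

```python
import os
import tempfile
from dogpile.cache import make_region

# dict standing in for the parsed .ini values
settings = {
    'rc_cache.cache_perms.backend': 'dogpile.cache.dbm',
    'rc_cache.cache_perms.expiration_time': '300',
    'rc_cache.cache_perms.arguments.filename': os.path.join(
        tempfile.gettempdir(), 'rc_cache_demo'),
}

# build the region straight from the prefixed config keys
region = make_region().configure_from_config(settings, 'rc_cache.cache_perms.')

@region.cache_on_arguments()
def load_permissions(user_id):
    # stand-in for the expensive permission-tree computation
    return {'user_id': user_id, 'perms': []}

load_permissions(1)   # computed once, then served from the file cache for 300s
```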
311 349 ####################################
312 350 ### BEAKER CACHE ####
313 351 ####################################
314 # default cache dir for templates. Putting this into a ramdisk
315 ## can boost performance, eg. %(here)s/data_ramdisk
316 cache_dir = %(here)s/data
317 352
318 353 ## locking and default file storage for Beaker. Putting this into a ramdisk
319 354 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
320 355 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
321 356 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
322 357
323 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
358 beaker.cache.regions = long_term, sql_cache_short
324 359
325 beaker.cache.long_term.type = memory
326 beaker.cache.long_term.expire = 36000
360 beaker.cache.long_term.type = memorylru_base
361 beaker.cache.long_term.expire = 172800
327 362 beaker.cache.long_term.key_length = 256
328 363
329 beaker.cache.sql_cache_short.type = memory
364 beaker.cache.sql_cache_short.type = memorylru_base
330 365 beaker.cache.sql_cache_short.expire = 10
331 366 beaker.cache.sql_cache_short.key_length = 256
332 367
333 beaker.cache.repo_cache_long.type = memorylru_base
334 beaker.cache.repo_cache_long.max_items = 4096
335 beaker.cache.repo_cache_long.expire = 2592000
336
337 ## default is memorylru_base cache, configure only if required
338 ## using multi-node or multi-worker setup
339 #beaker.cache.repo_cache_long.type = ext:memcached
340 #beaker.cache.repo_cache_long.url = localhost:11211
341 #beaker.cache.repo_cache_long.expire = 1209600
342 #beaker.cache.repo_cache_long.key_length = 256
343 368
344 369 ####################################
345 370 ### BEAKER SESSION ####
346 371 ####################################
347 372
348 373 ## .session.type is the type of storage used for the session; currently allowed
349 ## types are file, ext:memcached, ext:database, and memory (default).
374 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
350 375 beaker.session.type = file
351 beaker.session.data_dir = %(here)s/data/sessions/data
376 beaker.session.data_dir = %(here)s/data/sessions
352 377
353 378 ## db based session, fast, and allows easy management over logged in users
354 379 #beaker.session.type = ext:database
@@ -496,6 +521,8 b' debug_style = true'
496 521 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
497 522 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
498 523 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
524 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
525
499 526 sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
500 527
501 528 # see sqlalchemy docs for other advanced settings
@@ -515,6 +542,9 b' sqlalchemy.db1.convert_unicode = true'
515 542 ## which defaults to five.
516 543 #sqlalchemy.db1.max_overflow = 10
517 544
545 ## Connection check ping, used to detect broken database connections
546 ## can be enabled to better handle cases where MySQL reports "server has gone away" errors
547 #sqlalchemy.db1.ping_connection = true
518 548
519 549 ##################
520 550 ### VCS CONFIG ###
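
Hedged sketch: the `sqlalchemy.db1.ping_connection` flag above presumably maps onto SQLAlchemy's connection pre-ping. With a plain SQLAlchemy engine the equivalent knob is `pool_pre_ping=True`, which checks a pooled connection (roughly a `SELECT 1`) before handing it out and reconnects transparently if the server has gone away. The URL below is a stand-in for any `sqlalchemy.db1.url` value.

```python
from sqlalchemy import create_engine, text

engine = create_engine(
    'sqlite:///:memory:',   # stand-in URL
    pool_pre_ping=True)     # ping the connection before each checkout

with engine.connect() as conn:
    print(conn.execute(text('SELECT 1')).scalar())
```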
@@ -624,7 +654,7 b' custom.conf = 1'
624 654 ### LOGGING CONFIGURATION ####
625 655 ################################
626 656 [loggers]
627 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
657 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
628 658
629 659 [handlers]
630 660 keys = console, console_sql
@@ -680,9 +710,12 b' level = DEBUG'
680 710 formatter = color_formatter
681 711
682 712 [handler_console_sql]
713 # "level = DEBUG" logs SQL queries and results.
714 # "level = INFO" logs SQL queries.
715 # "level = WARN" logs neither. (Recommended for production systems.)
683 716 class = StreamHandler
684 717 args = (sys.stderr, )
685 level = DEBUG
718 level = WARN
686 719 formatter = color_formatter_sql
687 720
688 721 ################
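
For illustration only, the handler-level semantics documented above expressed with the stdlib logging API instead of the .ini file; logger and message names are made up.

```python
import logging
import sys

handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARN)                 # WARN: no queries, no results

sql_log = logging.getLogger('sqlalchemy.engine')
sql_log.addHandler(handler)
sql_log.setLevel(logging.DEBUG)

sql_log.info('SELECT * FROM users')            # dropped by the WARN handler
sql_log.warning('slow query detected')         # emitted to stderr
```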
@@ -283,47 +283,72 b' celery.max_tasks_per_child = 100'
283 283 ## tasks will never be sent to the queue, but executed locally instead.
284 284 celery.task_always_eager = false
285 285
286 #####################################
287 ### DOGPILE CACHE ####
288 #####################################
289 ## Default cache dir for caches. Putting this into a ramdisk
290 ## can boost performance, eg. /tmpfs/data_ramdisk, however this might require lots
291 ## of space
292 cache_dir = /tmp/rcdev/data
293
294 ## cache settings for permission tree, auth TTL.
295 rc_cache.cache_perms.backend = dogpile.cache.rc.file_namespace
296 rc_cache.cache_perms.expiration_time = 300
297 rc_cache.cache_perms.arguments.filename = /tmp/rc_cache_1
298
299 ## redis backend with distributed locks
300 #rc_cache.cache_perms.backend = dogpile.cache.rc.redis
301 #rc_cache.cache_perms.expiration_time = 300
302 #rc_cache.cache_perms.arguments.host = localhost
303 #rc_cache.cache_perms.arguments.port = 6379
304 #rc_cache.cache_perms.arguments.db = 0
305 #rc_cache.cache_perms.arguments.redis_expiration_time = 7200
306 #rc_cache.cache_perms.arguments.distributed_lock = true
307
308
309 rc_cache.cache_repo.backend = dogpile.cache.rc.file_namespace
310 rc_cache.cache_repo.expiration_time = 2592000
311 rc_cache.cache_repo.arguments.filename = /tmp/rc_cache_2
312
313 ## redis backend with distributed locks
314 #rc_cache.cache_repo.backend = dogpile.cache.rc.redis
315 #rc_cache.cache_repo.expiration_time = 2592000
316 ## this needs to be greater than expiration_time
317 #rc_cache.cache_repo.arguments.redis_expiration_time = 2678400
318 #rc_cache.cache_repo.arguments.host = localhost
319 #rc_cache.cache_repo.arguments.port = 6379
320 #rc_cache.cache_repo.arguments.db = 1
321 #rc_cache.cache_repo.arguments.distributed_lock = true
322
323
286 324 ####################################
287 325 ### BEAKER CACHE ####
288 326 ####################################
289 # default cache dir for templates. Putting this into a ramdisk
290 ## can boost performance, eg. %(here)s/data_ramdisk
291 cache_dir = %(here)s/data
292 327
293 328 ## locking and default file storage for Beaker. Putting this into a ramdisk
294 329 ## can boost performance, eg. %(here)s/data_ramdisk/cache/beaker_data
295 330 beaker.cache.data_dir = %(here)s/data/cache/beaker_data
296 331 beaker.cache.lock_dir = %(here)s/data/cache/beaker_lock
297 332
298 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
333 beaker.cache.regions = long_term, sql_cache_short
299 334
300 335 beaker.cache.long_term.type = memory
301 beaker.cache.long_term.expire = 36000
336 beaker.cache.long_term.expire = 172800
302 337 beaker.cache.long_term.key_length = 256
303 338
304 339 beaker.cache.sql_cache_short.type = memory
305 340 beaker.cache.sql_cache_short.expire = 10
306 341 beaker.cache.sql_cache_short.key_length = 256
307 342
308 beaker.cache.repo_cache_long.type = memorylru_base
309 beaker.cache.repo_cache_long.max_items = 4096
310 beaker.cache.repo_cache_long.expire = 2592000
311
312 ## default is memorylru_base cache, configure only if required
313 ## using multi-node or multi-worker setup
314 #beaker.cache.repo_cache_long.type = ext:memcached
315 #beaker.cache.repo_cache_long.url = localhost:11211
316 #beaker.cache.repo_cache_long.expire = 1209600
317 #beaker.cache.repo_cache_long.key_length = 256
318 343
319 344 ####################################
320 345 ### BEAKER SESSION ####
321 346 ####################################
322 347
323 348 ## .session.type is the type of storage used for the session; currently allowed
324 ## types are file, ext:memcached, ext:database, and memory (default).
349 ## types are file, ext:memcached, ext:redis, ext:database, and memory (default).
325 350 beaker.session.type = file
326 beaker.session.data_dir = %(here)s/data/sessions/data
351 beaker.session.data_dir = %(here)s/data/sessions
327 352
328 353 ## db based session, fast, and allows easy management over logged in users
329 354 #beaker.session.type = ext:database
@@ -466,6 +491,8 b' set debug = false'
466 491 #sqlalchemy.db1.url = sqlite:///%(here)s/rhodecode.db?timeout=30
467 492 #sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
468 493 #sqlalchemy.db1.url = mysql://root:qweqwe@localhost/rhodecode
494 #sqlalchemy.db1.url = mysql+pymysql://root:qweqwe@localhost/rhodecode
495
469 496 sqlalchemy.db1.url = postgresql://postgres:qweqwe@localhost/rhodecode
470 497
471 498 # see sqlalchemy docs for other advanced settings
@@ -485,6 +512,9 b' sqlalchemy.db1.convert_unicode = true'
485 512 ## which defaults to five.
486 513 #sqlalchemy.db1.max_overflow = 10
487 514
515 ## Connection check ping, used to detect broken database connections
516 ## can be enabled to better handle cases where MySQL reports "server has gone away" errors
517 #sqlalchemy.db1.ping_connection = true
488 518
489 519 ##################
490 520 ### VCS CONFIG ###
@@ -593,7 +623,7 b' custom.conf = 1'
593 623 ### LOGGING CONFIGURATION ####
594 624 ################################
595 625 [loggers]
596 keys = root, sqlalchemy, beaker, rhodecode, ssh_wrapper, celery
626 keys = root, sqlalchemy, beaker, celery, rhodecode, ssh_wrapper
597 627
598 628 [handlers]
599 629 keys = console, console_sql
@@ -649,6 +679,9 b' level = INFO'
649 679 formatter = generic
650 680
651 681 [handler_console_sql]
682 # "level = DEBUG" logs SQL queries and results.
683 # "level = INFO" logs SQL queries.
684 # "level = WARN" logs neither. (Recommended for production systems.)
652 685 class = StreamHandler
653 686 args = (sys.stderr, )
654 687 level = WARN
@@ -1199,13 +1199,6 b' class UsersView(UserAppView):'
1199 1199
1200 1200 return perm_user.permissions
1201 1201
1202 def _get_user_cache_keys(self, cache_namespace_uid, keys):
1203 user_keys = []
1204 for k in sorted(keys):
1205 if k.startswith(cache_namespace_uid):
1206 user_keys.append(k)
1207 return user_keys
1208
1209 1202 @LoginRequired()
1210 1203 @HasPermissionAllDecorator('hg.admin')
1211 1204 @view_config(
@@ -1222,8 +1215,7 b' class UsersView(UserAppView):'
1222 1215 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1223 1216 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1224 1217 c.backend = c.region.backend
1225 c.user_keys = self._get_user_cache_keys(
1226 cache_namespace_uid, c.region.backend.list_keys())
1218 c.user_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
1227 1219
1228 1220 return self._get_template_context(c)
1229 1221
@@ -1241,14 +1233,9 b' class UsersView(UserAppView):'
1241 1233 c.perm_user = c.user.AuthUser(ip_addr=self.request.remote_addr)
1242 1234
1243 1235 cache_namespace_uid = 'cache_user_auth.{}'.format(self.db_user.user_id)
1244 c.region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1236 del_keys = rc_cache.clear_cache_namespace('cache_perms', cache_namespace_uid)
1245 1237
1246 c.user_keys = self._get_user_cache_keys(
1247 cache_namespace_uid, c.region.backend.list_keys())
1248 for k in c.user_keys:
1249 c.region.delete(k)
1250
1251 h.flash(_("Deleted {} cache keys").format(len(c.user_keys)), category='success')
1238 h.flash(_("Deleted {} cache keys").format(del_keys), category='success')
1252 1239
1253 1240 return HTTPFound(h.route_path(
1254 1241 'edit_user_caches', user_id=c.user.user_id))
@@ -27,7 +27,7 b' from pyramid.view import view_config'
27 27 from rhodecode.apps._base import RepoAppView
28 28 from rhodecode.lib.auth import (
29 29 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
30 from rhodecode.lib import helpers as h
30 from rhodecode.lib import helpers as h, rc_cache
31 31 from rhodecode.lib import system_info
32 32 from rhodecode.model.meta import Session
33 33 from rhodecode.model.scm import ScmModel
@@ -54,6 +54,12 b' class RepoCachesView(RepoAppView):'
54 54 if os.path.isdir(cached_diffs_dir):
55 55 c.cached_diff_size = system_info.get_storage_size(cached_diffs_dir)
56 56 c.shadow_repos = c.rhodecode_db_repo.shadow_repos()
57
58 cache_namespace_uid = 'cache_repo.{}'.format(self.db_repo.repo_id)
59 c.region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
60 c.backend = c.region.backend
61 c.repo_keys = sorted(c.region.backend.list_keys(prefix=cache_namespace_uid))
62
57 63 return self._get_template_context(c)
58 64
59 65 @LoginRequired()
@@ -68,7 +74,9 b' class RepoCachesView(RepoAppView):'
68 74
69 75 try:
70 76 ScmModel().mark_for_invalidation(self.db_repo_name, delete=True)
77
71 78 Session().commit()
79
72 80 h.flash(_('Cache invalidation successful'),
73 81 category='success')
74 82 except Exception:
@@ -33,7 +33,7 b' from pyramid.response import Response'
33 33 from rhodecode.apps._base import RepoAppView
34 34
35 35 from rhodecode.controllers.utils import parse_path_ref
36 from rhodecode.lib import diffs, helpers as h, caches
36 from rhodecode.lib import diffs, helpers as h, caches, rc_cache
37 37 from rhodecode.lib import audit_logger
38 38 from rhodecode.lib.exceptions import NonRelativePathError
39 39 from rhodecode.lib.codeblocks import (
@@ -187,32 +187,25 b' class RepoFilesView(RepoAppView):'
187 187 # check if commit is a branch name or branch hash
188 188 return commit_id in valid_heads
189 189
190 def _get_tree_cache_manager(self, namespace_type):
191 _namespace = caches.get_repo_namespace_key(
192 namespace_type, self.db_repo_name)
193 return caches.get_cache_manager('repo_cache_long', _namespace)
190 def _get_tree_at_commit(
191 self, c, commit_id, f_path, full_load=False):
192
193 repo_id = self.db_repo.repo_id
194 194
195 def _get_tree_at_commit(
196 self, c, commit_id, f_path, full_load=False, force=False):
197 def _cached_tree():
198 log.debug('Generating cached file tree for %s, %s, %s',
199 self.db_repo_name, commit_id, f_path)
195 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
196 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
197
198 @region.cache_on_arguments(namespace=cache_namespace_uid)
199 def compute_file_tree(repo_id, commit_id, f_path, full_load):
200 log.debug('Generating cached file tree for repo_id: %s, %s, %s',
201 repo_id, commit_id, f_path)
200 202
201 203 c.full_load = full_load
202 204 return render(
203 205 'rhodecode:templates/files/files_browser_tree.mako',
204 206 self._get_template_context(c), self.request)
205 207
206 cache_manager = self._get_tree_cache_manager(caches.FILE_TREE)
207
208 cache_key = caches.compute_key_from_params(
209 self.db_repo_name, commit_id, f_path)
210
211 if force:
212 # we want to force recompute of caches
213 cache_manager.remove_value(cache_key)
214
215 return cache_manager.get(cache_key, createfunc=_cached_tree)
208 return compute_file_tree(self.db_repo.repo_id, commit_id, f_path, full_load)
216 209
217 210 def _get_archive_spec(self, fname):
218 211 log.debug('Detecting archive spec for: `%s`', fname)
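
A hedged sketch of the decorator pattern introduced above, using a plain in-memory dogpile region in place of the `cache_repo` region; repo_id 42 is hypothetical. The namespace groups every key for one repository so the whole set can be listed or purged together (see `clear_cache_namespace` later in this changeset).

```python
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory', expiration_time=3600)
cache_namespace_uid = 'cache_repo.42'

@region.cache_on_arguments(namespace=cache_namespace_uid)
def compute_file_tree(repo_id, commit_id, f_path, full_load):
    # stand-in for rendering files_browser_tree.mako
    return 'tree:{}:{}:{}:{}'.format(repo_id, commit_id, f_path, full_load)

compute_file_tree(42, 'abc123', '/', False)              # computed and cached
compute_file_tree(42, 'abc123', '/', False)              # served from the cache
compute_file_tree.invalidate(42, 'abc123', '/', False)   # explicit flush of one key
```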
@@ -664,12 +657,8 b' class RepoFilesView(RepoAppView):'
664 657 c.file = dir_node
665 658 c.commit = commit
666 659
667 # using force=True here, make a little trick. We flush the cache and
668 # compute it using the same key as without previous full_load, so now
669 # the fully loaded tree is now returned instead of partial,
670 # and we store this in caches
671 660 html = self._get_tree_at_commit(
672 c, commit.raw_id, dir_node.path, full_load=True, force=True)
661 c, commit.raw_id, dir_node.path, full_load=True)
673 662
674 663 return Response(html)
675 664
@@ -784,10 +773,15 b' class RepoFilesView(RepoAppView):'
784 773
785 774 return response
786 775
787 def _get_nodelist_at_commit(self, repo_name, commit_id, f_path):
788 def _cached_nodes():
789 log.debug('Generating cached nodelist for %s, %s, %s',
790 repo_name, commit_id, f_path)
776 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
777
778 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
779 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
780
781 @region.cache_on_arguments(namespace=cache_namespace_uid)
782 def compute_file_search(repo_id, commit_id, f_path):
783 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
784 repo_id, commit_id, f_path)
791 785 try:
792 786 _d, _f = ScmModel().get_nodes(
793 787 repo_name, commit_id, f_path, flat=False)
@@ -799,12 +793,7 b' class RepoFilesView(RepoAppView):'
799 793 commit_id='tip', f_path='/'))
800 794 return _d + _f
801 795
802 cache_manager = self._get_tree_cache_manager(
803 caches.FILE_SEARCH_TREE_META)
804
805 cache_key = caches.compute_key_from_params(
806 repo_name, commit_id, f_path)
807 return cache_manager.get(cache_key, createfunc=_cached_nodes)
796 return compute_file_search(self.db_repo.repo_id, commit_id, f_path)
808 797
809 798 @LoginRequired()
810 799 @HasRepoPermissionAnyDecorator(
@@ -819,7 +808,7 b' class RepoFilesView(RepoAppView):'
819 808 commit = self._get_commit_or_redirect(commit_id)
820 809
821 810 metadata = self._get_nodelist_at_commit(
822 self.db_repo_name, commit.raw_id, f_path)
811 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
823 812 return {'nodes': metadata}
824 813
825 814 def _create_references(
@@ -27,15 +27,14 b' from beaker.cache import cache_region'
27 27 from rhodecode.controllers import utils
28 28 from rhodecode.apps._base import RepoAppView
29 29 from rhodecode.config.conf import (LANGUAGES_EXTENSIONS_MAP)
30 from rhodecode.lib import caches, helpers as h
31 from rhodecode.lib.helpers import RepoPage
30 from rhodecode.lib import helpers as h, rc_cache
32 31 from rhodecode.lib.utils2 import safe_str, safe_int
33 32 from rhodecode.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator
34 33 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
35 34 from rhodecode.lib.ext_json import json
36 35 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.exceptions import CommitError, EmptyRepositoryError, \
38 CommitDoesNotExistError
36 from rhodecode.lib.vcs.exceptions import (
37 CommitError, EmptyRepositoryError, CommitDoesNotExistError)
39 38 from rhodecode.model.db import Statistics, CacheKey, User
40 39 from rhodecode.model.meta import Session
41 40 from rhodecode.model.repo import ReadmeFinder
@@ -134,7 +133,7 b' class RepoSummaryView(RepoAppView):'
134 133 except EmptyRepositoryError:
135 134 collection = self.rhodecode_vcs_repo
136 135
137 c.repo_commits = RepoPage(
136 c.repo_commits = h.RepoPage(
138 137 collection, page=p, items_per_page=size, url=url_generator)
139 138 page_ids = [x.raw_id for x in c.repo_commits]
140 139 c.comments = self.db_repo.get_comments(page_ids)
@@ -247,16 +246,14 b' class RepoSummaryView(RepoAppView):'
247 246 renderer='json_ext')
248 247 def repo_stats(self):
249 248 commit_id = self.get_request_commit_id()
249 show_stats = bool(self.db_repo.enable_statistics)
250 repo_id = self.db_repo.repo_id
250 251
251 _namespace = caches.get_repo_namespace_key(
252 caches.SUMMARY_STATS, self.db_repo_name)
253 show_stats = bool(self.db_repo.enable_statistics)
254 cache_manager = caches.get_cache_manager(
255 'repo_cache_long', _namespace)
256 _cache_key = caches.compute_key_from_params(
257 self.db_repo_name, commit_id, show_stats)
252 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
253 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
258 254
259 def compute_stats():
255 @region.cache_on_arguments(namespace=cache_namespace_uid)
256 def compute_stats(repo_id, commit_id, show_stats):
260 257 code_stats = {}
261 258 size = 0
262 259 try:
@@ -279,7 +276,7 b' class RepoSummaryView(RepoAppView):'
279 276 return {'size': h.format_byte_size_binary(size),
280 277 'code_stats': code_stats}
281 278
282 stats = cache_manager.get(_cache_key, createfunc=compute_stats)
279 stats = compute_stats(self.db_repo.repo_id, commit_id, show_stats)
283 280 return stats
284 281
285 282 @LoginRequired()
@@ -435,6 +435,13 b' def _sanitize_cache_settings(settings):'
435 435 _string_setting(settings, 'rc_cache.cache_perms.arguments.filename',
436 436 os.path.join(tempfile.gettempdir(), 'rc_cache_1'))
437 437
438 _string_setting(settings, 'rc_cache.cache_repo.backend',
439 'dogpile.cache.rc.file_namespace')
440 _int_setting(settings, 'rc_cache.cache_repo.expiration_time',
441 60)
442 _string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
443 os.path.join(tempfile.gettempdir(), 'rc_cache_2'))
444
438 445
439 446 def _int_setting(settings, name, default):
440 447 settings[name] = int(settings.get(name, default))
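
A small sketch of how the default-filling helpers above behave; the helper bodies follow the fragment shown, and the dict stands in for the parsed .ini values.

```python
import os
import tempfile

def _int_setting(settings, name, default):
    settings[name] = int(settings.get(name, default))

def _string_setting(settings, name, default):
    settings[name] = settings.get(name, default)

# only one key comes from the .ini; the rest fall back to defaults
settings = {'rc_cache.cache_repo.expiration_time': '2592000'}

_string_setting(settings, 'rc_cache.cache_repo.backend',
                'dogpile.cache.rc.file_namespace')
_int_setting(settings, 'rc_cache.cache_repo.expiration_time', 60)
_string_setting(settings, 'rc_cache.cache_repo.arguments.filename',
                os.path.join(tempfile.gettempdir(), 'rc_cache_2'))

print(settings)   # configured value wins, missing keys get sane defaults
```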
@@ -31,13 +31,6 b' from rhodecode.model.db import Session, '
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 FILE_TREE = 'cache_file_tree'
35 FILE_TREE_META = 'cache_file_tree_metadata'
36 FILE_SEARCH_TREE_META = 'cache_file_search_metadata'
37 SUMMARY_STATS = 'cache_summary_stats'
38
39 # This list of caches gets purged when invalidation happens
40 USED_REPO_CACHES = (FILE_TREE, FILE_SEARCH_TREE_META)
41 34
42 35 DEFAULT_CACHE_MANAGER_CONFIG = {
43 36 'type': 'memorylru_base',
@@ -129,14 +122,6 b' def clear_cache_manager(cache_manager):'
129 122 cache_manager.clear()
130 123
131 124
132 def clear_repo_caches(repo_name):
133 # invalidate cache manager for this repo
134 for prefix in USED_REPO_CACHES:
135 namespace = get_repo_namespace_key(prefix, repo_name)
136 cache_manager = get_cache_manager('repo_cache_long', namespace)
137 clear_cache_manager(cache_manager)
138
139
140 125 def compute_key_from_params(*args):
141 126 """
142 127 Helper to compute key from given params to be used in cache manager
@@ -148,60 +133,6 b' def get_repo_namespace_key(prefix, repo_'
148 133 return '{0}_{1}'.format(prefix, compute_key_from_params(repo_name))
149 134
150 135
151 def conditional_cache(region, cache_namespace, condition, func):
152 """
153 Conditional caching function use like::
154 def _c(arg):
155 # heavy computation function
156 return data
157
158 # depending on the condition the compute is wrapped in cache or not
159 compute = conditional_cache('short_term', 'cache_namespace_id',
160 condition=True, func=func)
161 return compute(arg)
162
163 :param region: name of cache region
164 :param cache_namespace: cache namespace
165 :param condition: condition for cache to be triggered, and
166 return data cached
167 :param func: wrapped heavy function to compute
168
169 """
170 wrapped = func
171 if condition:
172 log.debug('conditional_cache: True, wrapping call of '
173 'func: %s into %s region cache', region, func)
174
175 def _cache_wrap(region_name, cache_namespace):
176 """Return a caching wrapper"""
177
178 def decorate(func):
179 @functools.wraps(func)
180 def cached(*args, **kwargs):
181 if kwargs:
182 raise AttributeError(
183 'Usage of kwargs is not allowed. '
184 'Use only positional arguments in wrapped function')
185 manager = get_cache_manager(region_name, cache_namespace)
186 cache_key = compute_key_from_params(*args)
187
188 def go():
189 return func(*args, **kwargs)
190
191 # save org function name
192 go.__name__ = '_cached_%s' % (func.__name__,)
193
194 return manager.get(cache_key, createfunc=go)
195 return cached
196
197 return decorate
198
199 cached_region = _cache_wrap(region, cache_namespace)
200 wrapped = cached_region(func)
201
202 return wrapped
203
204
205 136 class ActiveRegionCache(object):
206 137 def __init__(self, context):
207 138 self.context = context
@@ -35,7 +35,9 b' register_backend('
35 35
36 36
37 37 from . import region_meta
38 from .utils import get_default_cache_settings, key_generator, get_or_create_region
38 from .utils import (
39 get_default_cache_settings, key_generator, get_or_create_region,
40 clear_cache_namespace)
39 41
40 42
41 43 def configure_dogpile_cache(settings):
@@ -49,9 +49,18 b' class FileNamespaceBackend(Serializer, f'
49 49 def __init__(self, arguments):
50 50 super(FileNamespaceBackend, self).__init__(arguments)
51 51
52 def list_keys(self):
52 def list_keys(self, prefix=''):
53 def cond(v):
54 if not prefix:
55 return True
56
57 if v.startswith(prefix):
58 return True
59 return False
60
53 61 with self._dbm_file(True) as dbm:
54 return dbm.keys()
62
63 return filter(cond, dbm.keys())
55 64
56 65 def get_store(self):
57 66 return self.filename
@@ -81,8 +90,10 b' class FileNamespaceBackend(Serializer, f'
81 90
82 91
83 92 class RedisPickleBackend(Serializer, redis_backend.RedisBackend):
84 def list_keys(self):
85 return self.client.keys()
93 def list_keys(self, prefix=''):
94 if prefix:
95 prefix = prefix + '*'
96 return self.client.keys(prefix)
86 97
87 98 def get_store(self):
88 99 return self.client.connection_pool
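
Hedged sketch of the prefix filtering that `list_keys(prefix=...)` adds, using the stdlib dbm module directly instead of the backend's own store; key names are made up.

```python
import dbm
import os
import tempfile

store = os.path.join(tempfile.gettempdir(), 'rc_cache_keys_demo')

with dbm.open(store, 'c') as db:
    db['cache_repo.42:file_tree'] = b'...'
    db['cache_repo.42:stats'] = b'...'
    db['cache_repo.7:file_tree'] = b'...'

with dbm.open(store, 'r') as db:
    prefix = b'cache_repo.42'
    # keep only the keys belonging to one repository namespace
    print(sorted(k for k in db.keys() if k.startswith(prefix)))
```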
@@ -97,3 +97,11 b' def get_or_create_region(region_name, re'
97 97 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
98 98
99 99 return region_obj
100
101
102 def clear_cache_namespace(cache_region, cache_namespace_uid):
103 region = get_or_create_region(cache_region, cache_namespace_uid)
104 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
105 for k in cache_keys:
106 region.delete(k)
107 return len(cache_keys)
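
A usage sketch for the new helper above, assuming a RhodeCode environment where `rhodecode.lib.rc_cache` is importable: wipe one repository's dogpile namespace and report how many keys were removed, mirroring the deleted-key count the admin views flash elsewhere in this changeset. repo_id 42 is hypothetical.

```python
from rhodecode.lib import rc_cache

cache_namespace_uid = 'cache_repo.{}'.format(42)
deleted = rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
print('Deleted {} cache keys'.format(deleted))
```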
@@ -38,7 +38,7 b' from rhodecode.lib.vcs import get_backen'
38 38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 39 from rhodecode.lib.vcs.nodes import FileNode
40 40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h, rc_cache
42 42 from rhodecode.lib.auth import (
43 43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 44 HasUserGroupPermissionAny)
@@ -267,16 +267,19 b' class ScmModel(BaseModel):'
267 267 :param repo_name: the repo_name for which caches should be marked
268 268 invalid, or deleted
269 269 :param delete: delete the entry keys instead of setting bool
270 flag on them
270 flag on them, and also purge caches used by the dogpile
271 271 """
272 272 CacheKey.set_invalidate(repo_name, delete=delete)
273 273 repo = Repository.get_by_repo_name(repo_name)
274 274
275 275 if repo:
276 repo_id = repo.repo_id
276 277 config = repo._config
277 278 config.set('extensions', 'largefiles', '')
278 279 repo.update_commit_cache(config=config, cs_cache=None)
279 caches.clear_repo_caches(repo_name)
280 if delete:
281 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
282 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
280 283
281 284 def toggle_following_repo(self, follow_repo_id, user_id):
282 285
@@ -54,6 +54,41 b''
54 54
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading">
57 <h3 class="panel-title">
58 ${_('Cache keys')}
59 </h3>
60 </div>
61 <div class="panel-body">
62 <p>
63 Cache keys used for storing cached values of repository stats,
64 file tree history and file tree search.
65 Invalidating the cache will remove those entries.
66 </p>
67 <pre>
68 region: ${c.region.name}
69 backend: ${c.region.actual_backend.__class__}
70 store: ${c.region.actual_backend.get_store()}
71
72
73 % if c.repo_keys:
74 ${len(c.repo_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
75 <span id="show-keys" style="display: none">
76 % for k in c.repo_keys:
77 - ${k}
78 % endfor
79 </span>
80 % else:
81 NO KEYS FOUND
82 % endif
83
84 </pre>
85
86 </div>
87 </div>
88
89
90 <div class="panel panel-default">
91 <div class="panel-heading">
57 92 <h3 class="panel-title">${_('Shadow Repositories')}</h3>
58 93 </div>
59 94 <div class="panel-body">
@@ -5,16 +5,28 b''
5 5 <h3 class="panel-title">${_('Caches')}</h3>
6 6 </div>
7 7 <div class="panel-body">
8 <p>
9 Cache keys used for storing cached values of user permissions and authentication plugin cache.
10 Invalidating the cache will remove those entries.
11 </p>
12
8 13 <pre>
9 14 region: ${c.region.name}
10 15 backend: ${c.region.actual_backend.__class__}
11 16 store: ${c.region.actual_backend.get_store()}
12 17
18 % if c.user_keys:
19 ${len(c.user_keys)} <a href="#showKeys" onclick="$('#show-keys').toggle()">${_('Show all')}</a>
20 <span id="show-keys" style="display: none">
13 21 % for k in c.user_keys:
14 22 - ${k}
15 23 % endfor
24 </span>
25 % else:
26 NO KEYS FOUND
27 % endif
16 28 </pre>
17
29 <p></p>
18 30 ${h.secure_form(h.route_path('edit_user_caches_update', user_id=c.user.user_id), request=request)}
19 31 <div class="form">
20 32 <div class="fields">
@@ -292,7 +292,7 b' cache_dir = %(here)s/data'
292 292 beaker.cache.data_dir = %(here)s/rc/data/cache/beaker_data
293 293 beaker.cache.lock_dir = %(here)s/rc/data/cache/beaker_lock
294 294
295 beaker.cache.regions = long_term, sql_cache_short, repo_cache_long
295 beaker.cache.regions = long_term, sql_cache_short
296 296
297 297 beaker.cache.long_term.type = memory
298 298 beaker.cache.long_term.expire = 36000
@@ -302,18 +302,6 b' beaker.cache.sql_cache_short.type = memo'
302 302 beaker.cache.sql_cache_short.expire = 1
303 303 beaker.cache.sql_cache_short.key_length = 256
304 304
305 beaker.cache.repo_cache_long.type = memorylru_base
306 beaker.cache.repo_cache_long.max_items = 4096
307 beaker.cache.repo_cache_long.expire = 2592000
308
309 ## default is memorylru_base cache, configure only if required
310 ## using multi-node or multi-worker setup
311 #beaker.cache.repo_cache_long.type = ext:memcached
312 #beaker.cache.repo_cache_long.url = localhost:11211
313 #beaker.cache.repo_cache_long.expire = 1209600
314 #beaker.cache.repo_cache_long.key_length = 256
315
316
317 305 #####################################
318 306 ### DOGPILE CACHE ####
319 307 #####################################