backends.py
51 lines | 1.7 KiB | text/x-python
# RhodeCode VCSServer provides access to different vcs backends via network.
# Copyright (C) 2014-2018 RhodeCode GmbH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import logging

from dogpile.cache.backends import memory as memory_backend

from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug


_default_max_size = 1024

log = logging.getLogger(__name__)


class LRUMemoryBackend(memory_backend.MemoryBackend):
    pickle_values = False

    def __init__(self, arguments):
        max_size = arguments.pop('max_size', _default_max_size)

        LRUDictClass = LRUDict
        if arguments.pop('log_key_count', None):
            LRUDictClass = LRUDictDebug

        arguments['cache_dict'] = LRUDictClass(max_size)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        try:
            del self._cache[key]
        except KeyError:
            # we don't care if key isn't there at deletion
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)
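
A backend like this is plugged into dogpile.cache in the usual way: register it under a name and configure a cache region with it. The sketch below is illustrative only; the registration name and module path are assumptions (this file does not show how vcsserver registers the backend), while the max_size and log_key_count arguments are the ones consumed by LRUMemoryBackend.__init__ above.

from dogpile.cache import make_region, register_backend

# Hypothetical registration name and module path -- adjust to wherever
# backends.py actually lives in the package.
register_backend(
    'rc_cache.lru_memory', 'vcsserver.lib.rc_cache.backends', 'LRUMemoryBackend')

region = make_region().configure(
    'rc_cache.lru_memory',
    arguments={
        'max_size': 4096,       # handed to LRUDict / LRUDictDebug as the bound
        'log_key_count': True,  # switches to the debug LRU dict
    })

region.set('answer', 42)
assert region.get('answer') == 42
region.delete('answer')  # no error even if the key was already evicted

Overriding delete() and delete_multi() to swallow KeyError matters here because the LRU dict can evict entries on its own; a later delete of an already-evicted key must not raise, and delete_multi simply reuses that tolerant per-key delete.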