# Copyright (C) 2015-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import os

import diskcache
from diskcache import RLock

log = logging.getLogger(__name__)
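
# Module-level handle to the archive cache; populated lazily by get_archival_cache_store().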
cache_meta = None


class ReentrantLock(RLock):
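    """
    Context-manager wrapper around diskcache's RLock used while generating
    archive cache entries. Note that the actual ``acquire()``/``release()``
    calls below are commented out, so entering this lock currently only logs
    and does not serialize concurrent archive generation.
    """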

    def __enter__(self):
        reentrant_lock_key = self._key

        log.debug('Acquire ReentrantLock(key=%s) for archive cache generation...', reentrant_lock_key)
        #self.acquire()
        log.debug('Lock for key=%s acquired', reentrant_lock_key)

    def __exit__(self, *exc_info):
        #self.release()
        pass


def get_archival_config(config):
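    """
    Build the effective archive cache config: start from the defaults defined
    here (currently only the eviction policy) and overlay every
    ``archive_cache.*`` key found in the passed-in config.
    """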

    final_config = {
        'archive_cache.eviction_policy': 'least-frequently-used'
    }

    for k, v in config.items():
        if k.startswith('archive_cache'):
            final_config[k] = v

    return final_config


def get_archival_cache_store(config):
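    """
    Return the shared archive cache (a diskcache.FanoutCache), creating it on
    first use and memoizing it in the module-level ``cache_meta``.

    Expected settings keys: ``archive_cache.store_dir``,
    ``archive_cache.cache_size_gb``, ``archive_cache.cache_shards`` and
    ``archive_cache.eviction_policy`` (defaulted in get_archival_config).

    Example (illustrative values only)::

        settings = {
            'archive_cache.store_dir': '/tmp/archive_cache',
            'archive_cache.cache_size_gb': 10,
            'archive_cache.cache_shards': 8,
        }
        d_cache = get_archival_cache_store(settings)
    """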

    global cache_meta
    if cache_meta is not None:
        return cache_meta

    config = get_archival_config(config)

    archive_cache_dir = config['archive_cache.store_dir']
    archive_cache_size_gb = config['archive_cache.cache_size_gb']
    archive_cache_shards = config['archive_cache.cache_shards']
    archive_cache_eviction_policy = config['archive_cache.eviction_policy']

    log.debug('Initializing archival cache instance under %s', archive_cache_dir)

    # make sure the archive cache dir exists so archives can be written to it
    if not os.path.isdir(archive_cache_dir):
        os.makedirs(archive_cache_dir, exist_ok=True)

    d_cache = diskcache.FanoutCache(
        archive_cache_dir, shards=archive_cache_shards,
        cull_limit=0,  # manual eviction required
        size_limit=archive_cache_size_gb * 1024 * 1024 * 1024,
        eviction_policy=archive_cache_eviction_policy,
        timeout=30
    )
    cache_meta = d_cache
    return cache_meta


def includeme(config):
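    """
    Pyramid ``includeme`` hook: eagerly initialize the archive cache at
    application startup from the registry settings.
    """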
    # init our cache at start
    settings = config.get_settings()
    get_archival_cache_store(settings)