# Copyright (C) 2016-2024 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import time
import pytest
import rhodecode
import os
import shutil
from tempfile import mkdtemp

from rhodecode.lib import archive_cache


def file_reader(temp_store):
    with open(temp_store, 'w') as f:
        for cnt in range(10000):
            f.write(str(cnt))
    return open(temp_store, 'rb')
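

# Note: file_reader() writes the decimal representations of 0..9999 back to back,
# i.e. 10*1 + 90*2 + 900*3 + 9000*4 = 38,890 bytes per archive. The size figures
# asserted below (38890, 3889000, 19445000, ...) are multiples of this value.
# The helper below is an illustrative sketch, not part of the original module.
def expected_payload_size(count=10000):
    # byte length of the payload produced by file_reader() for `count` counters
    return sum(len(str(cnt)) for cnt in range(count))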


@pytest.fixture()
def d_cache_instance(ini_settings):
    config = ini_settings
    d_cache = archive_cache.get_archival_cache_store(config=config, always_init=True)
    return d_cache
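

# Minimal usage sketch of the archive_cache API exercised by these tests
# (illustrative only; `settings` stands in for a parsed .ini configuration dict):
#
#   d_cache = archive_cache.get_archival_cache_store(config=settings, always_init=True)
#   d_cache.store('my-archive.zip', open('my-archive.zip', 'rb'), {'foo': 'bar'})
#   reader, meta = d_cache.fetch('my-archive.zip')
#   d_cache.remove('my-archive.zip')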


@pytest.mark.usefixtures('app')
class TestArchiveCaches(object):

    def test_archivecache_empty_stats(self, d_cache_instance):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        stats = d_cache.get_statistics()
        assert (0, 0, {}) == stats

    def test_archivecache_store_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        for n in range(100):
            archive_name = f'my-archive-abc-{n}.zip'
            temp_archive_path = os.path.join(tmp_path, archive_name)
            d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})

            reader, meta = d_cache.fetch(archive_name)
            content = reader.read()
            assert content == open(temp_archive_path, 'rb').read()

        stats = d_cache.get_statistics()
        assert (100, 3889000, {}) == stats

    def test_archivecache_remove_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        n = 1
        archive_name = f'my-archive-abc-{n}.zip'
        temp_archive_path = os.path.join(tmp_path, archive_name)
        d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})

        stats = d_cache.get_statistics()
        assert (1, 38890, {}) == stats

        assert 1 == d_cache.remove(archive_name)

        stats = d_cache.get_statistics()
        assert (0, 0, {}) == stats

    def test_archivecache_evict_keys(self, d_cache_instance, tmp_path):
        d_cache = d_cache_instance
        shutil.rmtree(d_cache._directory)

        tries = 500
        for n in range(tries):
            archive_name = f'my-archive-abc-{n}.zip'
            temp_archive_path = os.path.join(tmp_path, archive_name)
            d_cache.store(archive_name, file_reader(temp_archive_path), {'foo': 'bar'})

        stats = d_cache.get_statistics()
        assert (tries, 19445000, {}) == stats

        evict_to = 0.005  # around (5mb)
        evicted_items = d_cache.evict(size_limit=d_cache.gb_to_bytes(evict_to))
        evicted = 361

        assert {'removed_items': evicted, 'removed_size': 14039290} == evicted_items

        stats = d_cache.get_statistics()
        assert (tries - evicted, 5405710, {}) == stats
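
        # Rough sketch of the arithmetic behind the eviction assertions above
        # (illustrative comment only; the exact gb_to_bytes() conversion is an
        # assumption, the authoritative implementation lives in rhodecode.lib.archive_cache):
        #
        #   stored before eviction: 500 archives * 38,890 bytes = 19,445,000 bytes
        #   eviction target:        gb_to_bytes(0.005), roughly 5 MB
        #   evicted:                361 archives * 38,890 bytes = 14,039,290 bytes
        #   remaining:              139 archives * 38,890 bytes =  5,405,710 bytes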