sparse-revlog: fix delta validity computation

When considering the validity of a delta with sparse-revlog, we check the size
of the largest read. To do so, we reuse the regular logic, feeding it the extra
delta information. Some of this logic did not handle that extra delta properly,
confusing it with "nullrev".

This confusion with nullrev led to wrong results for this computation, but
without crashing. Changeset 781b2720d2ac on default revealed the error by
crashing.

This changeset fixes the logic on stable so that the computation is correct
(and the crash is averted). The fix is made on stable because it affects 4.7
clients interacting with sparse-revlog repositories (e.g. created by a later
version).
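To make the failure mode concrete, here is a toy sketch of the kind of read-size computation involved. It is not Mercurial's actual revlog code: apart from nullrev being -1, the function, variable names, and simplified size logic are invented for illustration only.

# Illustrative sketch only -- not Mercurial's revlog code. Apart from
# nullrev == -1, every name and the simplified size logic are invented to
# show the failure mode described above.
nullrev = -1

def toy_segment_size(revs, stored_length, candidate_rev=None, candidate_length=0):
    """Sum the stored lengths of ``revs``.

    The revision currently being added is not in ``stored_length`` yet; its
    size must come from the extra delta information (``candidate_length``).
    """
    total = 0
    for rev in revs:
        if rev == nullrev:
            continue                      # nullrev stores no data
        elif rev == candidate_rev:
            total += candidate_length     # length comes from the delta info
        else:
            total += stored_length[rev]
    return total

stored_length = {0: 120, 1: 40, 2: 35}
# Correct handling counts the incoming revision 3: 120 + 40 + 35 + 50 = 245.
print(toy_segment_size([0, 1, 2, 3], stored_length, candidate_rev=3, candidate_length=50))
# Confusing revision 3 with nullrev would silently drop its 50 bytes and
# report 195, underestimating the largest read instead of crashing.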

File last commit: r37513:b1fb341d default
Changeset: r39179:3730b779 stable
contrib/python-zstandard/tests/test_data_structures_fuzzing.py
75 lines | 3.5 KiB | text/x-python
import io
import os
import sys
import unittest

try:
    import hypothesis
    import hypothesis.strategies as strategies
except ImportError:
    raise unittest.SkipTest('hypothesis not available')

import zstandard as zstd

from .common import (
    make_cffi,
)


# Hypothesis strategies covering the legal range of each compression parameter.
s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
                                  max_value=zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
                                 max_value=zstd.CHAINLOG_MAX)
s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
                                max_value=zstd.HASHLOG_MAX)
s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
                                  max_value=zstd.SEARCHLOG_MAX)
s_searchlength = strategies.integers(min_value=zstd.SEARCHLENGTH_MIN,
                                     max_value=zstd.SEARCHLENGTH_MAX)
s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
                                     max_value=2**32)
s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
                                      zstd.STRATEGY_DFAST,
                                      zstd.STRATEGY_GREEDY,
                                      zstd.STRATEGY_LAZY,
                                      zstd.STRATEGY_LAZY2,
                                      zstd.STRATEGY_BTLAZY2,
                                      zstd.STRATEGY_BTOPT,
                                      zstd.STRATEGY_BTULTRA))


@make_cffi
@unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
class TestCompressionParametersHypothesis(unittest.TestCase):
    @hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
                      s_searchlength, s_targetlength, s_strategy)
    def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
                        searchlength, targetlength, strategy):
        # Constructing the parameters must not raise for any in-range values.
        zstd.ZstdCompressionParameters(window_log=windowlog,
                                       chain_log=chainlog,
                                       hash_log=hashlog,
                                       search_log=searchlog,
                                       min_match=searchlength,
                                       target_length=targetlength,
                                       compression_strategy=strategy)

    @hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
                      s_searchlength, s_targetlength, s_strategy)
    def test_estimated_compression_context_size(self, windowlog, chainlog,
                                                hashlog, searchlog,
                                                searchlength, targetlength,
                                                strategy):
        # Steer clear of searchlength/strategy combinations that zstd rejects.
        if searchlength == zstd.SEARCHLENGTH_MIN and strategy in (zstd.STRATEGY_FAST, zstd.STRATEGY_GREEDY):
            searchlength += 1
        elif searchlength == zstd.SEARCHLENGTH_MAX and strategy != zstd.STRATEGY_FAST:
            searchlength -= 1

        p = zstd.ZstdCompressionParameters(window_log=windowlog,
                                           chain_log=chainlog,
                                           hash_log=hashlog,
                                           search_log=searchlog,
                                           min_match=searchlength,
                                           target_length=targetlength,
                                           compression_strategy=strategy)
        # Smoke test: the estimate only needs to be computable; no assertion.
        size = p.estimated_compression_context_size()
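
Because of the @unittest.skipUnless guard above, these hypothesis-driven fuzzing tests are skipped unless ZSTD_SLOW_TESTS is present in the environment. A minimal sketch of one way to run them, assuming it is executed from the contrib/python-zstandard directory and that the tests directory is importable as a package (this runner is not part of the file above):

import os
import unittest

# Opt in to the slow fuzzing tests before the test module is imported, so the
# skipUnless decorator sees the variable.
os.environ.setdefault('ZSTD_SLOW_TESTS', '1')

suite = unittest.TestLoader().discover('tests',
                                       pattern='test_data_structures_fuzzing.py')
unittest.TextTestRunner(verbosity=2).run(suite)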