@@ -9,7 +9,6 b' build/' | |||
|
9 | 9 | | \.mypy_cache/ |
|
10 | 10 | | \.venv/ |
|
11 | 11 | | mercurial/thirdparty/ |
|
12 | | contrib/python-zstandard/ | |
|
13 | 12 | ''' |
|
14 | 13 | skip-string-normalization = true |
|
15 | 14 | quiet = true |
@@ -6,7 +6,7 b' rustfmt:command = rustfmt +nightly' | |||
|
6 | 6 | rustfmt:pattern = set:**.rs |
|
7 | 7 | |
|
8 | 8 | black:command = black --config=black.toml - |
|
9 | black:pattern = set:**.py - mercurial/thirdparty/** | |
|
9 | black:pattern = set:**.py - mercurial/thirdparty/** | |
|
10 | 10 | |
|
11 | 11 | # Mercurial doesn't have any Go code, but if we did this is how we |
|
12 | 12 | # would configure `hg fix` for Go: |
@@ -52,7 +52,8 b' SOURCES = [' | |||
|
52 | 52 | |
|
53 | 53 | # Headers whose preprocessed output will be fed into cdef(). |
|
54 | 54 | HEADERS = [ |
|
55 | os.path.join(HERE, "zstd", *p) for p in (("zstd.h",), ("dictBuilder", "zdict.h"),) | |
|
55 | os.path.join(HERE, "zstd", *p) | |
|
56 | for p in (("zstd.h",), ("dictBuilder", "zdict.h"),) | |
|
56 | 57 | ] |
|
57 | 58 | |
|
58 | 59 | INCLUDE_DIRS = [ |
@@ -139,7 +140,9 b' def preprocess(path):' | |||
|
139 | 140 | env = dict(os.environ) |
|
140 | 141 | if getattr(compiler, "_paths", None): |
|
141 | 142 | env["PATH"] = compiler._paths |
|
142 | process = subprocess.Popen(args + [input_file], stdout=subprocess.PIPE, env=env) | |
|
143 | process = subprocess.Popen( | |
|
144 | args + [input_file], stdout=subprocess.PIPE, env=env | |
|
145 | ) | |
|
143 | 146 | output = process.communicate()[0] |
|
144 | 147 | ret = process.poll() |
|
145 | 148 | if ret: |
@@ -87,7 +87,9 b' with open("c-ext/python-zstandard.h", "r' | |||
|
87 | 87 | break |
|
88 | 88 | |
|
89 | 89 | if not version: |
|
90 | raise Exception("could not resolve package version; " "this should never happen") | |
|
90 | raise Exception( | |
|
91 | "could not resolve package version; " "this should never happen" | |
|
92 | ) | |
|
91 | 93 | |
|
92 | 94 | setup( |
|
93 | 95 | name="zstandard", |
@@ -138,12 +138,16 b' def get_c_extension(' | |||
|
138 | 138 | if not system_zstd: |
|
139 | 139 | sources.update([os.path.join(actual_root, p) for p in zstd_sources]) |
|
140 | 140 | if support_legacy: |
|
141 | sources.update([os.path.join(actual_root, p) for p in zstd_sources_legacy]) | |
|
141 | sources.update( | |
|
142 | [os.path.join(actual_root, p) for p in zstd_sources_legacy] | |
|
143 | ) | |
|
142 | 144 | sources = list(sources) |
|
143 | 145 | |
|
144 | 146 | include_dirs = set([os.path.join(actual_root, d) for d in ext_includes]) |
|
145 | 147 | if not system_zstd: |
|
146 | include_dirs.update([os.path.join(actual_root, d) for d in zstd_includes]) | |
|
148 | include_dirs.update( | |
|
149 | [os.path.join(actual_root, d) for d in zstd_includes] | |
|
150 | ) | |
|
147 | 151 | if support_legacy: |
|
148 | 152 | include_dirs.update( |
|
149 | 153 | [os.path.join(actual_root, d) for d in zstd_includes_legacy] |
@@ -50,7 +50,9 b' def make_cffi(cls):' | |||
|
50 | 50 | os.environ.update(old_env) |
|
51 | 51 | |
|
52 | 52 | if mod.backend != "cffi": |
|
53 | raise Exception("got the zstandard %s backend instead of cffi" % mod.backend) | |
|
53 | raise Exception( | |
|
54 | "got the zstandard %s backend instead of cffi" % mod.backend | |
|
55 | ) | |
|
54 | 56 | |
|
55 | 57 | # If CFFI version is available, dynamically construct test methods |
|
56 | 58 | # that use it. |
@@ -84,7 +86,9 b' def make_cffi(cls):' | |||
|
84 | 86 | fn.__func__.func_defaults, |
|
85 | 87 | fn.__func__.func_closure, |
|
86 | 88 | ) |
|
87 | new_method = types.UnboundMethodType(new_fn, fn.im_self, fn.im_class) | |
|
89 | new_method = types.UnboundMethodType( | |
|
90 | new_fn, fn.im_self, fn.im_class | |
|
91 | ) | |
|
88 | 92 | |
|
89 | 93 | setattr(cls, name, new_method) |
|
90 | 94 | |
@@ -194,4 +198,6 b' if hypothesis:' | |||
|
194 | 198 | expensive_settings = hypothesis.settings(deadline=None, max_examples=10000) |
|
195 | 199 | hypothesis.settings.register_profile("expensive", expensive_settings) |
|
196 | 200 | |
|
197 | hypothesis.settings.load_profile(os.environ.get("HYPOTHESIS_PROFILE", "default")) | |
|
201 | hypothesis.settings.load_profile( | |
|
202 | os.environ.get("HYPOTHESIS_PROFILE", "default") | |
|
203 | ) |
@@ -67,7 +67,8 b' class TestBufferWithSegments(TestCase):' | |||
|
67 | 67 | self.skipTest("BufferWithSegments not available") |
|
68 | 68 | |
|
69 | 69 | b = zstd.BufferWithSegments( |
|
70 | b"foofooxfooxy", b"".join([ss.pack(0, 3), ss.pack(3, 4), ss.pack(7, 5)]) | |
|
70 | b"foofooxfooxy", | |
|
71 | b"".join([ss.pack(0, 3), ss.pack(3, 4), ss.pack(7, 5)]), | |
|
71 | 72 | ) |
|
72 | 73 | self.assertEqual(len(b), 3) |
|
73 | 74 | self.assertEqual(b.size, 12) |
@@ -83,17 +84,23 b' class TestBufferWithSegmentsCollection(T' | |||
|
83 | 84 | if not hasattr(zstd, "BufferWithSegmentsCollection"): |
|
84 | 85 | self.skipTest("BufferWithSegmentsCollection not available") |
|
85 | 86 | |
|
86 | with self.assertRaisesRegex(ValueError, "must pass at least 1 argument"): | |
|
87 | with self.assertRaisesRegex( | |
|
88 | ValueError, "must pass at least 1 argument" | |
|
89 | ): | |
|
87 | 90 | zstd.BufferWithSegmentsCollection() |
|
88 | 91 | |
|
89 | 92 | def test_argument_validation(self): |
|
90 | 93 | if not hasattr(zstd, "BufferWithSegmentsCollection"): |
|
91 | 94 | self.skipTest("BufferWithSegmentsCollection not available") |
|
92 | 95 | |
|
93 | with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"): | |
|
96 | with self.assertRaisesRegex( | |
|
97 | TypeError, "arguments must be BufferWithSegments" | |
|
98 | ): | |
|
94 | 99 | zstd.BufferWithSegmentsCollection(None) |
|
95 | 100 | |
|
96 | with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"): | |
|
101 | with self.assertRaisesRegex( | |
|
102 | TypeError, "arguments must be BufferWithSegments" | |
|
103 | ): | |
|
97 | 104 | zstd.BufferWithSegmentsCollection( |
|
98 | 105 | zstd.BufferWithSegments(b"foo", ss.pack(0, 3)), None |
|
99 | 106 | ) |
@@ -24,7 +24,9 b' else:' | |||
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def multithreaded_chunk_size(level, source_size=0): |
|
27 | params = zstd.ZstdCompressionParameters.from_level(level, source_size=source_size) | |
|
27 | params = zstd.ZstdCompressionParameters.from_level( | |
|
28 | level, source_size=source_size | |
|
29 | ) | |
|
28 | 30 | |
|
29 | 31 | return 1 << (params.window_log + 2) |
|
30 | 32 | |
@@ -86,7 +88,9 b' class TestCompressor_compress(TestCase):' | |||
|
86 | 88 | |
|
87 | 89 | # This matches the test for read_to_iter() below. |
|
88 | 90 | cctx = zstd.ZstdCompressor(level=1, write_content_size=False) |
|
89 | result = cctx.compress(b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b"o") | |
|
91 | result = cctx.compress( | |
|
92 | b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b"o" | |
|
93 | ) | |
|
90 | 94 | self.assertEqual( |
|
91 | 95 | result, |
|
92 | 96 | b"\x28\xb5\x2f\xfd\x00\x40\x54\x00\x00" |
@@ -99,7 +103,9 b' class TestCompressor_compress(TestCase):' | |||
|
99 | 103 | result = cctx.compress(b"foo" * 256) |
|
100 | 104 | |
|
101 | 105 | def test_no_magic(self): |
|
102 | params = zstd.ZstdCompressionParameters.from_level(1, format=zstd.FORMAT_ZSTD1) | |
|
106 | params = zstd.ZstdCompressionParameters.from_level( | |
|
107 | 1, format=zstd.FORMAT_ZSTD1 | |
|
108 | ) | |
|
103 | 109 | cctx = zstd.ZstdCompressor(compression_params=params) |
|
104 | 110 | magic = cctx.compress(b"foobar") |
|
105 | 111 | |
@@ -223,7 +229,8 b' class TestCompressor_compress(TestCase):' | |||
|
223 | 229 | |
|
224 | 230 | self.assertEqual( |
|
225 | 231 | result, |
|
226 | b"\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00" b"\x66\x6f\x6f", | |
|
232 | b"\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00" | |
|
233 | b"\x66\x6f\x6f", | |
|
227 | 234 | ) |
|
228 | 235 | |
|
229 | 236 | def test_multithreaded_compression_params(self): |
@@ -234,7 +241,9 b' class TestCompressor_compress(TestCase):' | |||
|
234 | 241 | params = zstd.get_frame_parameters(result) |
|
235 | 242 | self.assertEqual(params.content_size, 3) |
|
236 | 243 | |
|
237 | self.assertEqual(result, b"\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f") | |
|
244 | self.assertEqual( | |
|
245 | result, b"\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f" | |
|
246 | ) | |
|
238 | 247 | |
|
239 | 248 | |
|
240 | 249 | @make_cffi |
@@ -347,7 +356,9 b' class TestCompressor_compressobj(TestCas' | |||
|
347 | 356 | ) |
|
348 | 357 | self.assertEqual(cobj.compress(b"bar"), b"") |
|
349 | 358 | # 3 byte header plus content. |
|
350 | self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"\x18\x00\x00bar") | |
|
359 | self.assertEqual( | |
|
360 | cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"\x18\x00\x00bar" | |
|
361 | ) | |
|
351 | 362 | self.assertEqual(cobj.flush(), b"\x01\x00\x00") |
|
352 | 363 | |
|
353 | 364 | def test_flush_empty_block(self): |
@@ -445,7 +456,9 b' class TestCompressor_copy_stream(TestCas' | |||
|
445 | 456 | self.assertEqual(int(r), 0) |
|
446 | 457 | self.assertEqual(w, 9) |
|
447 | 458 | |
|
448 | self.assertEqual(dest.getvalue(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00") | |
|
459 | self.assertEqual( | |
|
460 | dest.getvalue(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00" | |
|
461 | ) | |
|
449 | 462 | |
|
450 | 463 | def test_large_data(self): |
|
451 | 464 | source = io.BytesIO() |
@@ -478,7 +491,9 b' class TestCompressor_copy_stream(TestCas' | |||
|
478 | 491 | cctx = zstd.ZstdCompressor(level=1, write_checksum=True) |
|
479 | 492 | cctx.copy_stream(source, with_checksum) |
|
480 | 493 | |
|
481 | self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4) | |
|
494 | self.assertEqual( | |
|
495 | len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4 | |
|
496 | ) | |
|
482 | 497 | |
|
483 | 498 | no_params = zstd.get_frame_parameters(no_checksum.getvalue()) |
|
484 | 499 | with_params = zstd.get_frame_parameters(with_checksum.getvalue()) |
@@ -585,7 +600,9 b' class TestCompressor_stream_reader(TestC' | |||
|
585 | 600 | cctx = zstd.ZstdCompressor() |
|
586 | 601 | |
|
587 | 602 | with cctx.stream_reader(b"foo") as reader: |
|
588 | with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"): | |
|
603 | with self.assertRaisesRegex( | |
|
604 | ValueError, "cannot __enter__ multiple times" | |
|
605 | ): | |
|
589 | 606 | with reader as reader2: |
|
590 | 607 | pass |
|
591 | 608 | |
@@ -744,7 +761,9 b' class TestCompressor_stream_reader(TestC' | |||
|
744 | 761 | source = io.BytesIO(b"foobar") |
|
745 | 762 | |
|
746 | 763 | with cctx.stream_reader(source, size=2) as reader: |
|
747 | with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"): | |
|
764 | with self.assertRaisesRegex( | |
|
765 | zstd.ZstdError, "Src size is incorrect" | |
|
766 | ): | |
|
748 | 767 | reader.read(10) |
|
749 | 768 | |
|
750 | 769 | # Try another compression operation. |
@@ -1126,7 +1145,9 b' class TestCompressor_stream_writer(TestC' | |||
|
1126 | 1145 | self.assertFalse(no_params.has_checksum) |
|
1127 | 1146 | self.assertTrue(with_params.has_checksum) |
|
1128 | 1147 | |
|
1129 | self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4) | |
|
1148 | self.assertEqual( | |
|
1149 | len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4 | |
|
1150 | ) | |
|
1130 | 1151 | |
|
1131 | 1152 | def test_write_content_size(self): |
|
1132 | 1153 | no_size = NonClosingBytesIO() |
@@ -1145,7 +1166,9 b' class TestCompressor_stream_writer(TestC' | |||
|
1145 | 1166 | |
|
1146 | 1167 | # Declaring size will write the header. |
|
1147 | 1168 | with_size = NonClosingBytesIO() |
|
1148 | with cctx.stream_writer(with_size, size=len(b"foobar" * 256)) as compressor: | |
|
1169 | with cctx.stream_writer( | |
|
1170 | with_size, size=len(b"foobar" * 256) | |
|
1171 | ) as compressor: | |
|
1149 | 1172 | self.assertEqual(compressor.write(b"foobar" * 256), 0) |
|
1150 | 1173 | |
|
1151 | 1174 | no_params = zstd.get_frame_parameters(no_size.getvalue()) |
@@ -1191,7 +1214,9 b' class TestCompressor_stream_writer(TestC' | |||
|
1191 | 1214 | self.assertFalse(no_params.has_checksum) |
|
1192 | 1215 | self.assertFalse(with_params.has_checksum) |
|
1193 | 1216 | |
|
1194 | self.assertEqual(len(with_dict_id.getvalue()), len(no_dict_id.getvalue()) + 4) | |
|
1217 | self.assertEqual( | |
|
1218 | len(with_dict_id.getvalue()), len(no_dict_id.getvalue()) + 4 | |
|
1219 | ) | |
|
1195 | 1220 | |
|
1196 | 1221 | def test_memory_size(self): |
|
1197 | 1222 | cctx = zstd.ZstdCompressor(level=3) |
@@ -1337,7 +1362,9 b' class TestCompressor_read_to_iter(TestCa' | |||
|
1337 | 1362 | for chunk in cctx.read_to_iter(b"foobar"): |
|
1338 | 1363 | pass |
|
1339 | 1364 | |
|
1340 | with self.assertRaisesRegex(ValueError, "must pass an object with a read"): | |
|
1365 | with self.assertRaisesRegex( | |
|
1366 | ValueError, "must pass an object with a read" | |
|
1367 | ): | |
|
1341 | 1368 | for chunk in cctx.read_to_iter(True): |
|
1342 | 1369 | pass |
|
1343 | 1370 | |
@@ -1513,7 +1540,9 b' class TestCompressor_chunker(TestCase):' | |||
|
1513 | 1540 | |
|
1514 | 1541 | dctx = zstd.ZstdDecompressor() |
|
1515 | 1542 | |
|
1516 | self.assertEqual(dctx.decompress(b"".join(chunks)), (b"x" * 1000) + (b"y" * 24)) | |
|
1543 | self.assertEqual( | |
|
1544 | dctx.decompress(b"".join(chunks)), (b"x" * 1000) + (b"y" * 24) | |
|
1545 | ) | |
|
1517 | 1546 | |
|
1518 | 1547 | def test_small_chunk_size(self): |
|
1519 | 1548 | cctx = zstd.ZstdCompressor() |
@@ -1533,7 +1562,8 b' class TestCompressor_chunker(TestCase):' | |||
|
1533 | 1562 | |
|
1534 | 1563 | dctx = zstd.ZstdDecompressor() |
|
1535 | 1564 | self.assertEqual( |
|
1536 | dctx.decompress(b"".join(chunks), max_output_size=10000), | |
|
1565 | dctx.decompress(b"".join(chunks), max_output_size=10000), | |
|
1566 | b"foo" * 1024, | |
|
1537 | 1567 | ) |
|
1538 | 1568 | |
|
1539 | 1569 | def test_input_types(self): |
@@ -1602,7 +1632,8 b' class TestCompressor_chunker(TestCase):' | |||
|
1602 | 1632 | list(chunker.finish()) |
|
1603 | 1633 | |
|
1604 | 1634 | with self.assertRaisesRegex( |
|
1605 | zstd.ZstdError, r"cannot call compress\(\) after compression finished" | |
|
1635 | zstd.ZstdError, | |
|
1636 | r"cannot call compress\(\) after compression finished", | |
|
1606 | 1637 | ): |
|
1607 | 1638 | list(chunker.compress(b"foo")) |
|
1608 | 1639 | |
@@ -1644,7 +1675,9 b' class TestCompressor_multi_compress_to_b' | |||
|
1644 | 1675 | with self.assertRaises(TypeError): |
|
1645 | 1676 | cctx.multi_compress_to_buffer((1, 2)) |
|
1646 | 1677 | |
|
1647 | with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"): | |
|
1678 | with self.assertRaisesRegex( | |
|
1679 | TypeError, "item 0 not a bytes like object" | |
|
1680 | ): | |
|
1648 | 1681 | cctx.multi_compress_to_buffer([u"foo"]) |
|
1649 | 1682 | |
|
1650 | 1683 | def test_empty_input(self): |
@@ -28,9 +28,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
28 | 28 | original=strategies.sampled_from(random_input_data()), |
|
29 | 29 | level=strategies.integers(min_value=1, max_value=5), |
|
30 | 30 | source_read_size=strategies.integers(1, 16384), |
|
31 | read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
31 | read_size=strategies.integers( | |
|
32 | -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
33 | ), | |
|
32 | 34 | ) |
|
33 | def test_stream_source_read(self, original, level, source_read_size, read_size): | |
|
35 | def test_stream_source_read( | |
|
36 | self, original, level, source_read_size, read_size | |
|
37 | ): | |
|
34 | 38 | if read_size == 0: |
|
35 | 39 | read_size = -1 |
|
36 | 40 | |
@@ -58,9 +62,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
58 | 62 | original=strategies.sampled_from(random_input_data()), |
|
59 | 63 | level=strategies.integers(min_value=1, max_value=5), |
|
60 | 64 | source_read_size=strategies.integers(1, 16384), |
|
61 | read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
65 | read_size=strategies.integers( | |
|
66 | -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
67 | ), | |
|
62 | 68 | ) |
|
63 | def test_buffer_source_read(self, original, level, source_read_size, read_size): | |
|
69 | def test_buffer_source_read( | |
|
70 | self, original, level, source_read_size, read_size | |
|
71 | ): | |
|
64 | 72 | if read_size == 0: |
|
65 | 73 | read_size = -1 |
|
66 | 74 | |
@@ -155,9 +163,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
155 | 163 | original=strategies.sampled_from(random_input_data()), |
|
156 | 164 | level=strategies.integers(min_value=1, max_value=5), |
|
157 | 165 | source_read_size=strategies.integers(1, 16384), |
|
158 | read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
166 | read_size=strategies.integers( | |
|
167 | 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
168 | ), | |
|
159 | 169 | ) |
|
160 | def test_stream_source_readinto(self, original, level, source_read_size, read_size): | |
|
170 | def test_stream_source_readinto( | |
|
171 | self, original, level, source_read_size, read_size | |
|
172 | ): | |
|
161 | 173 | refctx = zstd.ZstdCompressor(level=level) |
|
162 | 174 | ref_frame = refctx.compress(original) |
|
163 | 175 | |
@@ -184,9 +196,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
184 | 196 | original=strategies.sampled_from(random_input_data()), |
|
185 | 197 | level=strategies.integers(min_value=1, max_value=5), |
|
186 | 198 | source_read_size=strategies.integers(1, 16384), |
|
187 | read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
199 | read_size=strategies.integers( | |
|
200 | 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
201 | ), | |
|
188 | 202 | ) |
|
189 | def test_buffer_source_readinto(self, original, level, source_read_size, read_size): | |
|
203 | def test_buffer_source_readinto( | |
|
204 | self, original, level, source_read_size, read_size | |
|
205 | ): | |
|
190 | 206 | |
|
191 | 207 | refctx = zstd.ZstdCompressor(level=level) |
|
192 | 208 | ref_frame = refctx.compress(original) |
@@ -285,9 +301,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
285 | 301 | original=strategies.sampled_from(random_input_data()), |
|
286 | 302 | level=strategies.integers(min_value=1, max_value=5), |
|
287 | 303 | source_read_size=strategies.integers(1, 16384), |
|
288 | read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
304 | read_size=strategies.integers( | |
|
305 | -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
306 | ), | |
|
289 | 307 | ) |
|
290 | def test_stream_source_read1(self, original, level, source_read_size, read_size): | |
|
308 | def test_stream_source_read1( | |
|
309 | self, original, level, source_read_size, read_size | |
|
310 | ): | |
|
291 | 311 | if read_size == 0: |
|
292 | 312 | read_size = -1 |
|
293 | 313 | |
@@ -315,9 +335,13 b' class TestCompressor_stream_reader_fuzzi' | |||
|
315 | 335 | original=strategies.sampled_from(random_input_data()), |
|
316 | 336 | level=strategies.integers(min_value=1, max_value=5), |
|
317 | 337 | source_read_size=strategies.integers(1, 16384), |
|
318 | read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
338 | read_size=strategies.integers( | |
|
339 | -1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
340 | ), | |
|
319 | 341 | ) |
|
320 | def test_buffer_source_read1(self, original, level, source_read_size, read_size): | |
|
342 | def test_buffer_source_read1( | |
|
343 | self, original, level, source_read_size, read_size | |
|
344 | ): | |
|
321 | 345 | if read_size == 0: |
|
322 | 346 | read_size = -1 |
|
323 | 347 | |
@@ -412,7 +436,9 b' class TestCompressor_stream_reader_fuzzi' | |||
|
412 | 436 | original=strategies.sampled_from(random_input_data()), |
|
413 | 437 | level=strategies.integers(min_value=1, max_value=5), |
|
414 | 438 | source_read_size=strategies.integers(1, 16384), |
|
415 | read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
439 | read_size=strategies.integers( | |
|
440 | 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
441 | ), | |
|
416 | 442 | ) |
|
417 | 443 | def test_stream_source_readinto1( |
|
418 | 444 | self, original, level, source_read_size, read_size |
@@ -446,7 +472,9 b' class TestCompressor_stream_reader_fuzzi' | |||
|
446 | 472 | original=strategies.sampled_from(random_input_data()), |
|
447 | 473 | level=strategies.integers(min_value=1, max_value=5), |
|
448 | 474 | source_read_size=strategies.integers(1, 16384), |
|
449 | read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE), | |
|
475 | read_size=strategies.integers( | |
|
476 | 1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
477 | ), | |
|
450 | 478 | ) |
|
451 | 479 | def test_buffer_source_readinto1( |
|
452 | 480 | self, original, level, source_read_size, read_size |
@@ -576,7 +604,9 b' class TestCompressor_copy_stream_fuzzing' | |||
|
576 | 604 | read_size=strategies.integers(min_value=1, max_value=1048576), |
|
577 | 605 | write_size=strategies.integers(min_value=1, max_value=1048576), |
|
578 | 606 | ) |
|
579 | def test_read_write_size_variance(self, original, level, read_size, write_size): | |
|
607 | def test_read_write_size_variance( | |
|
608 | self, original, level, read_size, write_size | |
|
609 | ): | |
|
580 | 610 | refctx = zstd.ZstdCompressor(level=level) |
|
581 | 611 | ref_frame = refctx.compress(original) |
|
582 | 612 | |
@@ -585,7 +615,11 b' class TestCompressor_copy_stream_fuzzing' | |||
|
585 | 615 | dest = io.BytesIO() |
|
586 | 616 | |
|
587 | 617 | cctx.copy_stream( |
|
588 | source, dest, size=len(original), read_size=read_size, write_size=write_size | |
|
618 | source, | |
|
619 | dest, | |
|
620 | size=len(original), | |
|
621 | read_size=read_size, | |
|
622 | write_size=write_size, | |
|
589 | 623 | ) |
|
590 | 624 | |
|
591 | 625 | self.assertEqual(dest.getvalue(), ref_frame) |
@@ -675,7 +709,9 b' class TestCompressor_compressobj_fuzzing' | |||
|
675 | 709 | decompressed_chunks.append(dobj.decompress(chunk)) |
|
676 | 710 | |
|
677 | 711 | self.assertEqual( |
|
678 | dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)), | |
|
712 | dctx.decompress( | |
|
713 | b"".join(compressed_chunks), max_output_size=len(original) | |
|
714 | ), | |
|
679 | 715 | original, |
|
680 | 716 | ) |
|
681 | 717 | self.assertEqual(b"".join(decompressed_chunks), original) |
@@ -690,7 +726,9 b' class TestCompressor_read_to_iter_fuzzin' | |||
|
690 | 726 | read_size=strategies.integers(min_value=1, max_value=4096), |
|
691 | 727 | write_size=strategies.integers(min_value=1, max_value=4096), |
|
692 | 728 | ) |
|
693 | def test_read_write_size_variance(self, original, level, read_size, write_size): | |
|
729 | def test_read_write_size_variance( | |
|
730 | self, original, level, read_size, write_size | |
|
731 | ): | |
|
694 | 732 | refcctx = zstd.ZstdCompressor(level=level) |
|
695 | 733 | ref_frame = refcctx.compress(original) |
|
696 | 734 | |
@@ -699,7 +737,10 b' class TestCompressor_read_to_iter_fuzzin' | |||
|
699 | 737 | cctx = zstd.ZstdCompressor(level=level) |
|
700 | 738 | chunks = list( |
|
701 | 739 | cctx.read_to_iter( |
|
702 | source, size=len(original), read_size=read_size, write_size=write_size | |
|
740 | source, | |
|
741 | size=len(original), | |
|
742 | read_size=read_size, | |
|
743 | write_size=write_size, | |
|
703 | 744 | ) |
|
704 | 745 | ) |
|
705 | 746 | |
@@ -710,7 +751,9 b' class TestCompressor_read_to_iter_fuzzin' | |||
|
710 | 751 | class TestCompressor_multi_compress_to_buffer_fuzzing(TestCase): |
|
711 | 752 | @hypothesis.given( |
|
712 | 753 | original=strategies.lists( |
|
713 | strategies.sampled_from(random_input_data()), | |
|
754 | strategies.sampled_from(random_input_data()), | |
|
755 | min_size=1, | |
|
756 | max_size=1024, | |
|
714 | 757 | ), |
|
715 | 758 | threads=strategies.integers(min_value=1, max_value=8), |
|
716 | 759 | use_dict=strategies.booleans(), |
@@ -776,7 +819,8 b' class TestCompressor_chunker_fuzzing(Tes' | |||
|
776 | 819 | dctx = zstd.ZstdDecompressor() |
|
777 | 820 | |
|
778 | 821 | self.assertEqual( |
|
779 | dctx.decompress(b"".join(chunks), max_output_size=len(original)), | |
|
822 | dctx.decompress(b"".join(chunks), max_output_size=len(original)), | |
|
823 | original, | |
|
780 | 824 | ) |
|
781 | 825 | |
|
782 | 826 | self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1])) |
@@ -794,7 +838,9 b' class TestCompressor_chunker_fuzzing(Tes' | |||
|
794 | 838 | input_sizes=strategies.data(), |
|
795 | 839 | flushes=strategies.data(), |
|
796 | 840 | ) |
|
797 | def test_flush_block(self, original, level, chunk_size, input_sizes, flushes): | |
|
841 | def test_flush_block( | |
|
842 | self, original, level, chunk_size, input_sizes, flushes | |
|
843 | ): | |
|
798 | 844 | cctx = zstd.ZstdCompressor(level=level) |
|
799 | 845 | chunker = cctx.chunker(chunk_size=chunk_size) |
|
800 | 846 | |
@@ -830,7 +876,9 b' class TestCompressor_chunker_fuzzing(Tes' | |||
|
830 | 876 | decompressed_chunks.append(dobj.decompress(b"".join(chunks))) |
|
831 | 877 | |
|
832 | 878 | self.assertEqual( |
|
833 | dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)), | |
|
879 | dctx.decompress( | |
|
880 | b"".join(compressed_chunks), max_output_size=len(original) | |
|
881 | ), | |
|
834 | 882 | original, |
|
835 | 883 | ) |
|
836 | 884 | self.assertEqual(b"".join(decompressed_chunks), original) |
@@ -65,7 +65,9 b' class TestCompressionParameters(TestCase' | |||
|
65 | 65 | p = zstd.ZstdCompressionParameters(threads=4) |
|
66 | 66 | self.assertEqual(p.threads, 4) |
|
67 | 67 | |
|
68 | p = zstd.ZstdCompressionParameters(threads=2, job_size=1048576, overlap_log=6) | |
|
68 | p = zstd.ZstdCompressionParameters( | |
|
69 | threads=2, job_size=1048576, overlap_log=6 | |
|
70 | ) | |
|
69 | 71 | self.assertEqual(p.threads, 2) |
|
70 | 72 | self.assertEqual(p.job_size, 1048576) |
|
71 | 73 | self.assertEqual(p.overlap_log, 6) |
@@ -128,7 +130,9 b' class TestCompressionParameters(TestCase' | |||
|
128 | 130 | with self.assertRaisesRegex( |
|
129 | 131 | ValueError, "cannot specify both ldm_hash_rate_log" |
|
130 | 132 | ): |
|
131 | zstd.ZstdCompressionParameters(ldm_hash_rate_log=8, ldm_hash_every_log=4) | |
|
133 | zstd.ZstdCompressionParameters( | |
|
134 | ldm_hash_rate_log=8, ldm_hash_every_log=4 | |
|
135 | ) | |
|
132 | 136 | |
|
133 | 137 | p = zstd.ZstdCompressionParameters(ldm_hash_rate_log=8) |
|
134 | 138 | self.assertEqual(p.ldm_hash_every_log, 8) |
@@ -137,7 +141,9 b' class TestCompressionParameters(TestCase' | |||
|
137 | 141 | self.assertEqual(p.ldm_hash_every_log, 16) |
|
138 | 142 | |
|
139 | 143 | def test_overlap_log(self): |
|
140 | with self.assertRaisesRegex(ValueError, "cannot specify both overlap_log"): | |
|
144 | with self.assertRaisesRegex( | |
|
145 | ValueError, "cannot specify both overlap_log" | |
|
146 | ): | |
|
141 | 147 | zstd.ZstdCompressionParameters(overlap_log=1, overlap_size_log=9) |
|
142 | 148 | |
|
143 | 149 | p = zstd.ZstdCompressionParameters(overlap_log=2) |
@@ -169,10 +175,14 b' class TestFrameParameters(TestCase):' | |||
|
169 | 175 | zstd.get_frame_parameters(u"foobarbaz") |
|
170 | 176 | |
|
171 | 177 | def test_invalid_input_sizes(self): |
|
172 | with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"): | |
|
178 | with self.assertRaisesRegex( | |
|
179 | zstd.ZstdError, "not enough data for frame" | |
|
180 | ): | |
|
173 | 181 | zstd.get_frame_parameters(b"") |
|
174 | 182 | |
|
175 | with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"): | |
|
183 | with self.assertRaisesRegex( | |
|
184 | zstd.ZstdError, "not enough data for frame" | |
|
185 | ): | |
|
176 | 186 | zstd.get_frame_parameters(zstd.FRAME_HEADER) |
|
177 | 187 | |
|
178 | 188 | def test_invalid_frame(self): |
@@ -201,7 +211,9 b' class TestFrameParameters(TestCase):' | |||
|
201 | 211 | self.assertTrue(params.has_checksum) |
|
202 | 212 | |
|
203 | 213 | # Upper 2 bits indicate content size. |
|
204 | params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x40\x00\xff\x00") | |
|
214 | params = zstd.get_frame_parameters( | |
|
215 | zstd.FRAME_HEADER + b"\x40\x00\xff\x00" | |
|
216 | ) | |
|
205 | 217 | self.assertEqual(params.content_size, 511) |
|
206 | 218 | self.assertEqual(params.window_size, 1024) |
|
207 | 219 | self.assertEqual(params.dict_id, 0) |
@@ -215,7 +227,9 b' class TestFrameParameters(TestCase):' | |||
|
215 | 227 | self.assertFalse(params.has_checksum) |
|
216 | 228 | |
|
217 | 229 | # Set multiple things. |
|
218 | params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x45\x40\x0f\x10\x00") | |
|
230 | params = zstd.get_frame_parameters( | |
|
231 | zstd.FRAME_HEADER + b"\x45\x40\x0f\x10\x00" | |
|
232 | ) | |
|
219 | 233 | self.assertEqual(params.content_size, 272) |
|
220 | 234 | self.assertEqual(params.window_size, 262144) |
|
221 | 235 | self.assertEqual(params.dict_id, 15) |
@@ -23,7 +23,9 b' s_windowlog = strategies.integers(' | |||
|
23 | 23 | s_chainlog = strategies.integers( |
|
24 | 24 | min_value=zstd.CHAINLOG_MIN, max_value=zstd.CHAINLOG_MAX |
|
25 | 25 | ) |
|
26 | s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN, max_value=zstd.HASHLOG_MAX) | |
|
26 | s_hashlog = strategies.integers( | |
|
27 | min_value=zstd.HASHLOG_MIN, max_value=zstd.HASHLOG_MAX | |
|
28 | ) | |
|
27 | 29 | s_searchlog = strategies.integers( |
|
28 | 30 | min_value=zstd.SEARCHLOG_MIN, max_value=zstd.SEARCHLOG_MAX |
|
29 | 31 | ) |
@@ -61,7 +63,14 b' class TestCompressionParametersHypothesi' | |||
|
61 | 63 | s_strategy, |
|
62 | 64 | ) |
|
63 | 65 | def test_valid_init( |
|
64 | self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy | |
|
66 | self, | |
|
67 | windowlog, | |
|
68 | chainlog, | |
|
69 | hashlog, | |
|
70 | searchlog, | |
|
71 | minmatch, | |
|
72 | targetlength, | |
|
73 | strategy, | |
|
65 | 74 | ): |
|
66 | 75 | zstd.ZstdCompressionParameters( |
|
67 | 76 | window_log=windowlog, |
@@ -83,7 +92,14 b' class TestCompressionParametersHypothesi' | |||
|
83 | 92 | s_strategy, |
|
84 | 93 | ) |
|
85 | 94 | def test_estimated_compression_context_size( |
|
86 | self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy | |
|
95 | self, | |
|
96 | windowlog, | |
|
97 | chainlog, | |
|
98 | hashlog, | |
|
99 | searchlog, | |
|
100 | minmatch, | |
|
101 | targetlength, | |
|
102 | strategy, | |
|
87 | 103 | ): |
|
88 | 104 | if minmatch == zstd.MINMATCH_MIN and strategy in ( |
|
89 | 105 | zstd.STRATEGY_FAST, |
@@ -170,11 +170,15 b' class TestDecompressor_decompress(TestCa' | |||
|
170 | 170 | dctx.decompress(compressed, max_output_size=len(source) - 1) |
|
171 | 171 | |
|
172 | 172 | # Input size + 1 works |
|
173 | decompressed = dctx.decompress(compressed, max_output_size=len(source) + 1) | |
|
173 | decompressed = dctx.decompress( | |
|
174 | compressed, max_output_size=len(source) + 1 | |
|
175 | ) | |
|
174 | 176 | self.assertEqual(decompressed, source) |
|
175 | 177 | |
|
176 | 178 | # A much larger buffer works. |
|
177 | decompressed = dctx.decompress(compressed, max_output_size=len(source) * 64) | |
|
179 | decompressed = dctx.decompress( | |
|
180 | compressed, max_output_size=len(source) * 64 | |
|
181 | ) | |
|
178 | 182 | self.assertEqual(decompressed, source) |
|
179 | 183 | |
|
180 | 184 | def test_stupidly_large_output_buffer(self): |
@@ -237,7 +241,8 b' class TestDecompressor_decompress(TestCa' | |||
|
237 | 241 | dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN) |
|
238 | 242 | |
|
239 | 243 | with self.assertRaisesRegex( |
|
240 | zstd.ZstdError, "decompression error: Frame requires too much memory" | |
|
244 | zstd.ZstdError, | |
|
245 | "decompression error: Frame requires too much memory", | |
|
241 | 246 | ): |
|
242 | 247 | dctx.decompress(frame, max_output_size=len(source)) |
|
243 | 248 | |
@@ -291,7 +296,9 b' class TestDecompressor_copy_stream(TestC' | |||
|
291 | 296 | self.assertEqual(w, len(source.getvalue())) |
|
292 | 297 | |
|
293 | 298 | def test_read_write_size(self): |
|
294 | source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar")) | |
|
299 | source = OpCountingBytesIO( | |
|
300 | zstd.ZstdCompressor().compress(b"foobarfoobar") | |
|
301 | ) | |
|
295 | 302 | |
|
296 | 303 | dest = OpCountingBytesIO() |
|
297 | 304 | dctx = zstd.ZstdDecompressor() |
@@ -309,7 +316,9 b' class TestDecompressor_stream_reader(Tes' | |||
|
309 | 316 | dctx = zstd.ZstdDecompressor() |
|
310 | 317 | |
|
311 | 318 | with dctx.stream_reader(b"foo") as reader: |
|
312 | with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"): | |
|
319 | with self.assertRaisesRegex( | |
|
320 | ValueError, "cannot __enter__ multiple times" | |
|
321 | ): | |
|
313 | 322 | with reader as reader2: |
|
314 | 323 | pass |
|
315 | 324 | |
@@ -474,7 +483,9 b' class TestDecompressor_stream_reader(Tes' | |||
|
474 | 483 | dctx = zstd.ZstdDecompressor() |
|
475 | 484 | |
|
476 | 485 | with dctx.stream_reader(frame) as reader: |
|
477 | with self.assertRaisesRegex(ValueError, "cannot seek to negative position"): | |
|
486 | with self.assertRaisesRegex( | |
|
487 | ValueError, "cannot seek to negative position" | |
|
488 | ): | |
|
478 | 489 | reader.seek(-1, os.SEEK_SET) |
|
479 | 490 | |
|
480 | 491 | reader.read(1) |
@@ -490,7 +501,8 b' class TestDecompressor_stream_reader(Tes' | |||
|
490 | 501 | reader.seek(-1, os.SEEK_CUR) |
|
491 | 502 | |
|
492 | 503 | with self.assertRaisesRegex( |
|
493 | ValueError, "zstd decompression streams cannot be seeked with SEEK_END" | |
|
504 | ValueError, | |
|
505 | "zstd decompression streams cannot be seeked with SEEK_END", | |
|
494 | 506 | ): |
|
495 | 507 | reader.seek(0, os.SEEK_END) |
|
496 | 508 | |
@@ -743,7 +755,9 b' class TestDecompressor_stream_reader(Tes' | |||
|
743 | 755 | |
|
744 | 756 | def test_read_lines(self): |
|
745 | 757 | cctx = zstd.ZstdCompressor() |
|
746 | source = b"\n".join(("line %d" % i).encode("ascii") for i in range(1024)) | |
|
758 | source = b"\n".join( | |
|
759 | ("line %d" % i).encode("ascii") for i in range(1024) | |
|
760 | ) | |
|
747 | 761 | |
|
748 | 762 | frame = cctx.compress(source) |
|
749 | 763 | |
@@ -821,7 +835,9 b' class TestDecompressor_decompressobj(Tes' | |||
|
821 | 835 | dobj = dctx.decompressobj() |
|
822 | 836 | dobj.decompress(data) |
|
823 | 837 | |
|
824 | with self.assertRaisesRegex(zstd.ZstdError, "cannot use a decompressobj"): | |
|
838 | with self.assertRaisesRegex( | |
|
839 | zstd.ZstdError, "cannot use a decompressobj" | |
|
840 | ): | |
|
825 | 841 | dobj.decompress(data) |
|
826 | 842 | self.assertIsNone(dobj.flush()) |
|
827 | 843 | |
@@ -1124,7 +1140,9 b' class TestDecompressor_read_to_iter(Test' | |||
|
1124 | 1140 | # Buffer protocol works. |
|
1125 | 1141 | dctx.read_to_iter(b"foobar") |
|
1126 | 1142 | |
|
1127 | with self.assertRaisesRegex(ValueError, "must pass an object with a read"): | |
|
1143 | with self.assertRaisesRegex( | |
|
1144 | ValueError, "must pass an object with a read" | |
|
1145 | ): | |
|
1128 | 1146 | b"".join(dctx.read_to_iter(True)) |
|
1129 | 1147 | |
|
1130 | 1148 | def test_empty_input(self): |
@@ -1226,7 +1244,9 b' class TestDecompressor_read_to_iter(Test' | |||
|
1226 | 1244 | decompressed = b"".join(chunks) |
|
1227 | 1245 | self.assertEqual(decompressed, source.getvalue()) |
|
1228 | 1246 | |
|
1229 | @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") | |
|
1247 | @unittest.skipUnless( | |
|
1248 | "ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set" | |
|
1249 | ) | |
|
1230 | 1250 | def test_large_input(self): |
|
1231 | 1251 | bytes = list(struct.Struct(">B").pack(i) for i in range(256)) |
|
1232 | 1252 | compressed = NonClosingBytesIO() |
@@ -1241,13 +1261,16 b' class TestDecompressor_read_to_iter(Test' | |||
|
1241 | 1261 | len(compressed.getvalue()) |
|
1242 | 1262 | > zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE |
|
1243 | 1263 | ) |
|
1244 | have_raw = input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2 | |
|
1264 | have_raw = ( | |
|
1265 | input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2 | |
|
1266 | ) | |
|
1245 | 1267 | if have_compressed and have_raw: |
|
1246 | 1268 | break |
|
1247 | 1269 | |
|
1248 | 1270 | compressed = io.BytesIO(compressed.getvalue()) |
|
1249 | 1271 | self.assertGreater( |
|
1250 | len(compressed.getvalue()), | |
|
1272 | len(compressed.getvalue()), | |
|
1273 | zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE, | |
|
1251 | 1274 | ) |
|
1252 | 1275 | |
|
1253 | 1276 | dctx = zstd.ZstdDecompressor() |
@@ -1303,7 +1326,9 b' class TestDecompressor_read_to_iter(Test' | |||
|
1303 | 1326 | self.assertEqual(streamed, source.getvalue()) |
|
1304 | 1327 | |
|
1305 | 1328 | def test_read_write_size(self): |
|
1306 | source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar")) | |
|
1329 | source = OpCountingBytesIO( | |
|
1330 | zstd.ZstdCompressor().compress(b"foobarfoobar") | |
|
1331 | ) | |
|
1307 | 1332 | dctx = zstd.ZstdDecompressor() |
|
1308 | 1333 | for chunk in dctx.read_to_iter(source, read_size=1, write_size=1): |
|
1309 | 1334 | self.assertEqual(len(chunk), 1) |
@@ -1355,10 +1380,14 b' class TestDecompressor_content_dict_chai' | |||
|
1355 | 1380 | ): |
|
1356 | 1381 | dctx.decompress_content_dict_chain([zstd.FRAME_HEADER]) |
|
1357 | 1382 | |
|
1358 | with self.assertRaisesRegex(ValueError, "chunk 0 is not a valid zstd frame"): | |
|
1383 | with self.assertRaisesRegex( | |
|
1384 | ValueError, "chunk 0 is not a valid zstd frame" | |
|
1385 | ): | |
|
1359 | 1386 | dctx.decompress_content_dict_chain([b"foo" * 8]) |
|
1360 | 1387 | |
|
1361 | no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64) | |
|
1388 | no_size = zstd.ZstdCompressor(write_content_size=False).compress( | |
|
1389 | b"foo" * 64 | |
|
1390 | ) | |
|
1362 | 1391 | |
|
1363 | 1392 | with self.assertRaisesRegex( |
|
1364 | 1393 | ValueError, "chunk 0 missing content size in frame" |
@@ -1389,10 +1418,14 b' class TestDecompressor_content_dict_chai' | |||
|
1389 | 1418 | ): |
|
1390 | 1419 | dctx.decompress_content_dict_chain([initial, zstd.FRAME_HEADER]) |
|
1391 | 1420 | |
|
1392 | with self.assertRaisesRegex(ValueError, "chunk 1 is not a valid zstd frame"): | |
|
1421 | with self.assertRaisesRegex( | |
|
1422 | ValueError, "chunk 1 is not a valid zstd frame" | |
|
1423 | ): | |
|
1393 | 1424 | dctx.decompress_content_dict_chain([initial, b"foo" * 8]) |
|
1394 | 1425 | |
|
1395 | no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64) | |
|
1426 | no_size = zstd.ZstdCompressor(write_content_size=False).compress( | |
|
1427 | b"foo" * 64 | |
|
1428 | ) | |
|
1396 | 1429 | |
|
1397 | 1430 | with self.assertRaisesRegex( |
|
1398 | 1431 | ValueError, "chunk 1 missing content size in frame" |
@@ -1400,7 +1433,9 b' class TestDecompressor_content_dict_chai' | |||
|
1400 | 1433 | dctx.decompress_content_dict_chain([initial, no_size]) |
|
1401 | 1434 | |
|
1402 | 1435 | # Corrupt second frame. |
|
1403 | cctx = zstd.ZstdCompressor(dict_data=zstd.ZstdCompressionDict(b"foo" * 64)) | |
|
1436 | cctx = zstd.ZstdCompressor( | |
|
1437 | dict_data=zstd.ZstdCompressionDict(b"foo" * 64) | |
|
1438 | ) | |
|
1404 | 1439 | frame = cctx.compress(b"bar" * 64) |
|
1405 | 1440 | frame = frame[0:12] + frame[15:] |
|
1406 | 1441 | |
@@ -1447,7 +1482,9 b' class TestDecompressor_multi_decompress_' | |||
|
1447 | 1482 | with self.assertRaises(TypeError): |
|
1448 | 1483 | dctx.multi_decompress_to_buffer((1, 2)) |
|
1449 | 1484 | |
|
1450 | with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"): | |
|
1485 | with self.assertRaisesRegex( | |
|
1486 | TypeError, "item 0 not a bytes like object" | |
|
1487 | ): | |
|
1451 | 1488 | dctx.multi_decompress_to_buffer([u"foo"]) |
|
1452 | 1489 | |
|
1453 | 1490 | with self.assertRaisesRegex( |
@@ -1491,7 +1528,9 b' class TestDecompressor_multi_decompress_' | |||
|
1491 | 1528 | if not hasattr(dctx, "multi_decompress_to_buffer"): |
|
1492 | 1529 | self.skipTest("multi_decompress_to_buffer not available") |
|
1493 | 1530 | |
|
1494 | result = dctx.multi_decompress_to_buffer(frames, decompressed_sizes=sizes) | |
|
1531 | result = dctx.multi_decompress_to_buffer( | |
|
1532 | frames, decompressed_sizes=sizes | |
|
1533 | ) | |
|
1495 | 1534 | |
|
1496 | 1535 | self.assertEqual(len(result), len(frames)) |
|
1497 | 1536 | self.assertEqual(result.size(), sum(map(len, original))) |
@@ -1582,10 +1621,15 b' class TestDecompressor_multi_decompress_' | |||
|
1582 | 1621 | # And a manual mode. |
|
1583 | 1622 | b = b"".join([frames[0].tobytes(), frames[1].tobytes()]) |
|
1584 | 1623 | b1 = zstd.BufferWithSegments( |
|
1585 | b, struct.pack("=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1])) | |
|
1624 | b, | |
|
1625 | struct.pack( | |
|
1626 | "=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1]) | |
|
1627 | ), | |
|
1586 | 1628 | ) |
|
1587 | 1629 | |
|
1588 | b = b"".join([frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()]) | |
|
1630 | b = b"".join( | |
|
1631 | [frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()] | |
|
1632 | ) | |
|
1589 | 1633 | b2 = zstd.BufferWithSegments( |
|
1590 | 1634 | b, |
|
1591 | 1635 | struct.pack( |
@@ -196,7 +196,9 b' class TestDecompressor_stream_reader_fuz' | |||
|
196 | 196 | streaming=strategies.booleans(), |
|
197 | 197 | source_read_size=strategies.integers(1, 1048576), |
|
198 | 198 | ) |
|
199 | def test_stream_source_readall(self, original, level, streaming, source_read_size): | |
|
199 | def test_stream_source_readall( | |
|
200 | self, original, level, streaming, source_read_size | |
|
201 | ): | |
|
200 | 202 | cctx = zstd.ZstdCompressor(level=level) |
|
201 | 203 | |
|
202 | 204 | if streaming: |
@@ -398,7 +400,9 b' class TestDecompressor_stream_writer_fuz' | |||
|
398 | 400 | write_size=strategies.integers(min_value=1, max_value=8192), |
|
399 | 401 | input_sizes=strategies.data(), |
|
400 | 402 | ) |
|
401 | def test_write_size_variance(self, original, level, write_size, input_sizes): | |
|
403 | def test_write_size_variance( | |
|
404 | self, original, level, write_size, input_sizes | |
|
405 | ): | |
|
402 | 406 | cctx = zstd.ZstdCompressor(level=level) |
|
403 | 407 | frame = cctx.compress(original) |
|
404 | 408 | |
@@ -433,7 +437,9 b' class TestDecompressor_copy_stream_fuzzi' | |||
|
433 | 437 | read_size=strategies.integers(min_value=1, max_value=8192), |
|
434 | 438 | write_size=strategies.integers(min_value=1, max_value=8192), |
|
435 | 439 | ) |
|
436 | def test_read_write_size_variance(self, original, level, read_size, write_size): | |
|
440 | def test_read_write_size_variance( | |
|
441 | self, original, level, read_size, write_size | |
|
442 | ): | |
|
437 | 443 | cctx = zstd.ZstdCompressor(level=level) |
|
438 | 444 | frame = cctx.compress(original) |
|
439 | 445 | |
@@ -441,7 +447,9 b' class TestDecompressor_copy_stream_fuzzi' | |||
|
441 | 447 | dest = io.BytesIO() |
|
442 | 448 | |
|
443 | 449 | dctx = zstd.ZstdDecompressor() |
|
444 | dctx.copy_stream(source, dest, read_size=read_size, write_size=write_size) | |
|
450 | dctx.copy_stream( | |
|
451 | source, dest, read_size=read_size, write_size=write_size | |
|
452 | ) | |
|
445 | 453 | |
|
446 | 454 | self.assertEqual(dest.getvalue(), original) |
|
447 | 455 | |
@@ -490,11 +498,14 b' class TestDecompressor_decompressobj_fuz' | |||
|
490 | 498 | original=strategies.sampled_from(random_input_data()), |
|
491 | 499 | level=strategies.integers(min_value=1, max_value=5), |
|
492 | 500 | write_size=strategies.integers( |
|
493 | min_value=1, max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
501 | min_value=1, | |
|
502 | max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE, | |
|
494 | 503 | ), |
|
495 | 504 | chunk_sizes=strategies.data(), |
|
496 | 505 | ) |
|
497 | def test_random_output_sizes(self, original, level, write_size, chunk_sizes): | |
|
506 | def test_random_output_sizes( | |
|
507 | self, original, level, write_size, chunk_sizes | |
|
508 | ): | |
|
498 | 509 | cctx = zstd.ZstdCompressor(level=level) |
|
499 | 510 | frame = cctx.compress(original) |
|
500 | 511 | |
@@ -524,7 +535,9 b' class TestDecompressor_read_to_iter_fuzz' | |||
|
524 | 535 | read_size=strategies.integers(min_value=1, max_value=4096), |
|
525 | 536 | write_size=strategies.integers(min_value=1, max_value=4096), |
|
526 | 537 | ) |
|
527 | def test_read_write_size_variance(self, original, level, read_size, write_size): | |
|
538 | def test_read_write_size_variance( | |
|
539 | self, original, level, read_size, write_size | |
|
540 | ): | |
|
528 | 541 | cctx = zstd.ZstdCompressor(level=level) |
|
529 | 542 | frame = cctx.compress(original) |
|
530 | 543 | |
@@ -532,7 +545,9 b' class TestDecompressor_read_to_iter_fuzz' | |||
|
532 | 545 | |
|
533 | 546 | dctx = zstd.ZstdDecompressor() |
|
534 | 547 | chunks = list( |
|
535 | dctx.read_to_iter(source, read_size=read_size, write_size=write_size) | |
|
548 | dctx.read_to_iter( | |
|
549 | source, read_size=read_size, write_size=write_size | |
|
550 | ) | |
|
536 | 551 | ) |
|
537 | 552 | |
|
538 | 553 | self.assertEqual(b"".join(chunks), original) |
@@ -542,7 +557,9 b' class TestDecompressor_read_to_iter_fuzz' | |||
|
542 | 557 | class TestDecompressor_multi_decompress_to_buffer_fuzzing(TestCase): |
|
543 | 558 | @hypothesis.given( |
|
544 | 559 | original=strategies.lists( |
|
545 | strategies.sampled_from(random_input_data()), | |
|
560 | strategies.sampled_from(random_input_data()), | |
|
561 | min_size=1, | |
|
562 | max_size=1024, | |
|
546 | 563 | ), |
|
547 | 564 | threads=strategies.integers(min_value=1, max_value=8), |
|
548 | 565 | use_dict=strategies.booleans(), |
@@ -51,11 +51,15 b' class TestTrainDictionary(TestCase):' | |||
|
51 | 51 | self.assertEqual(d.d, 16) |
|
52 | 52 | |
|
53 | 53 | def test_set_dict_id(self): |
|
54 | d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16, dict_id=42) | |
|
54 | d = zstd.train_dictionary( | |
|
55 | 8192, generate_samples(), k=64, d=16, dict_id=42 | |
|
56 | ) | |
|
55 | 57 | self.assertEqual(d.dict_id(), 42) |
|
56 | 58 | |
|
57 | 59 | def test_optimize(self): |
|
58 | d = zstd.train_dictionary(8192, generate_samples(), threads=-1, steps=1, d=16) | |
|
60 | d = zstd.train_dictionary( | |
|
61 | 8192, generate_samples(), threads=-1, steps=1, d=16 | |
|
62 | ) | |
|
59 | 63 | |
|
60 | 64 | # This varies by platform. |
|
61 | 65 | self.assertIn(d.k, (50, 2000)) |
@@ -71,10 +75,14 b' class TestCompressionDict(TestCase):' | |||
|
71 | 75 | def test_bad_precompute_compress(self): |
|
72 | 76 | d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16) |
|
73 | 77 | |
|
74 | with self.assertRaisesRegex(ValueError, "must specify one of level or "): | |
|
78 | with self.assertRaisesRegex( | |
|
79 | ValueError, "must specify one of level or " | |
|
80 | ): | |
|
75 | 81 | d.precompute_compress() |
|
76 | 82 | |
|
77 | with self.assertRaisesRegex(ValueError, "must only specify one of level or "): | |
|
83 | with self.assertRaisesRegex( | |
|
84 | ValueError, "must only specify one of level or " | |
|
85 | ): | |
|
78 | 86 | d.precompute_compress( |
|
79 | 87 | level=3, compression_params=zstd.CompressionParameters() |
|
80 | 88 | ) |
@@ -88,5 +96,7 b' class TestCompressionDict(TestCase):' | |||
|
88 | 96 | d = zstd.ZstdCompressionDict( |
|
89 | 97 | b"dictcontent" * 64, dict_type=zstd.DICT_TYPE_FULLDICT |
|
90 | 98 | ) |
|
91 | with self.assertRaisesRegex(zstd.ZstdError, "unable to precompute dictionary"): | |
|
99 | with self.assertRaisesRegex( | |
|
100 | zstd.ZstdError, "unable to precompute dictionary" | |
|
101 | ): | |
|
92 | 102 | d.precompute_compress(level=1) |
@@ -299,10 +299,14 b' class ZstdCompressionParameters(object):' | |||
|
299 | 299 | _set_compression_parameter(params, lib.ZSTD_c_chainLog, chain_log) |
|
300 | 300 | _set_compression_parameter(params, lib.ZSTD_c_searchLog, search_log) |
|
301 | 301 | _set_compression_parameter(params, lib.ZSTD_c_minMatch, min_match) |
|
302 | _set_compression_parameter(params, lib.ZSTD_c_targetLength, target_length) | |
|
302 | _set_compression_parameter( | |
|
303 | params, lib.ZSTD_c_targetLength, target_length | |
|
304 | ) | |
|
303 | 305 | |
|
304 | 306 | if strategy != -1 and compression_strategy != -1: |
|
305 | raise ValueError("cannot specify both compression_strategy and strategy") | |
|
307 | raise ValueError( | |
|
308 | "cannot specify both compression_strategy and strategy" | |
|
309 | ) | |
|
306 | 310 | |
|
307 | 311 | if compression_strategy != -1: |
|
308 | 312 | strategy = compression_strategy |
@@ -313,12 +317,16 b' class ZstdCompressionParameters(object):' | |||
|
313 | 317 | _set_compression_parameter( |
|
314 | 318 | params, lib.ZSTD_c_contentSizeFlag, write_content_size |
|
315 | 319 | ) |
|
316 | _set_compression_parameter(params, lib.ZSTD_c_checksumFlag, write_checksum) | |
|
320 | _set_compression_parameter( | |
|
321 | params, lib.ZSTD_c_checksumFlag, write_checksum | |
|
322 | ) | |
|
317 | 323 | _set_compression_parameter(params, lib.ZSTD_c_dictIDFlag, write_dict_id) |
|
318 | 324 | _set_compression_parameter(params, lib.ZSTD_c_jobSize, job_size) |
|
319 | 325 | |
|
320 | 326 | if overlap_log != -1 and overlap_size_log != -1: |
|
321 | raise ValueError("cannot specify both overlap_log and overlap_size_log") | |
|
327 | raise ValueError( | |
|
328 | "cannot specify both overlap_log and overlap_size_log" | |
|
329 | ) | |
|
322 | 330 | |
|
323 | 331 | if overlap_size_log != -1: |
|
324 | 332 | overlap_log = overlap_size_log |
@@ -326,12 +334,16 b' class ZstdCompressionParameters(object):' | |||
|
326 | 334 | overlap_log = 0 |
|
327 | 335 | |
|
328 | 336 | _set_compression_parameter(params, lib.ZSTD_c_overlapLog, overlap_log) |
|
329 | _set_compression_parameter(params, lib.ZSTD_c_forceMaxWindow, force_max_window) | |
|
337 | _set_compression_parameter( | |
|
338 | params, lib.ZSTD_c_forceMaxWindow, force_max_window | |
|
339 | ) | |
|
330 | 340 | _set_compression_parameter( |
|
331 | 341 | params, lib.ZSTD_c_enableLongDistanceMatching, enable_ldm |
|
332 | 342 | ) |
|
333 | 343 | _set_compression_parameter(params, lib.ZSTD_c_ldmHashLog, ldm_hash_log) |
|
334 | _set_compression_parameter(params, lib.ZSTD_c_ldmMinMatch, ldm_min_match) | |
|
344 | _set_compression_parameter( | |
|
345 | params, lib.ZSTD_c_ldmMinMatch, ldm_min_match | |
|
346 | ) | |
|
335 | 347 | _set_compression_parameter( |
|
336 | 348 | params, lib.ZSTD_c_ldmBucketSizeLog, ldm_bucket_size_log |
|
337 | 349 | ) |
@@ -346,7 +358,9 b' class ZstdCompressionParameters(object):' | |||
|
346 | 358 | elif ldm_hash_rate_log == -1: |
|
347 | 359 | ldm_hash_rate_log = 0 |
|
348 | 360 | |
|
349 | _set_compression_parameter(params, lib.ZSTD_c_ldmHashRateLog, ldm_hash_rate_log) | |
|
361 | _set_compression_parameter( | |
|
362 | params, lib.ZSTD_c_ldmHashRateLog, ldm_hash_rate_log | |
|
363 | ) | |
|
350 | 364 | |
|
351 | 365 | @property |
|
352 | 366 | def format(self): |
@@ -354,7 +368,9 b' class ZstdCompressionParameters(object):' | |||
|
354 | 368 | |
|
355 | 369 | @property |
|
356 | 370 | def compression_level(self): |
|
357 | return _get_compression_parameter(self._params, lib.ZSTD_c_compressionLevel) | |
|
371 | return _get_compression_parameter( | |
|
372 | self._params, lib.ZSTD_c_compressionLevel | |
|
373 | ) | |
|
358 | 374 | |
|
359 | 375 | @property |
|
360 | 376 | def window_log(self): |
@@ -386,7 +402,9 b' class ZstdCompressionParameters(object):' | |||
|
386 | 402 | |
|
387 | 403 | @property |
|
388 | 404 | def write_content_size(self): |
|
389 | return _get_compression_parameter(self._params, lib.ZSTD_c_contentSizeFlag) | |
|
405 | return _get_compression_parameter( | |
|
406 | self._params, lib.ZSTD_c_contentSizeFlag | |
|
407 | ) | |
|
390 | 408 | |
|
391 | 409 | @property |
|
392 | 410 | def write_checksum(self): |
@@ -410,7 +428,9 b' class ZstdCompressionParameters(object):' | |||
|
410 | 428 | |
|
411 | 429 | @property |
|
412 | 430 | def force_max_window(self): |
|
413 | return _get_compression_parameter(self._params, lib.ZSTD_c_forceMaxWindow) | |
|
431 | return _get_compression_parameter( | |
|
432 | self._params, lib.ZSTD_c_forceMaxWindow | |
|
433 | ) | |
|
414 | 434 | |
|
415 | 435 | @property |
|
416 | 436 | def enable_ldm(self): |
@@ -428,11 +448,15 b' class ZstdCompressionParameters(object):' | |||
|
428 | 448 | |
|
429 | 449 | @property |
|
430 | 450 | def ldm_bucket_size_log(self): |
|
431 | return _get_compression_parameter(self._params, lib.ZSTD_c_ldmBucketSizeLog) | |
|
451 | return _get_compression_parameter( | |
|
452 | self._params, lib.ZSTD_c_ldmBucketSizeLog | |
|
453 | ) | |
|
432 | 454 | |
|
433 | 455 | @property |
|
434 | 456 | def ldm_hash_rate_log(self): |
|
435 | return _get_compression_parameter(self._params, lib.ZSTD_c_ldmHashRateLog) | |
|
457 | return _get_compression_parameter( | |
|
458 | self._params, lib.ZSTD_c_ldmHashRateLog | |
|
459 | ) | |
|
436 | 460 | |
|
437 | 461 | @property |
|
438 | 462 | def ldm_hash_every_log(self): |
@@ -457,7 +481,8 b' def _set_compression_parameter(params, p' | |||
|
457 | 481 | zresult = lib.ZSTD_CCtxParams_setParameter(params, param, value) |
|
458 | 482 | if lib.ZSTD_isError(zresult): |
|
459 | 483 | raise ZstdError( |
|
460 | "unable to set compression context parameter: %s" % _zstd_error(zresult) | |
|
484 | "unable to set compression context parameter: %s" | |
|
485 | % _zstd_error(zresult) | |
|
461 | 486 | ) |
|
462 | 487 | |
|
463 | 488 | |
@@ -467,14 +492,17 b' def _get_compression_parameter(params, p' | |||
|
467 | 492 | zresult = lib.ZSTD_CCtxParams_getParameter(params, param, result) |
|
468 | 493 | if lib.ZSTD_isError(zresult): |
|
469 | 494 | raise ZstdError( |
|
470 | "unable to get compression context parameter: %s" % _zstd_error(zresult) | |
|
495 | "unable to get compression context parameter: %s" | |
|
496 | % _zstd_error(zresult) | |
|
471 | 497 | ) |
|
472 | 498 | |
|
473 | 499 | return result[0] |
|
474 | 500 | |
|
475 | 501 | |
|
476 | 502 | class ZstdCompressionWriter(object): |
|
477 | def __init__(self, compressor, writer, source_size, write_size, write_return_read): | |
|
503 | def __init__( | |
|
504 | self, compressor, writer, source_size, write_size, write_return_read | |
|
505 | ): | |
|
478 | 506 | self._compressor = compressor |
|
479 | 507 | self._writer = writer |
|
480 | 508 | self._write_size = write_size |
@@ -491,7 +519,9 b' class ZstdCompressionWriter(object):' | |||
|
491 | 519 | |
|
492 | 520 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(compressor._cctx, source_size) |
|
493 | 521 | if lib.ZSTD_isError(zresult): |
|
494 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
522 | raise ZstdError( | |
|
523 | "error setting source size: %s" % _zstd_error(zresult) | |
|
524 | ) | |
|
495 | 525 | |
|
496 | 526 | def __enter__(self): |
|
497 | 527 | if self._closed: |
@@ -595,13 +625,20 b' class ZstdCompressionWriter(object):' | |||
|
595 | 625 | |
|
596 | 626 | while in_buffer.pos < in_buffer.size: |
|
597 | 627 | zresult = lib.ZSTD_compressStream2( |
|
598 | self._compressor._cctx, | |
|
628 | self._compressor._cctx, | |
|
629 | out_buffer, | |
|
630 | in_buffer, | |
|
631 | lib.ZSTD_e_continue, | |
|
599 | 632 | ) |
|
600 | 633 | if lib.ZSTD_isError(zresult): |
|
601 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
634 | raise ZstdError( | |
|
635 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
636 | ) | |
|
602 | 637 | |
|
603 | 638 | if out_buffer.pos: |
|
604 | self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:]) | |
|
639 | self._writer.write( | |
|
640 | ffi.buffer(out_buffer.dst, out_buffer.pos)[:] | |
|
641 | ) | |
|
605 | 642 | total_write += out_buffer.pos |
|
606 | 643 | self._bytes_compressed += out_buffer.pos |
|
607 | 644 | out_buffer.pos = 0 |
@@ -637,10 +674,14 b' class ZstdCompressionWriter(object):' | |||
|
637 | 674 | self._compressor._cctx, out_buffer, in_buffer, flush |
|
638 | 675 | ) |
|
639 | 676 | if lib.ZSTD_isError(zresult): |
|
640 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
677 | raise ZstdError( | |
|
678 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
679 | ) | |
|
641 | 680 | |
|
642 | 681 | if out_buffer.pos: |
|
643 | self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:]) | |
|
682 | self._writer.write( | |
|
683 | ffi.buffer(out_buffer.dst, out_buffer.pos)[:] | |
|
684 | ) | |
|
644 | 685 | total_write += out_buffer.pos |
|
645 | 686 | self._bytes_compressed += out_buffer.pos |
|
646 | 687 | out_buffer.pos = 0 |
@@ -672,7 +713,9 b' class ZstdCompressionObj(object):' | |||
|
672 | 713 | self._compressor._cctx, self._out, source, lib.ZSTD_e_continue |
|
673 | 714 | ) |
|
674 | 715 | if lib.ZSTD_isError(zresult): |
|
675 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
716 | raise ZstdError( | |
|
717 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
718 | ) | |
|
676 | 719 | |
|
677 | 720 | if self._out.pos: |
|
678 | 721 | chunks.append(ffi.buffer(self._out.dst, self._out.pos)[:]) |
@@ -681,7 +724,10 b' class ZstdCompressionObj(object):' | |||
|
681 | 724 | return b"".join(chunks) |
|
682 | 725 | |
|
683 | 726 | def flush(self, flush_mode=COMPRESSOBJ_FLUSH_FINISH): |
|
684 | if flush_mode not in (COMPRESSOBJ_FLUSH_FINISH, COMPRESSOBJ_FLUSH_BLOCK): | |
|
727 | if flush_mode not in ( | |
|
728 | COMPRESSOBJ_FLUSH_FINISH, | |
|
729 | COMPRESSOBJ_FLUSH_BLOCK, | |
|
730 | ): | |
|
685 | 731 | raise ValueError("flush mode not recognized") |
|
686 | 732 | |
|
687 | 733 | if self._finished: |
@@ -768,7 +814,9 b' class ZstdCompressionChunker(object):' | |||
|
768 | 814 | self._in.pos = 0 |
|
769 | 815 | |
|
770 | 816 | if lib.ZSTD_isError(zresult): |
|
771 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
817 | raise ZstdError( | |
|
818 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
819 | ) | |
|
772 | 820 | |
|
773 | 821 | if self._out.pos == self._out.size: |
|
774 | 822 | yield ffi.buffer(self._out.dst, self._out.pos)[:] |
@@ -780,7 +828,8 b' class ZstdCompressionChunker(object):' | |||
|
780 | 828 | |
|
781 | 829 | if self._in.src != ffi.NULL: |
|
782 | 830 | raise ZstdError( |
|
783 | "cannot call flush() before consuming output from " | |
|
831 | "cannot call flush() before consuming output from " | |
|
832 | "previous operation" | |
|
784 | 833 | ) |
|
785 | 834 | |
|
786 | 835 | while True: |
@@ -788,7 +837,9 b' class ZstdCompressionChunker(object):' | |||
|
788 | 837 | self._compressor._cctx, self._out, self._in, lib.ZSTD_e_flush |
|
789 | 838 | ) |
|
790 | 839 | if lib.ZSTD_isError(zresult): |
|
791 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
840 | raise ZstdError( | |
|
841 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
842 | ) | |
|
792 | 843 | |
|
793 | 844 | if self._out.pos: |
|
794 | 845 | yield ffi.buffer(self._out.dst, self._out.pos)[:] |
@@ -812,7 +863,9 b' class ZstdCompressionChunker(object):' | |||
|
812 | 863 | self._compressor._cctx, self._out, self._in, lib.ZSTD_e_end |
|
813 | 864 | ) |
|
814 | 865 | if lib.ZSTD_isError(zresult): |
|
815 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
866 | raise ZstdError( | |
|
867 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
868 | ) | |
|
816 | 869 | |
|
817 | 870 | if self._out.pos: |
|
818 | 871 | yield ffi.buffer(self._out.dst, self._out.pos)[:] |
@@ -939,7 +992,10 b' class ZstdCompressionReader(object):' | |||
|
939 | 992 | old_pos = out_buffer.pos |
|
940 | 993 | |
|
941 | 994 | zresult = lib.ZSTD_compressStream2( |
|
942 | self._compressor._cctx, out_buffer, self._in_buffer, lib.ZSTD_e_continue | |
|
995 | self._compressor._cctx, | |
|
996 | out_buffer, | |
|
997 | self._in_buffer, | |
|
998 | lib.ZSTD_e_continue, | |
|
943 | 999 | ) |
|
944 | 1000 | |
|
945 | 1001 | self._bytes_compressed += out_buffer.pos - old_pos |
@@ -997,7 +1053,9 b' class ZstdCompressionReader(object):' | |||
|
997 | 1053 | self._bytes_compressed += out_buffer.pos - old_pos |
|
998 | 1054 | |
|
999 | 1055 | if lib.ZSTD_isError(zresult): |
|
1000 | raise ZstdError("error ending compression stream: %s", _zstd_error(zresult)) | |
|
1056 | raise ZstdError( | |
|
1057 | "error ending compression stream: %s", _zstd_error(zresult) | |
|
1058 | ) | |
|
1001 | 1059 | |
|
1002 | 1060 | if zresult == 0: |
|
1003 | 1061 | self._finished_output = True |
@@ -1102,7 +1160,9 b' class ZstdCompressionReader(object):' | |||
|
1102 | 1160 | self._bytes_compressed += out_buffer.pos - old_pos |
|
1103 | 1161 | |
|
1104 | 1162 | if lib.ZSTD_isError(zresult): |
|
1105 | raise ZstdError("error ending compression stream: %s", _zstd_error(zresult)) | |
|
1163 | raise ZstdError( | |
|
1164 | "error ending compression stream: %s", _zstd_error(zresult) | |
|
1165 | ) | |
|
1106 | 1166 | |
|
1107 | 1167 | if zresult == 0: |
|
1108 | 1168 | self._finished_output = True |
@@ -1170,13 +1230,17 b' class ZstdCompressor(object):' | |||
|
1170 | 1230 | threads=0, |
|
1171 | 1231 | ): |
|
1172 | 1232 | if level > lib.ZSTD_maxCLevel(): |
|
1173 | raise ValueError("level must be less than %d" % lib.ZSTD_maxCLevel()) | |
|
1233 | raise ValueError( | |
|
1234 | "level must be less than %d" % lib.ZSTD_maxCLevel() | |
|
1235 | ) | |
|
1174 | 1236 | |
|
1175 | 1237 | if threads < 0: |
|
1176 | 1238 | threads = _cpu_count() |
|
1177 | 1239 | |
|
1178 | 1240 | if compression_params and write_checksum is not None: |
|
1179 | raise ValueError("cannot define compression_params and " "write_checksum") | |
|
1241 | raise ValueError( | |
|
1242 | "cannot define compression_params and " "write_checksum" | |
|
1243 | ) | |
|
1180 | 1244 | |
|
1181 | 1245 | if compression_params and write_content_size is not None: |
|
1182 | 1246 | raise ValueError( |
@@ -1184,7 +1248,9 b' class ZstdCompressor(object):' | |||
|
1184 | 1248 | ) |
|
1185 | 1249 | |
|
1186 | 1250 | if compression_params and write_dict_id is not None: |
|
1187 | raise ValueError("cannot define compression_params and " "write_dict_id") | |
|
1251 | raise ValueError( | |
|
1252 | "cannot define compression_params and " "write_dict_id" | |
|
1253 | ) | |
|
1188 | 1254 | |
|
1189 | 1255 | if compression_params and threads: |
|
1190 | 1256 | raise ValueError("cannot define compression_params and threads") |
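Several of the re-wrapped error messages above are written as adjacent string literals, for example "cannot define compression_params and " "write_checksum". A short, standalone reminder of how that reads at runtime (illustrative only, not repository code):

    # Adjacent string literals are concatenated at compile time, so the
    # split-up message is still one string; black keeps the split as-is.
    message = "cannot define compression_params and " "write_checksum"
    assert message == "cannot define compression_params and write_checksum"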
@@ -1201,7 +1267,9 b' class ZstdCompressor(object):' | |||
|
1201 | 1267 | |
|
1202 | 1268 | self._params = ffi.gc(params, lib.ZSTD_freeCCtxParams) |
|
1203 | 1269 | |
|
1204 | _set_compression_parameter(self._params, lib.ZSTD_c_compressionLevel, level) | 
|
1270 | _set_compression_parameter( | |
|
1271 | self._params, lib.ZSTD_c_compressionLevel, level | |
|
1272 | ) | |
|
1205 | 1273 | |
|
1206 | 1274 | _set_compression_parameter( |
|
1207 | 1275 | self._params, |
@@ -1210,7 +1278,9 b' class ZstdCompressor(object):' | |||
|
1210 | 1278 | ) |
|
1211 | 1279 | |
|
1212 | 1280 | _set_compression_parameter( |
|
1213 | self._params, lib.ZSTD_c_checksumFlag, 1 if write_checksum else 0 | |
|
1281 | self._params, | |
|
1282 | lib.ZSTD_c_checksumFlag, | |
|
1283 | 1 if write_checksum else 0, | |
|
1214 | 1284 | ) |
|
1215 | 1285 | |
|
1216 | 1286 | _set_compression_parameter( |
@@ -1218,7 +1288,9 b' class ZstdCompressor(object):' | |||
|
1218 | 1288 | ) |
|
1219 | 1289 | |
|
1220 | 1290 | if threads: |
|
1221 | _set_compression_parameter(self._params, lib.ZSTD_c_nbWorkers, threads) | 
|
1291 | _set_compression_parameter( | |
|
1292 | self._params, lib.ZSTD_c_nbWorkers, threads | |
|
1293 | ) | |
|
1222 | 1294 | |
|
1223 | 1295 | cctx = lib.ZSTD_createCCtx() |
|
1224 | 1296 | if cctx == ffi.NULL: |
@@ -1237,10 +1309,13 b' class ZstdCompressor(object):' | |||
|
1237 | 1309 | ) |
|
1238 | 1310 | |
|
1239 | 1311 | def _setup_cctx(self): |
|
1240 | zresult = lib.ZSTD_CCtx_setParametersUsingCCtxParams(self._cctx, self._params) | 
|
1312 | zresult = lib.ZSTD_CCtx_setParametersUsingCCtxParams( | |
|
1313 | self._cctx, self._params | |
|
1314 | ) | |
|
1241 | 1315 | if lib.ZSTD_isError(zresult): |
|
1242 | 1316 | raise ZstdError( |
|
1243 | "could not set compression parameters: %s" % _zstd_error(zresult) | 
|
1317 | "could not set compression parameters: %s" | |
|
1318 | % _zstd_error(zresult) | |
|
1244 | 1319 | ) |
|
1245 | 1320 | |
|
1246 | 1321 | dict_data = self._dict_data |
@@ -1259,7 +1334,8 b' class ZstdCompressor(object):' | |||
|
1259 | 1334 | |
|
1260 | 1335 | if lib.ZSTD_isError(zresult): |
|
1261 | 1336 | raise ZstdError( |
|
1262 | "could not load compression dictionary: %s" % _zstd_error(zresult) | 
|
1337 | "could not load compression dictionary: %s" | |
|
1338 | % _zstd_error(zresult) | |
|
1263 | 1339 | ) |
|
1264 | 1340 | |
|
1265 | 1341 | def memory_size(self): |
@@ -1275,7 +1351,9 b' class ZstdCompressor(object):' | |||
|
1275 | 1351 | |
|
1276 | 1352 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, len(data_buffer)) |
|
1277 | 1353 | if lib.ZSTD_isError(zresult): |
|
1278 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1354 | raise ZstdError( | |
|
1355 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1356 | ) | |
|
1279 | 1357 | |
|
1280 | 1358 | out_buffer = ffi.new("ZSTD_outBuffer *") |
|
1281 | 1359 | in_buffer = ffi.new("ZSTD_inBuffer *") |
@@ -1307,11 +1385,15 b' class ZstdCompressor(object):' | |||
|
1307 | 1385 | |
|
1308 | 1386 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size) |
|
1309 | 1387 | if lib.ZSTD_isError(zresult): |
|
1310 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1388 | raise ZstdError( | |
|
1389 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1390 | ) | |
|
1311 | 1391 | |
|
1312 | 1392 | cobj = ZstdCompressionObj() |
|
1313 | 1393 | cobj._out = ffi.new("ZSTD_outBuffer *") |
|
1314 | cobj._dst_buffer = ffi.new("char[]", COMPRESSION_RECOMMENDED_OUTPUT_SIZE) | 
|
1394 | cobj._dst_buffer = ffi.new( | |
|
1395 | "char[]", COMPRESSION_RECOMMENDED_OUTPUT_SIZE | |
|
1396 | ) | |
|
1315 | 1397 | cobj._out.dst = cobj._dst_buffer |
|
1316 | 1398 | cobj._out.size = COMPRESSION_RECOMMENDED_OUTPUT_SIZE |
|
1317 | 1399 | cobj._out.pos = 0 |
@@ -1328,7 +1410,9 b' class ZstdCompressor(object):' | |||
|
1328 | 1410 | |
|
1329 | 1411 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size) |
|
1330 | 1412 | if lib.ZSTD_isError(zresult): |
|
1331 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1413 | raise ZstdError( | |
|
1414 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1415 | ) | |
|
1332 | 1416 | |
|
1333 | 1417 | return ZstdCompressionChunker(self, chunk_size=chunk_size) |
|
1334 | 1418 | |
@@ -1353,7 +1437,9 b' class ZstdCompressor(object):' | |||
|
1353 | 1437 | |
|
1354 | 1438 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size) |
|
1355 | 1439 | if lib.ZSTD_isError(zresult): |
|
1356 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1440 | raise ZstdError( | |
|
1441 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1442 | ) | |
|
1357 | 1443 | |
|
1358 | 1444 | in_buffer = ffi.new("ZSTD_inBuffer *") |
|
1359 | 1445 | out_buffer = ffi.new("ZSTD_outBuffer *") |
@@ -1381,7 +1467,9 b' class ZstdCompressor(object):' | |||
|
1381 | 1467 | self._cctx, out_buffer, in_buffer, lib.ZSTD_e_continue |
|
1382 | 1468 | ) |
|
1383 | 1469 | if lib.ZSTD_isError(zresult): |
|
1384 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
1470 | raise ZstdError( | |
|
1471 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
1472 | ) | |
|
1385 | 1473 | |
|
1386 | 1474 | if out_buffer.pos: |
|
1387 | 1475 | ofh.write(ffi.buffer(out_buffer.dst, out_buffer.pos)) |
@@ -1423,7 +1511,9 b' class ZstdCompressor(object):' | |||
|
1423 | 1511 | |
|
1424 | 1512 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size) |
|
1425 | 1513 | if lib.ZSTD_isError(zresult): |
|
1426 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1514 | raise ZstdError( | |
|
1515 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1516 | ) | |
|
1427 | 1517 | |
|
1428 | 1518 | return ZstdCompressionReader(self, source, read_size) |
|
1429 | 1519 | |
@@ -1443,7 +1533,9 b' class ZstdCompressor(object):' | |||
|
1443 | 1533 | if size < 0: |
|
1444 | 1534 | size = lib.ZSTD_CONTENTSIZE_UNKNOWN |
|
1445 | 1535 | |
|
1446 | return ZstdCompressionWriter(self, writer, size, write_size, write_return_read) | |
|
1536 | return ZstdCompressionWriter( | |
|
1537 | self, writer, size, write_size, write_return_read | |
|
1538 | ) | |
|
1447 | 1539 | |
|
1448 | 1540 | write_to = stream_writer |
|
1449 | 1541 | |
@@ -1473,7 +1565,9 b' class ZstdCompressor(object):' | |||
|
1473 | 1565 | |
|
1474 | 1566 | zresult = lib.ZSTD_CCtx_setPledgedSrcSize(self._cctx, size) |
|
1475 | 1567 | if lib.ZSTD_isError(zresult): |
|
1476 | raise ZstdError("error setting source size: %s" % _zstd_error(zresult)) | |
|
1568 | raise ZstdError( | |
|
1569 | "error setting source size: %s" % _zstd_error(zresult) | |
|
1570 | ) | |
|
1477 | 1571 | |
|
1478 | 1572 | in_buffer = ffi.new("ZSTD_inBuffer *") |
|
1479 | 1573 | out_buffer = ffi.new("ZSTD_outBuffer *") |
@@ -1517,7 +1611,9 b' class ZstdCompressor(object):' | |||
|
1517 | 1611 | self._cctx, out_buffer, in_buffer, lib.ZSTD_e_continue |
|
1518 | 1612 | ) |
|
1519 | 1613 | if lib.ZSTD_isError(zresult): |
|
1520 | raise ZstdError("zstd compress error: %s" % _zstd_error(zresult)) | |
|
1614 | raise ZstdError( | |
|
1615 | "zstd compress error: %s" % _zstd_error(zresult) | |
|
1616 | ) | |
|
1521 | 1617 | |
|
1522 | 1618 | if out_buffer.pos: |
|
1523 | 1619 | data = ffi.buffer(out_buffer.dst, out_buffer.pos)[:] |
@@ -1596,10 +1692,14 b' def get_frame_parameters(data):' | |||
|
1596 | 1692 | data_buffer = ffi.from_buffer(data) |
|
1597 | 1693 | zresult = lib.ZSTD_getFrameHeader(params, data_buffer, len(data_buffer)) |
|
1598 | 1694 | if lib.ZSTD_isError(zresult): |
|
1599 | raise ZstdError("cannot get frame parameters: %s" % _zstd_error(zresult)) | |
|
1695 | raise ZstdError( | |
|
1696 | "cannot get frame parameters: %s" % _zstd_error(zresult) | |
|
1697 | ) | |
|
1600 | 1698 | |
|
1601 | 1699 | if zresult: |
|
1602 | raise ZstdError("not enough data for frame parameters; need %d bytes" % zresult) | |
|
1700 | raise ZstdError( | |
|
1701 | "not enough data for frame parameters; need %d bytes" % zresult | |
|
1702 | ) | |
|
1603 | 1703 | |
|
1604 | 1704 | return FrameParameters(params[0]) |
|
1605 | 1705 | |
@@ -1611,9 +1711,14 b' class ZstdCompressionDict(object):' | |||
|
1611 | 1711 | self.k = k |
|
1612 | 1712 | self.d = d |
|
1613 | 1713 | |
|
1614 | if dict_type not in (DICT_TYPE_AUTO, DICT_TYPE_RAWCONTENT, DICT_TYPE_FULLDICT): | |
|
1714 | if dict_type not in ( | |
|
1715 | DICT_TYPE_AUTO, | |
|
1716 | DICT_TYPE_RAWCONTENT, | |
|
1717 | DICT_TYPE_FULLDICT, | |
|
1718 | ): | |
|
1615 | 1719 | raise ValueError( |
|
1616 | "invalid dictionary load mode: %d; must use " "DICT_TYPE_* constants" | 
|
1720 | "invalid dictionary load mode: %d; must use " | |
|
1721 | "DICT_TYPE_* constants" | |
|
1617 | 1722 | ) |
|
1618 | 1723 | |
|
1619 | 1724 | self._dict_type = dict_type |
@@ -1630,7 +1735,9 b' class ZstdCompressionDict(object):' | |||
|
1630 | 1735 | |
|
1631 | 1736 | def precompute_compress(self, level=0, compression_params=None): |
|
1632 | 1737 | if level and compression_params: |
|
1633 | raise ValueError("must only specify one of level or " "compression_params") | |
|
1738 | raise ValueError( | |
|
1739 | "must only specify one of level or " "compression_params" | |
|
1740 | ) | |
|
1634 | 1741 | |
|
1635 | 1742 | if not level and not compression_params: |
|
1636 | 1743 | raise ValueError("must specify one of level or compression_params") |
@@ -1675,7 +1782,9 b' class ZstdCompressionDict(object):' | |||
|
1675 | 1782 | if ddict == ffi.NULL: |
|
1676 | 1783 | raise ZstdError("could not create decompression dict") |
|
1677 | 1784 | |
|
1678 | ddict = ffi.gc(ddict, lib.ZSTD_freeDDict, size=lib.ZSTD_sizeof_DDict(ddict)) | |
|
1785 | ddict = ffi.gc( | |
|
1786 | ddict, lib.ZSTD_freeDDict, size=lib.ZSTD_sizeof_DDict(ddict) | |
|
1787 | ) | |
|
1679 | 1788 | self.__dict__["_ddict"] = ddict |
|
1680 | 1789 | |
|
1681 | 1790 | return ddict |
@@ -1805,7 +1914,9 b' class ZstdDecompressionObj(object):' | |||
|
1805 | 1914 | self._decompressor._dctx, out_buffer, in_buffer |
|
1806 | 1915 | ) |
|
1807 | 1916 | if lib.ZSTD_isError(zresult): |
|
1808 | raise ZstdError("zstd decompressor error: %s" % _zstd_error(zresult)) | |
|
1917 | raise ZstdError( | |
|
1918 | "zstd decompressor error: %s" % _zstd_error(zresult) | |
|
1919 | ) | |
|
1809 | 1920 | |
|
1810 | 1921 | if zresult == 0: |
|
1811 | 1922 | self._finished = True |
@@ -2105,16 +2216,22 b' class ZstdDecompressionReader(object):' | |||
|
2105 | 2216 | |
|
2106 | 2217 | if whence == os.SEEK_SET: |
|
2107 | 2218 | if pos < 0: |
|
2108 | raise ValueError("cannot seek to negative position with SEEK_SET") | |
|
2219 | raise ValueError( | |
|
2220 | "cannot seek to negative position with SEEK_SET" | |
|
2221 | ) | |
|
2109 | 2222 | |
|
2110 | 2223 | if pos < self._bytes_decompressed: |
|
2111 | raise ValueError("cannot seek zstd decompression stream " "backwards") | |
|
2224 | raise ValueError( | |
|
2225 | "cannot seek zstd decompression stream " "backwards" | |
|
2226 | ) | |
|
2112 | 2227 | |
|
2113 | 2228 | read_amount = pos - self._bytes_decompressed |
|
2114 | 2229 | |
|
2115 | 2230 | elif whence == os.SEEK_CUR: |
|
2116 | 2231 | if pos < 0: |
|
2117 | raise ValueError("cannot seek zstd decompression stream " "backwards") | |
|
2232 | raise ValueError( | |
|
2233 | "cannot seek zstd decompression stream " "backwards" | |
|
2234 | ) | |
|
2118 | 2235 | |
|
2119 | 2236 | read_amount = pos |
|
2120 | 2237 | elif whence == os.SEEK_END: |
@@ -2123,7 +2240,9 b' class ZstdDecompressionReader(object):' | |||
|
2123 | 2240 | ) |
|
2124 | 2241 | |
|
2125 | 2242 | while read_amount: |
|
2126 | result = self.read(min(read_amount, DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE)) | |
|
2243 | result = self.read( | |
|
2244 | min(read_amount, DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE) | |
|
2245 | ) | |
|
2127 | 2246 | |
|
2128 | 2247 | if not result: |
|
2129 | 2248 | break |
@@ -2257,10 +2376,14 b' class ZstdDecompressionWriter(object):' | |||
|
2257 | 2376 | while in_buffer.pos < in_buffer.size: |
|
2258 | 2377 | zresult = lib.ZSTD_decompressStream(dctx, out_buffer, in_buffer) |
|
2259 | 2378 | if lib.ZSTD_isError(zresult): |
|
2260 | raise ZstdError("zstd decompress error: %s" % _zstd_error(zresult)) | |
|
2379 | raise ZstdError( | |
|
2380 | "zstd decompress error: %s" % _zstd_error(zresult) | |
|
2381 | ) | |
|
2261 | 2382 | |
|
2262 | 2383 | if out_buffer.pos: |
|
2263 | self._writer.write(ffi.buffer(out_buffer.dst, out_buffer.pos)[:]) | 
|
2384 | self._writer.write( | |
|
2385 | ffi.buffer(out_buffer.dst, out_buffer.pos)[:] | |
|
2386 | ) | |
|
2264 | 2387 | total_write += out_buffer.pos |
|
2265 | 2388 | out_buffer.pos = 0 |
|
2266 | 2389 | |
@@ -2299,7 +2422,9 b' class ZstdDecompressor(object):' | |||
|
2299 | 2422 | |
|
2300 | 2423 | data_buffer = ffi.from_buffer(data) |
|
2301 | 2424 | |
|
2302 | output_size = lib.ZSTD_getFrameContentSize(data_buffer, len(data_buffer)) | 
|
2425 | output_size = lib.ZSTD_getFrameContentSize( | |
|
2426 | data_buffer, len(data_buffer) | |
|
2427 | ) | |
|
2303 | 2428 | |
|
2304 | 2429 | if output_size == lib.ZSTD_CONTENTSIZE_ERROR: |
|
2305 | 2430 | raise ZstdError("error determining content size from frame header") |
@@ -2307,7 +2432,9 b' class ZstdDecompressor(object):' | |||
|
2307 | 2432 | return b"" |
|
2308 | 2433 | elif output_size == lib.ZSTD_CONTENTSIZE_UNKNOWN: |
|
2309 | 2434 | if not max_output_size: |
|
2310 | raise ZstdError("could not determine content size in frame header") | |
|
2435 | raise ZstdError( | |
|
2436 | "could not determine content size in frame header" | |
|
2437 | ) | |
|
2311 | 2438 | |
|
2312 | 2439 | result_buffer = ffi.new("char[]", max_output_size) |
|
2313 | 2440 | result_size = max_output_size |
@@ -2330,7 +2457,9 b' class ZstdDecompressor(object):' | |||
|
2330 | 2457 | if lib.ZSTD_isError(zresult): |
|
2331 | 2458 | raise ZstdError("decompression error: %s" % _zstd_error(zresult)) |
|
2332 | 2459 | elif zresult: |
|
2333 | raise ZstdError("decompression error: did not decompress full frame") | |
|
2460 | raise ZstdError( | |
|
2461 | "decompression error: did not decompress full frame" | |
|
2462 | ) | |
|
2334 | 2463 | elif output_size and out_buffer.pos != output_size: |
|
2335 | 2464 | raise ZstdError( |
|
2336 | 2465 | "decompression error: decompressed %d bytes; expected %d" |
@@ -2346,7 +2475,9 b' class ZstdDecompressor(object):' | |||
|
2346 | 2475 | read_across_frames=False, |
|
2347 | 2476 | ): |
|
2348 | 2477 | self._ensure_dctx() |
|
2349 | return ZstdDecompressionReader(self, source, read_size, read_across_frames) | 
|
2478 | return ZstdDecompressionReader( | |
|
2479 | self, source, read_size, read_across_frames | |
|
2480 | ) | |
|
2350 | 2481 | |
|
2351 | 2482 | def decompressobj(self, write_size=DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE): |
|
2352 | 2483 | if write_size < 1: |
@@ -2421,9 +2552,13 b' class ZstdDecompressor(object):' | |||
|
2421 | 2552 | while in_buffer.pos < in_buffer.size: |
|
2422 | 2553 | assert out_buffer.pos == 0 |
|
2423 | 2554 | |
|
2424 | zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer) | 
|
2555 | zresult = lib.ZSTD_decompressStream( | |
|
2556 | self._dctx, out_buffer, in_buffer | |
|
2557 | ) | |
|
2425 | 2558 | if lib.ZSTD_isError(zresult): |
|
2426 | raise ZstdError("zstd decompress error: %s" % _zstd_error(zresult)) | |
|
2559 | raise ZstdError( | |
|
2560 | "zstd decompress error: %s" % _zstd_error(zresult) | |
|
2561 | ) | |
|
2427 | 2562 | |
|
2428 | 2563 | if out_buffer.pos: |
|
2429 | 2564 | data = ffi.buffer(out_buffer.dst, out_buffer.pos)[:] |
@@ -2449,7 +2584,9 b' class ZstdDecompressor(object):' | |||
|
2449 | 2584 | if not hasattr(writer, "write"): |
|
2450 | 2585 | raise ValueError("must pass an object with a write() method") |
|
2451 | 2586 | |
|
2452 | return ZstdDecompressionWriter(self, writer, write_size, write_return_read) | 
|
2587 | return ZstdDecompressionWriter( | |
|
2588 | self, writer, write_size, write_return_read | |
|
2589 | ) | |
|
2453 | 2590 | |
|
2454 | 2591 | write_to = stream_writer |
|
2455 | 2592 | |
@@ -2491,7 +2628,9 b' class ZstdDecompressor(object):' | |||
|
2491 | 2628 | |
|
2492 | 2629 | # Flush all read data to output. |
|
2493 | 2630 | while in_buffer.pos < in_buffer.size: |
|
2494 | zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer) | 
|
2631 | zresult = lib.ZSTD_decompressStream( | |
|
2632 | self._dctx, out_buffer, in_buffer | |
|
2633 | ) | |
|
2495 | 2634 | if lib.ZSTD_isError(zresult): |
|
2496 | 2635 | raise ZstdError( |
|
2497 | 2636 | "zstd decompressor error: %s" % _zstd_error(zresult) |
@@ -2521,7 +2660,9 b' class ZstdDecompressor(object):' | |||
|
2521 | 2660 | # All chunks should be zstd frames and should have content size set. |
|
2522 | 2661 | chunk_buffer = ffi.from_buffer(chunk) |
|
2523 | 2662 | params = ffi.new("ZSTD_frameHeader *") |
|
2524 | zresult = lib.ZSTD_getFrameHeader(params, chunk_buffer, len(chunk_buffer)) | 
|
2663 | zresult = lib.ZSTD_getFrameHeader( | |
|
2664 | params, chunk_buffer, len(chunk_buffer) | |
|
2665 | ) | |
|
2525 | 2666 | if lib.ZSTD_isError(zresult): |
|
2526 | 2667 | raise ValueError("chunk 0 is not a valid zstd frame") |
|
2527 | 2668 | elif zresult: |
@@ -2546,7 +2687,9 b' class ZstdDecompressor(object):' | |||
|
2546 | 2687 | |
|
2547 | 2688 | zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer) |
|
2548 | 2689 | if lib.ZSTD_isError(zresult): |
|
2549 | raise ZstdError("could not decompress chunk 0: %s" % _zstd_error(zresult)) | |
|
2690 | raise ZstdError( | |
|
2691 | "could not decompress chunk 0: %s" % _zstd_error(zresult) | |
|
2692 | ) | |
|
2550 | 2693 | elif zresult: |
|
2551 | 2694 | raise ZstdError("chunk 0 did not decompress full frame") |
|
2552 | 2695 | |
@@ -2561,11 +2704,15 b' class ZstdDecompressor(object):' | |||
|
2561 | 2704 | raise ValueError("chunk %d must be bytes" % i) |
|
2562 | 2705 | |
|
2563 | 2706 | chunk_buffer = ffi.from_buffer(chunk) |
|
2564 | zresult = lib.ZSTD_getFrameHeader(params, chunk_buffer, len(chunk_buffer)) | 
|
2707 | zresult = lib.ZSTD_getFrameHeader( | |
|
2708 | params, chunk_buffer, len(chunk_buffer) | |
|
2709 | ) | |
|
2565 | 2710 | if lib.ZSTD_isError(zresult): |
|
2566 | 2711 | raise ValueError("chunk %d is not a valid zstd frame" % i) |
|
2567 | 2712 | elif zresult: |
|
2568 | raise ValueError("chunk %d is too small to contain a zstd frame" % i) | |
|
2713 | raise ValueError( | |
|
2714 | "chunk %d is too small to contain a zstd frame" % i | |
|
2715 | ) | |
|
2569 | 2716 | |
|
2570 | 2717 | if params.frameContentSize == lib.ZSTD_CONTENTSIZE_UNKNOWN: |
|
2571 | 2718 | raise ValueError("chunk %d missing content size in frame" % i) |
@@ -2580,7 +2727,9 b' class ZstdDecompressor(object):' | |||
|
2580 | 2727 | in_buffer.size = len(chunk_buffer) |
|
2581 | 2728 | in_buffer.pos = 0 |
|
2582 | 2729 | |
|
2583 | zresult = lib.ZSTD_decompressStream(self._dctx, out_buffer, in_buffer) | 
|
2730 | zresult = lib.ZSTD_decompressStream( | |
|
2731 | self._dctx, out_buffer, in_buffer | |
|
2732 | ) | |
|
2584 | 2733 | if lib.ZSTD_isError(zresult): |
|
2585 | 2734 | raise ZstdError( |
|
2586 | 2735 | "could not decompress chunk %d: %s" % _zstd_error(zresult) |
@@ -2597,7 +2746,9 b' class ZstdDecompressor(object):' | |||
|
2597 | 2746 | lib.ZSTD_DCtx_reset(self._dctx, lib.ZSTD_reset_session_only) |
|
2598 | 2747 | |
|
2599 | 2748 | if self._max_window_size: |
|
2600 | zresult = lib.ZSTD_DCtx_setMaxWindowSize(self._dctx, self._max_window_size) | 
|
2749 | zresult = lib.ZSTD_DCtx_setMaxWindowSize( | |
|
2750 | self._dctx, self._max_window_size | |
|
2751 | ) | |
|
2601 | 2752 | if lib.ZSTD_isError(zresult): |
|
2602 | 2753 | raise ZstdError( |
|
2603 | 2754 | "unable to set max window size: %s" % _zstd_error(zresult) |
@@ -2605,11 +2756,14 b' class ZstdDecompressor(object):' | |||
|
2605 | 2756 | |
|
2606 | 2757 | zresult = lib.ZSTD_DCtx_setFormat(self._dctx, self._format) |
|
2607 | 2758 | if lib.ZSTD_isError(zresult): |
|
2608 | raise ZstdError("unable to set decoding format: %s" % _zstd_error(zresult)) | |
|
2759 | raise ZstdError( | |
|
2760 | "unable to set decoding format: %s" % _zstd_error(zresult) | |
|
2761 | ) | |
|
2609 | 2762 | |
|
2610 | 2763 | if self._dict_data and load_dict: |
|
2611 | 2764 | zresult = lib.ZSTD_DCtx_refDDict(self._dctx, self._dict_data._ddict) |
|
2612 | 2765 | if lib.ZSTD_isError(zresult): |
|
2613 | 2766 | raise ZstdError( |
|
2614 | "unable to reference prepared dictionary: %s" % _zstd_error(zresult) | 
|
2767 | "unable to reference prepared dictionary: %s" | |
|
2768 | % _zstd_error(zresult) | |
|
2615 | 2769 | ) |
@@ -1,5 +1,5 b'' | |||
|
1 | 1 | #require black |
|
2 | 2 | |
|
3 | 3 | $ cd $RUNTESTDIR/.. |
|
4 | $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**
|
4 | $ black --config=black.toml --check --diff `hg files 'set:(**.py + grep("^#!.*python")) - mercurial/thirdparty/**'` | |
|
5 | 5 |