merge to stable for 5.3 release freeze
Augie Fackler
r44975:84a0102c merge 5.3rc0 stable

The requested changes are too big and content was truncated.

@@ -0,0 +1,368 @@
1 //===- FuzzedDataProvider.h - Utility header for fuzz targets ---*- C++ -* ===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 // A single header library providing a utility class to break up an array of
9 // bytes. Whenever run on the same input, provides the same output, as long as
10 // its methods are called in the same order, with the same arguments.
11 //===----------------------------------------------------------------------===//
12
13 #ifndef LLVM_FUZZER_FUZZED_DATA_PROVIDER_H_
14 #define LLVM_FUZZER_FUZZED_DATA_PROVIDER_H_
15
16 #include <algorithm>
17 #include <climits>
18 #include <cstddef>
19 #include <cstdint>
20 #include <cstring>
21 #include <initializer_list>
22 #include <string>
23 #include <type_traits>
24 #include <utility>
25 #include <vector>
26
27 // In addition to the comments below, the API is also briefly documented at
28 // https://github.com/google/fuzzing/blob/master/docs/split-inputs.md#fuzzed-data-provider
29 class FuzzedDataProvider
30 {
31 public:
32 // |data| is an array of length |size| that the FuzzedDataProvider wraps
33 // to provide more granular access. |data| must outlive the
34 // FuzzedDataProvider.
35 FuzzedDataProvider(const uint8_t *data, size_t size)
36 : data_ptr_(data), remaining_bytes_(size)
37 {
38 }
39 ~FuzzedDataProvider() = default;
40
41 // Returns a std::vector containing |num_bytes| of input data. If fewer
42 // than |num_bytes| of data remain, returns a shorter std::vector
43 // containing all of the data that's left. Can be used with any byte
44 // sized type, such as char, unsigned char, uint8_t, etc.
45 template <typename T> std::vector<T> ConsumeBytes(size_t num_bytes)
46 {
47 num_bytes = std::min(num_bytes, remaining_bytes_);
48 return ConsumeBytes<T>(num_bytes, num_bytes);
49 }
50
51 // Similar to |ConsumeBytes|, but also appends the terminator value at
52 // the end of the resulting vector. Useful, when a mutable
53 // null-terminated C-string is needed, for example. But that is a rare
54 // case. Better avoid it, if possible, and prefer using |ConsumeBytes|
55 // or |ConsumeBytesAsString| methods.
56 template <typename T>
57 std::vector<T> ConsumeBytesWithTerminator(size_t num_bytes,
58 T terminator = 0)
59 {
60 num_bytes = std::min(num_bytes, remaining_bytes_);
61 std::vector<T> result =
62 ConsumeBytes<T>(num_bytes + 1, num_bytes);
63 result.back() = terminator;
64 return result;
65 }
66
67 // Returns a std::string containing |num_bytes| of input data. Using
68 // this and
69 // |.c_str()| on the resulting string is the best way to get an
70 // immutable null-terminated C string. If fewer than |num_bytes| of data
71 // remain, returns a shorter std::string containing all of the data
72 // that's left.
73 std::string ConsumeBytesAsString(size_t num_bytes)
74 {
75 static_assert(sizeof(std::string::value_type) ==
76 sizeof(uint8_t),
77 "ConsumeBytesAsString cannot convert the data to "
78 "a string.");
79
80 num_bytes = std::min(num_bytes, remaining_bytes_);
81 std::string result(
82 reinterpret_cast<const std::string::value_type *>(
83 data_ptr_),
84 num_bytes);
85 Advance(num_bytes);
86 return result;
87 }
88
89 // Returns a number in the range [min, max] by consuming bytes from the
90 // input data. The value might not be uniformly distributed in the given
91 // range. If there's no input data left, always returns |min|. |min|
92 // must be less than or equal to |max|.
93 template <typename T> T ConsumeIntegralInRange(T min, T max)
94 {
95 static_assert(std::is_integral<T>::value,
96 "An integral type is required.");
97 static_assert(sizeof(T) <= sizeof(uint64_t),
98 "Unsupported integral type.");
99
100 if (min > max)
101 abort();
102
103 // Use the biggest type possible to hold the range and the
104 // result.
105 uint64_t range = static_cast<uint64_t>(max) - min;
106 uint64_t result = 0;
107 size_t offset = 0;
108
109 while (offset < sizeof(T) * CHAR_BIT && (range >> offset) > 0 &&
110 remaining_bytes_ != 0) {
111 // Pull bytes off the end of the seed data.
112 // Experimentally, this seems to allow the fuzzer to
113 // more easily explore the input space. This makes
114 // sense, since it works by modifying inputs that caused
115 // new code to run, and this data is often used to
116 // encode length of data read by |ConsumeBytes|.
117 // Separating out read lengths makes it easier to modify
118 // the contents of the data that is actually read.
119 --remaining_bytes_;
120 result =
121 (result << CHAR_BIT) | data_ptr_[remaining_bytes_];
122 offset += CHAR_BIT;
123 }
124
125 // Avoid division by 0, in case |range + 1| results in overflow.
126 if (range != std::numeric_limits<decltype(range)>::max())
127 result = result % (range + 1);
128
129 return static_cast<T>(min + result);
130 }
131
132 // Returns a std::string of length from 0 to |max_length|. When it runs
133 // out of input data, returns what remains of the input. Designed to be
134 // more stable with respect to a fuzzer inserting characters than just
135 // picking a random length and then consuming that many bytes with
136 // |ConsumeBytes|.
137 std::string ConsumeRandomLengthString(size_t max_length)
138 {
139 // Reads bytes from the start of |data_ptr_|. Maps "\\" to "\",
140 // and maps "\" followed by anything else to the end of the
141 // string. As a result of this logic, a fuzzer can insert
142 // characters into the string, and the string will be lengthened
143 // to include those new characters, resulting in a more stable
144 // fuzzer than picking the length of a string independently from
145 // picking its contents.
146 std::string result;
147
148 // Reserve the anticipated capacity to prevent several
149 // reallocations.
150 result.reserve(std::min(max_length, remaining_bytes_));
151 for (size_t i = 0; i < max_length && remaining_bytes_ != 0;
152 ++i) {
153 char next = ConvertUnsignedToSigned<char>(data_ptr_[0]);
154 Advance(1);
155 if (next == '\\' && remaining_bytes_ != 0) {
156 next =
157 ConvertUnsignedToSigned<char>(data_ptr_[0]);
158 Advance(1);
159 if (next != '\\')
160 break;
161 }
162 result += next;
163 }
164
165 result.shrink_to_fit();
166 return result;
167 }
168
169 // Returns a std::vector containing all remaining bytes of the input
170 // data.
171 template <typename T> std::vector<T> ConsumeRemainingBytes()
172 {
173 return ConsumeBytes<T>(remaining_bytes_);
174 }
175
176 // Returns a std::string containing all remaining bytes of the input
177 // data. Prefer using |ConsumeRemainingBytes| unless you actually need a
178 // std::string object.
179 std::string ConsumeRemainingBytesAsString()
180 {
181 return ConsumeBytesAsString(remaining_bytes_);
182 }
183
184 // Returns a number in the range [Type's min, Type's max]. The value
185 // might not be uniformly distributed in the given range. If there's no
186 // input data left, always returns |min|.
187 template <typename T> T ConsumeIntegral()
188 {
189 return ConsumeIntegralInRange(std::numeric_limits<T>::min(),
190 std::numeric_limits<T>::max());
191 }
192
193 // Reads one byte and returns a bool, or false when no data remains.
194 bool ConsumeBool()
195 {
196 return 1 & ConsumeIntegral<uint8_t>();
197 }
198
199 // Returns a copy of the value selected from the given fixed-size
200 // |array|.
201 template <typename T, size_t size>
202 T PickValueInArray(const T (&array)[size])
203 {
204 static_assert(size > 0, "The array must be non empty.");
205 return array[ConsumeIntegralInRange<size_t>(0, size - 1)];
206 }
207
208 template <typename T>
209 T PickValueInArray(std::initializer_list<const T> list)
210 {
211 // TODO(Dor1s): switch to static_assert once C++14 is allowed.
212 if (!list.size())
213 abort();
214
215 return *(list.begin() +
216 ConsumeIntegralInRange<size_t>(0, list.size() - 1));
217 }
218
219 // Returns an enum value. The enum must start at 0 and be contiguous. It
220 // must also contain |kMaxValue| aliased to its largest (inclusive)
221 // value. Such as: enum class Foo { SomeValue, OtherValue, kMaxValue =
222 // OtherValue };
223 template <typename T> T ConsumeEnum()
224 {
225 static_assert(std::is_enum<T>::value,
226 "|T| must be an enum type.");
227 return static_cast<T>(ConsumeIntegralInRange<uint32_t>(
228 0, static_cast<uint32_t>(T::kMaxValue)));
229 }
230
231 // Returns a floating point number in the range [0.0, 1.0]. If there's
232 // no input data left, always returns 0.
233 template <typename T> T ConsumeProbability()
234 {
235 static_assert(std::is_floating_point<T>::value,
236 "A floating point type is required.");
237
238 // Use different integral types for different floating point
239 // types in order to provide better density of the resulting
240 // values.
241 using IntegralType =
242 typename std::conditional<(sizeof(T) <= sizeof(uint32_t)),
243 uint32_t, uint64_t>::type;
244
245 T result = static_cast<T>(ConsumeIntegral<IntegralType>());
246 result /=
247 static_cast<T>(std::numeric_limits<IntegralType>::max());
248 return result;
249 }
250
251 // Returns a floating point value in the range [Type's lowest, Type's
252 // max] by consuming bytes from the input data. If there's no input data
253 // left, always returns approximately 0.
254 template <typename T> T ConsumeFloatingPoint()
255 {
256 return ConsumeFloatingPointInRange<T>(
257 std::numeric_limits<T>::lowest(),
258 std::numeric_limits<T>::max());
259 }
260
261 // Returns a floating point value in the given range by consuming bytes
262 // from the input data. If there's no input data left, returns |min|.
263 // Note that |min| must be less than or equal to |max|.
264 template <typename T> T ConsumeFloatingPointInRange(T min, T max)
265 {
266 if (min > max)
267 abort();
268
269 T range = .0;
270 T result = min;
271 constexpr T zero(.0);
272 if (max > zero && min < zero &&
273 max > min + std::numeric_limits<T>::max()) {
274 // The diff |max - min| would overflow the given
275 // floating point type. Use the half of the diff as the
276 // range and consume a bool to decide whether the result
277 // is in the first or the second part of the diff.
278 range = (max / 2.0) - (min / 2.0);
279 if (ConsumeBool()) {
280 result += range;
281 }
282 } else {
283 range = max - min;
284 }
285
286 return result + range * ConsumeProbability<T>();
287 }
288
289 // Reports the remaining bytes available for fuzzed input.
290 size_t remaining_bytes()
291 {
292 return remaining_bytes_;
293 }
294
295 private:
296 FuzzedDataProvider(const FuzzedDataProvider &) = delete;
297 FuzzedDataProvider &operator=(const FuzzedDataProvider &) = delete;
298
299 void Advance(size_t num_bytes)
300 {
301 if (num_bytes > remaining_bytes_)
302 abort();
303
304 data_ptr_ += num_bytes;
305 remaining_bytes_ -= num_bytes;
306 }
307
308 template <typename T>
309 std::vector<T> ConsumeBytes(size_t size, size_t num_bytes_to_consume)
310 {
311 static_assert(sizeof(T) == sizeof(uint8_t),
312 "Incompatible data type.");
313
314 // The point of using the size-based constructor below is to
315 // increase the odds of having a vector object with capacity
316 // being equal to the length. That part is always implementation
317 // specific, but at least both libc++ and libstdc++ allocate the
318 // requested number of bytes in that constructor, which seems to
319 // be a natural choice for other implementations as well. To
320 // increase the odds even more, we also call |shrink_to_fit|
321 // below.
322 std::vector<T> result(size);
323 if (size == 0) {
324 if (num_bytes_to_consume != 0)
325 abort();
326 return result;
327 }
328
329 std::memcpy(result.data(), data_ptr_, num_bytes_to_consume);
330 Advance(num_bytes_to_consume);
331
332 // Even though |shrink_to_fit| is also implementation specific,
333 // we expect it to provide an additional assurance in case
334 // vector's constructor allocated a buffer which is larger than
335 // the actual amount of data we put inside it.
336 result.shrink_to_fit();
337 return result;
338 }
339
340 template <typename TS, typename TU> TS ConvertUnsignedToSigned(TU value)
341 {
342 static_assert(sizeof(TS) == sizeof(TU),
343 "Incompatible data types.");
344 static_assert(!std::numeric_limits<TU>::is_signed,
345 "Source type must be unsigned.");
346
347 // TODO(Dor1s): change to `if constexpr` once C++17 becomes
348 // mainstream.
349 if (std::numeric_limits<TS>::is_modulo)
350 return static_cast<TS>(value);
351
352 // Avoid using implementation-defined unsigned to signed
353 // conversions. To learn more, see
354 // https://stackoverflow.com/questions/13150449.
355 if (value <= std::numeric_limits<TS>::max()) {
356 return static_cast<TS>(value);
357 } else {
358 constexpr auto TS_min = std::numeric_limits<TS>::min();
359 return TS_min + static_cast<char>(value - TS_min);
360 }
361 }
362
363 const uint8_t *data_ptr_;
364 size_t remaining_bytes_;
365 };
366
367 #endif // LLVM_FUZZER_FUZZED_DATA_PROVIDER_H_
368 // no-check-code since this is from a third party
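
For reference, a fuzz target consumes the header above roughly as in the following sketch. This is an illustration only, not part of the commit; the parse_config() function and its parameters are hypothetical stand-ins for whatever code is under test.

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

#include "FuzzedDataProvider.h"

// Hypothetical code under test; any parser or API would do here.
static void parse_config(bool verbose, uint16_t port, const std::string &name,
                         const std::vector<uint8_t> &payload)
{
	(void)verbose;
	(void)port;
	(void)name;
	(void)payload;
}

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
{
	// Wrap the raw input once, then carve typed values off of it.
	FuzzedDataProvider provider(data, size);

	bool verbose = provider.ConsumeBool();
	uint16_t port = provider.ConsumeIntegralInRange<uint16_t>(1, 65535);
	std::string name = provider.ConsumeRandomLengthString(64);
	// Whatever is left becomes the free-form payload.
	std::vector<uint8_t> payload = provider.ConsumeRemainingBytes<uint8_t>();

	parse_config(verbose, port, name, payload);
	return 0; // Non-zero return values are reserved for future use.
}

Because the integral consumers read from the end of the input while the byte consumers read from the front, the scalar values and the payload stay loosely decoupled, which the header's own comments note tends to help the fuzzer explore the input space.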
@@ -0,0 +1,29 @@
1 from __future__ import absolute_import, print_function
2
3 import argparse
4 import zipfile
5
6 ap = argparse.ArgumentParser()
7 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
8 args = ap.parse_args()
9
10 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
11 zf.writestr(
12 "greek-tree",
13 "\n".join(
14 [
15 "iota",
16 "A/mu",
17 "A/B/lambda",
18 "A/B/E/alpha",
19 "A/B/E/beta",
20 "A/D/gamma",
21 "A/D/G/pi",
22 "A/D/G/rho",
23 "A/D/G/tau",
24 "A/D/H/chi",
25 "A/D/H/omega",
26 "A/D/H/psi",
27 ]
28 ),
29 )
@@ -0,0 +1,45 @@
1 // Copyright 2017 Google Inc. All Rights Reserved.
2 // Licensed under the Apache License, Version 2.0 (the "License");
3
4 // Example of a standalone runner for "fuzz targets".
5 // It reads all files passed as parameters and feeds their contents
6 // one by one into the fuzz target (LLVMFuzzerTestOneInput).
7 // This runner does not do any fuzzing, but allows us to run the fuzz target
8 // on the test corpus (e.g. "do_stuff_test_data") or on a single file,
9 // e.g. the one that comes from a bug report.
10
11 #include <cassert>
12 #include <fstream>
13 #include <iostream>
14 #include <vector>
15
16 // Forward declare the "fuzz target" interface.
17 // We deliberately keep this interface simple and header-free.
18 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size);
19
20 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv);
21
22 int main(int argc, char **argv)
23 {
24 LLVMFuzzerInitialize(&argc, &argv);
25
26 for (int i = 1; i < argc; i++) {
27 std::ifstream in(argv[i]);
28 in.seekg(0, in.end);
29 size_t length = in.tellg();
30 in.seekg(0, in.beg);
31 std::cout << "Reading " << length << " bytes from " << argv[i]
32 << std::endl;
33 // Allocate exactly length bytes so that we reliably catch
34 // buffer overflows.
35 std::vector<char> bytes(length);
36 in.read(bytes.data(), bytes.size());
37 assert(in);
38 LLVMFuzzerTestOneInput(
39 reinterpret_cast<const uint8_t *>(bytes.data()),
40 bytes.size());
41 std::cout << "Execution successful" << std::endl;
42 }
43 return 0;
44 }
45 // no-check-code since this is from a third party
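
For reference, the runner above can drive any object file that defines the two entry points it declares; a stub target like the sketch below (illustrative only, not part of this change) is enough to exercise its read-and-replay loop.

#include <cstddef>
#include <cstdint>

// Called once by the runner before any inputs are replayed.
extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
{
	(void)argc;
	(void)argv;
	return 0;
}

// Called once per input file passed to the runner on the command line.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
{
	(void)data;
	(void)size;
	return 0; // Non-zero return values are reserved for future use.
}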
@@ -0,0 +1,153 @@
1 # cli.py - Command line interface for automation
2 #
3 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 # no-check-code because Python 3 native.
9
10 import argparse
11 import os
12 import pathlib
13
14 from . import (
15 inno,
16 wix,
17 )
18
19 HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
20 SOURCE_DIR = HERE.parent.parent.parent
21
22
23 def build_inno(python=None, iscc=None, version=None):
24 if not os.path.isabs(python):
25 raise Exception("--python arg must be an absolute path")
26
27 if iscc:
28 iscc = pathlib.Path(iscc)
29 else:
30 iscc = (
31 pathlib.Path(os.environ["ProgramFiles(x86)"])
32 / "Inno Setup 5"
33 / "ISCC.exe"
34 )
35
36 build_dir = SOURCE_DIR / "build"
37
38 inno.build(
39 SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
40 )
41
42
43 def build_wix(
44 name=None,
45 python=None,
46 version=None,
47 sign_sn=None,
48 sign_cert=None,
49 sign_password=None,
50 sign_timestamp_url=None,
51 extra_packages_script=None,
52 extra_wxs=None,
53 extra_features=None,
54 ):
55 fn = wix.build_installer
56 kwargs = {
57 "source_dir": SOURCE_DIR,
58 "python_exe": pathlib.Path(python),
59 "version": version,
60 }
61
62 if not os.path.isabs(python):
63 raise Exception("--python arg must be an absolute path")
64
65 if extra_packages_script:
66 kwargs["extra_packages_script"] = extra_packages_script
67 if extra_wxs:
68 kwargs["extra_wxs"] = dict(
69 thing.split("=") for thing in extra_wxs.split(",")
70 )
71 if extra_features:
72 kwargs["extra_features"] = extra_features.split(",")
73
74 if sign_sn or sign_cert:
75 fn = wix.build_signed_installer
76 kwargs["name"] = name
77 kwargs["subject_name"] = sign_sn
78 kwargs["cert_path"] = sign_cert
79 kwargs["cert_password"] = sign_password
80 kwargs["timestamp_url"] = sign_timestamp_url
81
82 fn(**kwargs)
83
84
85 def get_parser():
86 parser = argparse.ArgumentParser()
87
88 subparsers = parser.add_subparsers()
89
90 sp = subparsers.add_parser("inno", help="Build Inno Setup installer")
91 sp.add_argument("--python", required=True, help="path to python.exe to use")
92 sp.add_argument("--iscc", help="path to iscc.exe to use")
93 sp.add_argument(
94 "--version",
95 help="Mercurial version string to use "
96 "(detected from __version__.py if not defined",
97 )
98 sp.set_defaults(func=build_inno)
99
100 sp = subparsers.add_parser(
101 "wix", help="Build Windows installer with WiX Toolset"
102 )
103 sp.add_argument("--name", help="Application name", default="Mercurial")
104 sp.add_argument(
105 "--python", help="Path to Python executable to use", required=True
106 )
107 sp.add_argument(
108 "--sign-sn",
109 help="Subject name (or fragment thereof) of certificate "
110 "to use for signing",
111 )
112 sp.add_argument(
113 "--sign-cert", help="Path to certificate to use for signing"
114 )
115 sp.add_argument("--sign-password", help="Password for signing certificate")
116 sp.add_argument(
117 "--sign-timestamp-url",
118 help="URL of timestamp server to use for signing",
119 )
120 sp.add_argument("--version", help="Version string to use")
121 sp.add_argument(
122 "--extra-packages-script",
123 help=(
124 "Script to execute to include extra packages in " "py2exe binary."
125 ),
126 )
127 sp.add_argument(
128 "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file"
129 )
130 sp.add_argument(
131 "--extra-features",
132 help=(
133 "CSV of extra feature names to include "
134 "in the installer from the extra wxs files"
135 ),
136 )
137 sp.set_defaults(func=build_wix)
138
139 return parser
140
141
142 def main():
143 parser = get_parser()
144 args = parser.parse_args()
145
146 if not hasattr(args, "func"):
147 parser.print_help()
148 return
149
150 kwargs = dict(vars(args))
151 del kwargs["func"]
152
153 args.func(**kwargs)
@@ -0,0 +1,74 @@
1 #!/usr/bin/env python3
2 #
3 # packaging.py - Mercurial packaging functionality
4 #
5 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
6 #
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
9
10 import os
11 import pathlib
12 import subprocess
13 import sys
14 import venv
15
16
17 HERE = pathlib.Path(os.path.abspath(__file__)).parent
18 REQUIREMENTS_TXT = HERE / "requirements.txt"
19 SOURCE_DIR = HERE.parent.parent
20 VENV = SOURCE_DIR / "build" / "venv-packaging"
21
22
23 def bootstrap():
24 venv_created = not VENV.exists()
25
26 VENV.parent.mkdir(exist_ok=True)
27
28 venv.create(VENV, with_pip=True)
29
30 if os.name == "nt":
31 venv_bin = VENV / "Scripts"
32 pip = venv_bin / "pip.exe"
33 python = venv_bin / "python.exe"
34 else:
35 venv_bin = VENV / "bin"
36 pip = venv_bin / "pip"
37 python = venv_bin / "python"
38
39 args = [
40 str(pip),
41 "install",
42 "-r",
43 str(REQUIREMENTS_TXT),
44 "--disable-pip-version-check",
45 ]
46
47 if not venv_created:
48 args.append("-q")
49
50 subprocess.run(args, check=True)
51
52 os.environ["HGPACKAGING_BOOTSTRAPPED"] = "1"
53 os.environ["PATH"] = "%s%s%s" % (venv_bin, os.pathsep, os.environ["PATH"])
54
55 subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
56
57
58 def run():
59 import hgpackaging.cli as cli
60
61 # Need to strip off main Python executable.
62 cli.main()
63
64
65 if __name__ == "__main__":
66 try:
67 if "HGPACKAGING_BOOTSTRAPPED" not in os.environ:
68 bootstrap()
69 else:
70 run()
71 except subprocess.CalledProcessError as e:
72 sys.exit(e.returncode)
73 except KeyboardInterrupt:
74 sys.exit(1)
@@ -0,0 +1,39 @@
1 #
2 # This file is autogenerated by pip-compile
3 # To update, run:
4 #
5 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
6 #
7 jinja2==2.10.3 \
8 --hash=sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f \
9 --hash=sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de
10 markupsafe==1.1.1 \
11 --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
12 --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
13 --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
14 --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
15 --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
16 --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
17 --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
18 --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
19 --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
20 --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
21 --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
22 --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
23 --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
24 --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
25 --hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
26 --hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
27 --hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
28 --hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
29 --hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
30 --hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
31 --hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
32 --hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
33 --hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
34 --hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
35 --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
36 --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
37 --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
38 --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
39 # via jinja2
@@ -0,0 +1,1 @@
1 jinja2
[Additional new files in this commit were too big for the viewer and their contents are not shown.]
@@ -1,5 +1,6 @@
 {
 "conduit_uri": "https://phab.mercurial-scm.org/api",
+"phabricator.uri": "https://phab.mercurial-scm.org/",
 "repository.callsign": "HG",
 "arc.land.onto.default": "@",
 "base": "hg:.^"
@@ -51,6 +51,7 @@ tags
 cscope.*
 .idea/*
 .asv/*
+.pytype/*
 i18n/hg.pot
 locale/*/LC_MESSAGES/hg.mo
 hgext/__index__.py
@@ -11,7 +11,7 @@ HGPYTHONS ?= $(HGROOT)/build/pythons
 PURE=
 PYFILESCMD=find mercurial hgext doc -name '*.py'
 PYFILES:=$(shell $(PYFILESCMD))
-DOCFILES=mercurial/help/*.txt
+DOCFILES=mercurial/helptext/*.txt
 export LANGUAGE=C
 export LC_ALL=C
 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
@@ -189,7 +189,8 @@ packaging_targets := \
 docker-centos6 \
 docker-centos7 \
 docker-centos8 \
-docker-debian-jessie \
+docker-debian-bullseye \
+docker-debian-buster \
 docker-debian-stretch \
 docker-fedora \
 docker-ubuntu-trusty \
@@ -59,7 +59,7 @@ DEBIAN_ACCOUNT_ID_2 = '136693071363'
 UBUNTU_ACCOUNT_ID = '099720109477'


-WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-2019.07.12'
+WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-2019.11.13'


 KEY_PAIRS = {
@@ -71,7 +71,7 @@ Write-Output "updated Mercurial working
 BUILD_INNO = r'''
 Set-Location C:\hgdev\src
 $python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\inno\build.py --python $python
+C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py inno --python $python
 if ($LASTEXITCODE -ne 0) {{
 throw "process exited non-0: $LASTEXITCODE"
 }}
@@ -88,7 +88,7 @@ if ($LASTEXITCODE -ne 0) {{
 BUILD_WIX = r'''
 Set-Location C:\hgdev\src
 $python = "C:\hgdev\python27-{arch}\python.exe"
-C:\hgdev\python37-x64\python.exe contrib\packaging\wix\build.py --python $python {extra_args}
+C:\hgdev\python37-x64\python.exe contrib\packaging\packaging.py wix --python $python {extra_args}
 if ($LASTEXITCODE -ne 0) {{
 throw "process exited non-0: $LASTEXITCODE"
 }}
@@ -281,10 +281,10 @@ for i in [0, 1]:
 for tp in testpats[i]:
 p = tp[0]
 m = tp[1]
-if p.startswith(r'^'):
-p = r"^ [$>] (%s)" % p[1:]
+if p.startswith('^'):
+p = "^ [$>] (%s)" % p[1:]
 else:
-p = r"^ [$>] .*(%s)" % p
+p = "^ [$>] .*(%s)" % p
 utestpats[i].append((p, m) + tp[2:])

 # don't transform the following rules:
@@ -27,32 +27,42 @@ beforepatch = afterheader + r"(?!\n(?!@@

 errors = [
 (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"),
-(beforepatch + r".*[(]issue \d\d\d",
-"no space allowed between issue and number"),
+(
+beforepatch + r".*[(]issue \d\d\d",
+"no space allowed between issue and number",
+),
 (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
 (commitheader + r"# User [^@\n]+\n", "username is not an email address"),
-(commitheader + r"(?!merge with )[^#]\S+[^:] ",
-"summary line doesn't start with 'topic: '"),
+(
+commitheader + r"(?!merge with )[^#]\S+[^:] ",
+"summary line doesn't start with 'topic: '",
+),
 (afterheader + r"[A-Z][a-z]\S+", "don't capitalize summary lines"),
 (afterheader + r"^\S+: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
-(afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
-"summary keyword should be most user-relevant one-word command or topic"),
+(
+afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
+"summary keyword should be most user-relevant one-word command or topic",
+),
 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
 ]

 word = re.compile(r'\S')
+
+
 def nonempty(first, second):
 if word.search(first):
 return first
 return second

+
 def checkcommit(commit, node=None):
 exitcode = 0
 printed = node is None
 hits = []
-signtag = (afterheader +
-r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}')
+signtag = (
+afterheader + r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}'
+)
 if re.search(signtag, commit):
 return 0
 for exp, msg in errors:
@@ -84,9 +94,11 @@ def checkcommit(commit, node=None):

 return exitcode

+
 def readcommit(node):
 return os.popen("hg export %s" % node).read()

+
 if __name__ == "__main__":
 exitcode = 0
 node = os.environ.get("HG_NODE")
@@ -3,100 +3,9 b''
3 mercurial/cext/manifest.c
3 mercurial/cext/manifest.c
4 mercurial/cext/osutil.c
4 mercurial/cext/osutil.c
5 # Vendored code that we should never format:
5 # Vendored code that we should never format:
6 contrib/python-zstandard/c-ext/bufferutil.c
6 syntax: glob
7 contrib/python-zstandard/c-ext/compressionchunker.c
7 contrib/python-zstandard/**.c
8 contrib/python-zstandard/c-ext/compressiondict.c
8 contrib/python-zstandard/**.h
9 contrib/python-zstandard/c-ext/compressionparams.c
9 hgext/fsmonitor/pywatchman/**.c
10 contrib/python-zstandard/c-ext/compressionreader.c
10 mercurial/thirdparty/**.c
11 contrib/python-zstandard/c-ext/compressionwriter.c
11 mercurial/thirdparty/**.h
12 contrib/python-zstandard/c-ext/compressobj.c
13 contrib/python-zstandard/c-ext/compressor.c
14 contrib/python-zstandard/c-ext/compressoriterator.c
15 contrib/python-zstandard/c-ext/constants.c
16 contrib/python-zstandard/c-ext/decompressionreader.c
17 contrib/python-zstandard/c-ext/decompressionwriter.c
18 contrib/python-zstandard/c-ext/decompressobj.c
19 contrib/python-zstandard/c-ext/decompressor.c
20 contrib/python-zstandard/c-ext/decompressoriterator.c
21 contrib/python-zstandard/c-ext/frameparams.c
22 contrib/python-zstandard/c-ext/python-zstandard.h
23 contrib/python-zstandard/zstd.c
24 contrib/python-zstandard/zstd/common/bitstream.h
25 contrib/python-zstandard/zstd/common/compiler.h
26 contrib/python-zstandard/zstd/common/cpu.h
27 contrib/python-zstandard/zstd/common/debug.c
28 contrib/python-zstandard/zstd/common/debug.h
29 contrib/python-zstandard/zstd/common/entropy_common.c
30 contrib/python-zstandard/zstd/common/error_private.c
31 contrib/python-zstandard/zstd/common/error_private.h
32 contrib/python-zstandard/zstd/common/fse_decompress.c
33 contrib/python-zstandard/zstd/common/fse.h
34 contrib/python-zstandard/zstd/common/huf.h
35 contrib/python-zstandard/zstd/common/mem.h
36 contrib/python-zstandard/zstd/common/pool.c
37 contrib/python-zstandard/zstd/common/pool.h
38 contrib/python-zstandard/zstd/common/threading.c
39 contrib/python-zstandard/zstd/common/threading.h
40 contrib/python-zstandard/zstd/common/xxhash.c
41 contrib/python-zstandard/zstd/common/xxhash.h
42 contrib/python-zstandard/zstd/common/zstd_common.c
43 contrib/python-zstandard/zstd/common/zstd_errors.h
44 contrib/python-zstandard/zstd/common/zstd_internal.h
45 contrib/python-zstandard/zstd/compress/fse_compress.c
46 contrib/python-zstandard/zstd/compress/hist.c
47 contrib/python-zstandard/zstd/compress/hist.h
48 contrib/python-zstandard/zstd/compress/huf_compress.c
49 contrib/python-zstandard/zstd/compress/zstd_compress.c
50 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
51 contrib/python-zstandard/zstd/compress/zstd_compress_literals.c
52 contrib/python-zstandard/zstd/compress/zstd_compress_literals.h
53 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.c
54 contrib/python-zstandard/zstd/compress/zstd_compress_sequences.h
55 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
56 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
57 contrib/python-zstandard/zstd/compress/zstd_fast.c
58 contrib/python-zstandard/zstd/compress/zstd_fast.h
59 contrib/python-zstandard/zstd/compress/zstd_lazy.c
60 contrib/python-zstandard/zstd/compress/zstd_lazy.h
61 contrib/python-zstandard/zstd/compress/zstd_ldm.c
62 contrib/python-zstandard/zstd/compress/zstd_ldm.h
63 contrib/python-zstandard/zstd/compress/zstdmt_compress.c
64 contrib/python-zstandard/zstd/compress/zstdmt_compress.h
65 contrib/python-zstandard/zstd/compress/zstd_opt.c
66 contrib/python-zstandard/zstd/compress/zstd_opt.h
67 contrib/python-zstandard/zstd/decompress/huf_decompress.c
68 contrib/python-zstandard/zstd/decompress/zstd_ddict.c
69 contrib/python-zstandard/zstd/decompress/zstd_ddict.h
70 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.c
71 contrib/python-zstandard/zstd/decompress/zstd_decompress_block.h
72 contrib/python-zstandard/zstd/decompress/zstd_decompress_internal.h
73 contrib/python-zstandard/zstd/decompress/zstd_decompress.c
74 contrib/python-zstandard/zstd/deprecated/zbuff_common.c
75 contrib/python-zstandard/zstd/deprecated/zbuff_compress.c
76 contrib/python-zstandard/zstd/deprecated/zbuff_decompress.c
77 contrib/python-zstandard/zstd/deprecated/zbuff.h
78 contrib/python-zstandard/zstd/dictBuilder/cover.c
79 contrib/python-zstandard/zstd/dictBuilder/cover.h
80 contrib/python-zstandard/zstd/dictBuilder/divsufsort.c
81 contrib/python-zstandard/zstd/dictBuilder/divsufsort.h
82 contrib/python-zstandard/zstd/dictBuilder/fastcover.c
83 contrib/python-zstandard/zstd/dictBuilder/zdict.c
84 contrib/python-zstandard/zstd/dictBuilder/zdict.h
85 contrib/python-zstandard/zstd/zstd.h
86 hgext/fsmonitor/pywatchman/bser.c
87 mercurial/thirdparty/xdiff/xdiff.h
88 mercurial/thirdparty/xdiff/xdiffi.c
89 mercurial/thirdparty/xdiff/xdiffi.h
90 mercurial/thirdparty/xdiff/xemit.c
91 mercurial/thirdparty/xdiff/xemit.h
92 mercurial/thirdparty/xdiff/xhistogram.c
93 mercurial/thirdparty/xdiff/xinclude.h
94 mercurial/thirdparty/xdiff/xmacros.h
95 mercurial/thirdparty/xdiff/xmerge.c
96 mercurial/thirdparty/xdiff/xpatience.c
97 mercurial/thirdparty/xdiff/xprepare.c
98 mercurial/thirdparty/xdiff/xprepare.h
99 mercurial/thirdparty/xdiff/xtypes.h
100 mercurial/thirdparty/xdiff/xutils.c
101 mercurial/thirdparty/xdiff/xutils.h
102 mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c
@@ -11,23 +11,26 @@ from mercurial import (
 pycompat,
 revlog,
 )
-from mercurial.utils import (
-procutil,
-)
+from mercurial.utils import procutil

 for fp in (sys.stdin, sys.stdout, sys.stderr):
 procutil.setbinary(fp)

+
 def binopen(path, mode=b'rb'):
 if b'b' not in mode:
 mode = mode + b'b'
 return open(path, pycompat.sysstr(mode))
+
+
 binopen.options = {}

+
 def printb(data, end=b'\n'):
 sys.stdout.flush()
 pycompat.stdout.write(data + end)

+
 for f in sys.argv[1:]:
 r = revlog.revlog(binopen, encoding.strtolocal(f))
 print("file:", f)
@@ -1,9 +1,14 @@
 [fix]
-clang-format:command = clang-format --style file -i
-clang-format:pattern = (**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"
+clang-format:command = clang-format --style file
+clang-format:pattern = set:(**.c or **.cc or **.h) and not "include:contrib/clang-format-ignorelist"

-rustfmt:command = rustfmt {rootpath}
+rustfmt:command = rustfmt +nightly
 rustfmt:pattern = set:**.rs

 black:command = black --config=black.toml -
 black:pattern = set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"
+
+# Mercurial doesn't have any Go code, but if we did this is how we
+# would configure `hg fix` for Go:
+go:command = gofmt
+go:pattern = set:**.go
@@ -1,184 +1,129 b''
1 CC = clang
1 CC = clang
2 CXX = clang++
2 CXX = clang++
3
3
4 all: bdiff mpatch xdiff
4 # By default, use our own standalone_fuzz_target_runner.
5 # This runner does no fuzzing, but simply executes the inputs
6 # provided via parameters.
7 # Run e.g. "make all LIB_FUZZING_ENGINE=/path/to/libFuzzer.a"
8 # to link the fuzzer(s) against a real fuzzing engine.
9 #
10 # OSS-Fuzz will define its own value for LIB_FUZZING_ENGINE.
11 LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o
5
12
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
13 PYTHON_CONFIG ?= $$OUT/sanpy/bin/python-config
7 $(CXX) $(CXXFLAGS) -g -O1 \
14
8 -std=c++17 \
15 CXXFLAGS += -Wno-deprecated-register
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
10
16
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
17 all: standalone_fuzz_target_runner.o oss-fuzz
12 $(CXX) $(CXXFLAGS) -std=c++17 \
18
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
19 standalone_fuzz_target_runner.o: standalone_fuzz_target_runner.cc
20
21 $$OUT/%_fuzzer_seed_corpus.zip: %_corpus.py
22 python $< $@
14
23
15 pyutil.o: pyutil.cc pyutil.h
24 pyutil.o: pyutil.cc pyutil.h
16 $(CXX) $(CXXFLAGS) -g -O1 \
25 $(CXX) $(CXXFLAGS) -g -O1 \
17 `$$OUT/sanpy/bin/python-config --cflags` \
26 `$(PYTHON_CONFIG) --cflags` \
18 -I../../mercurial -c -o pyutil.o pyutil.cc
27 -I../../mercurial -c -o pyutil.o pyutil.cc
19
28
20 bdiff.o: ../../mercurial/bdiff.c
21 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
22 ../../mercurial/bdiff.c
23
24 bdiff: bdiff.cc bdiff.o fuzzutil.o
25 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
26 -std=c++17 \
27 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
28
29 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
29 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
30 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
30 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
31
31
32 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
32 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o
33 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
33 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
34 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
34 bdiff-oss-fuzz.o $(LIB_FUZZING_ENGINE) -o \
35 $$OUT/bdiff_fuzzer
35 $$OUT/bdiff_fuzzer
36
36
37 mpatch.o: ../../mercurial/mpatch.c
37 mpatch.o: ../../mercurial/mpatch.c
38 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
38 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
39 ../../mercurial/mpatch.c
39 ../../mercurial/mpatch.c
40
40
41 mpatch: CXXFLAGS += -std=c++17
42 mpatch: mpatch.cc mpatch.o fuzzutil.o
43 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
44 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
45
46 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
41 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
47 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
42 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
48
43
49 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
44 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o $$OUT/mpatch_fuzzer_seed_corpus.zip
50 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
45 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
51 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
46 mpatch-oss-fuzz.o $(LIB_FUZZING_ENGINE) -o \
52 $$OUT/mpatch_fuzzer
47 $$OUT/mpatch_fuzzer
53
48
54 mpatch_corpus.zip:
55 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
56
57 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
58 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
59 -o $@ \
60 $<
61
62 xdiff: CXXFLAGS += -std=c++17
63 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
64 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
65 -I../../mercurial xdiff.cc \
66 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
67
68 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
49 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
69 $(CC) $(CFLAGS) -c \
50 $(CC) $(CFLAGS) -c \
70 -o $@ \
51 -o $@ \
71 $<
52 $<
72
53
73 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
54 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o
74 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
55 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
75 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
56 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o \
76 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
57 $(LIB_FUZZING_ENGINE) -o $$OUT/xdiff_fuzzer
77
78 manifest.o: ../../mercurial/cext/manifest.c
79 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
80 -I../../mercurial \
81 -c -o manifest.o ../../mercurial/cext/manifest.c
82
83 charencode.o: ../../mercurial/cext/charencode.c
84 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
85 -I../../mercurial \
86 -c -o charencode.o ../../mercurial/cext/charencode.c
87
58
88 parsers.o: ../../mercurial/cext/parsers.c
59 parsers-%.o: ../../mercurial/cext/%.c
89 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
60 $(CC) -I../../mercurial `$(PYTHON_CONFIG) --cflags` $(CFLAGS) -c \
90 -I../../mercurial \
61 -o $@ $<
91 -c -o parsers.o ../../mercurial/cext/parsers.c
92
93 dirs.o: ../../mercurial/cext/dirs.c
94 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
95 -I../../mercurial \
96 -c -o dirs.o ../../mercurial/cext/dirs.c
97
62
98 pathencode.o: ../../mercurial/cext/pathencode.c
63 PARSERS_OBJS=parsers-manifest.o parsers-charencode.o parsers-parsers.o parsers-dirs.o parsers-pathencode.o parsers-revlog.o
99 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
100 -I../../mercurial \
101 -c -o pathencode.o ../../mercurial/cext/pathencode.c
102
64
103 revlog.o: ../../mercurial/cext/revlog.c
65 dirs_fuzzer: dirs.cc pyutil.o $(PARSERS_OBJS) $$OUT/dirs_fuzzer_seed_corpus.zip
104 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
66 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
105 -I../../mercurial \
106 -c -o revlog.o ../../mercurial/cext/revlog.c
107
108 dirs_fuzzer: dirs.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
110 -Wno-register -Wno-macro-redefined \
67 -Wno-register -Wno-macro-redefined \
111 -I../../mercurial dirs.cc \
68 -I../../mercurial dirs.cc \
112 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
69 pyutil.o $(PARSERS_OBJS) \
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
70 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
114 -o $$OUT/dirs_fuzzer
71 -o $$OUT/dirs_fuzzer
115
72
116 fncache_fuzzer: fncache.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
73 fncache_fuzzer: fncache.cc
117 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
74 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
118 -Wno-register -Wno-macro-redefined \
75 -Wno-register -Wno-macro-redefined \
119 -I../../mercurial fncache.cc \
76 -I../../mercurial fncache.cc \
120 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
77 pyutil.o $(PARSERS_OBJS) \
121 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
78 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
122 -o $$OUT/fncache_fuzzer
79 -o $$OUT/fncache_fuzzer
123
80
124 jsonescapeu8fast_fuzzer: jsonescapeu8fast.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
81 jsonescapeu8fast_fuzzer: jsonescapeu8fast.cc pyutil.o $(PARSERS_OBJS)
125 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
82 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
126 -Wno-register -Wno-macro-redefined \
83 -Wno-register -Wno-macro-redefined \
127 -I../../mercurial jsonescapeu8fast.cc \
84 -I../../mercurial jsonescapeu8fast.cc \
128 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
85 pyutil.o $(PARSERS_OBJS) \
129 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
86 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
130 -o $$OUT/jsonescapeu8fast_fuzzer
87 -o $$OUT/jsonescapeu8fast_fuzzer
131
88
132 manifest_corpus.zip:
89 manifest_fuzzer: manifest.cc pyutil.o $(PARSERS_OBJS) $$OUT/manifest_fuzzer_seed_corpus.zip
133 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
90 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
134
135 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
136 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
137 -Wno-register -Wno-macro-redefined \
91 -Wno-register -Wno-macro-redefined \
138 -I../../mercurial manifest.cc \
92 -I../../mercurial manifest.cc \
139 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
93 pyutil.o $(PARSERS_OBJS) \
140 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
94 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
141 -o $$OUT/manifest_fuzzer
95 -o $$OUT/manifest_fuzzer
142
96
143 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
97 revlog_fuzzer: revlog.cc pyutil.o $(PARSERS_OBJS) $$OUT/revlog_fuzzer_seed_corpus.zip
144 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
98 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
145 -Wno-register -Wno-macro-redefined \
99 -Wno-register -Wno-macro-redefined \
146 -I../../mercurial revlog.cc \
100 -I../../mercurial revlog.cc \
147 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
101 pyutil.o $(PARSERS_OBJS) \
148 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
102 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
149 -o $$OUT/revlog_fuzzer
103 -o $$OUT/revlog_fuzzer
150
104
151 revlog_corpus.zip:
105 dirstate_fuzzer: dirstate.cc pyutil.o $(PARSERS_OBJS) $$OUT/dirstate_fuzzer_seed_corpus.zip
152 python revlog_corpus.py $$OUT/revlog_fuzzer_seed_corpus.zip
106 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
153
154 dirstate_fuzzer: dirstate.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
155 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
156 -Wno-register -Wno-macro-redefined \
107 -Wno-register -Wno-macro-redefined \
157 -I../../mercurial dirstate.cc \
108 -I../../mercurial dirstate.cc \
158 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
109 pyutil.o $(PARSERS_OBJS) \
159 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
110 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
160 -o $$OUT/dirstate_fuzzer
111 -o $$OUT/dirstate_fuzzer
161
112
162 dirstate_corpus.zip:
113 fm1readmarkers_fuzzer: fm1readmarkers.cc pyutil.o $(PARSERS_OBJS) $$OUT/fm1readmarkers_fuzzer_seed_corpus.zip
163 python dirstate_corpus.py $$OUT/dirstate_fuzzer_seed_corpus.zip
114 $(CXX) $(CXXFLAGS) `$(PYTHON_CONFIG) --cflags` \
164
165 fm1readmarkers_fuzzer: fm1readmarkers.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
166 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
167 -Wno-register -Wno-macro-redefined \
115 -Wno-register -Wno-macro-redefined \
168 -I../../mercurial fm1readmarkers.cc \
116 -I../../mercurial fm1readmarkers.cc \
169 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
117 pyutil.o $(PARSERS_OBJS) \
170 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
118 $(LIB_FUZZING_ENGINE) `$(PYTHON_CONFIG) --ldflags` \
171 -o $$OUT/fm1readmarkers_fuzzer
119 -o $$OUT/fm1readmarkers_fuzzer
172
120
173 fm1readmarkers_corpus.zip:
174 python fm1readmarkers_corpus.py $$OUT/fm1readmarkers_fuzzer_seed_corpus.zip
175
176 clean:
121 clean:
177 $(RM) *.o *_fuzzer \
122 $(RM) *.o *_fuzzer \
178 bdiff \
123 bdiff \
179 mpatch \
124 mpatch \
180 xdiff
125 xdiff
181
126
182 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer dirs_fuzzer fncache_fuzzer jsonescapeu8fast_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
127 oss-fuzz: bdiff_fuzzer mpatch_fuzzer xdiff_fuzzer dirs_fuzzer fncache_fuzzer jsonescapeu8fast_fuzzer manifest_fuzzer revlog_fuzzer dirstate_fuzzer fm1readmarkers_fuzzer
183
128
184 .PHONY: all clean oss-fuzz
129 .PHONY: all clean oss-fuzz
@@ -9,22 +9,25 b''
9 #include <memory>
9 #include <memory>
10 #include <stdlib.h>
10 #include <stdlib.h>
11
11
12 #include "fuzzutil.h"
12 #include "FuzzedDataProvider.h"
13
13
14 extern "C" {
14 extern "C" {
15 #include "bdiff.h"
15 #include "bdiff.h"
16
16
17 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
18 {
19 return 0;
20 }
21
17 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
22 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
18 {
23 {
19 auto maybe_inputs = SplitInputs(Data, Size);
24 FuzzedDataProvider provider(Data, Size);
20 if (!maybe_inputs) {
25 std::string left = provider.ConsumeRandomLengthString(Size);
21 return 0;
26 std::string right = provider.ConsumeRemainingBytesAsString();
22 }
23 auto inputs = std::move(maybe_inputs.value());
24
27
25 struct bdiff_line *a, *b;
28 struct bdiff_line *a, *b;
26 int an = bdiff_splitlines(inputs.left.get(), inputs.left_size, &a);
29 int an = bdiff_splitlines(left.c_str(), left.size(), &a);
27 int bn = bdiff_splitlines(inputs.right.get(), inputs.right_size, &b);
30 int bn = bdiff_splitlines(right.c_str(), right.size(), &b);
28 struct bdiff_hunk l;
31 struct bdiff_hunk l;
29 bdiff_diff(a, an, b, bn, &l);
32 bdiff_diff(a, an, b, bn, &l);
30 free(a);
33 free(a);
@@ -33,12 +36,4 b' int LLVMFuzzerTestOneInput(const uint8_t'
33 return 0; // Non-zero return values are reserved for future use.
36 return 0; // Non-zero return values are reserved for future use.
34 }
37 }
35
38
36 #ifdef HG_FUZZER_INCLUDE_MAIN
37 int main(int argc, char **argv)
38 {
39 const char data[] = "asdf";
40 return LLVMFuzzerTestOneInput((const uint8_t *)data, 4);
41 }
42 #endif
43
44 } // extern "C"
39 } // extern "C"
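
The custom SplitInputs helper is gone; bdiff (and xdiff below) now derive their two sides from LLVM's FuzzedDataProvider. A minimal illustrative sketch of that split pattern, assuming only the bundled FuzzedDataProvider.h (the function name is made up, not code from this changeset):

// Illustrative sketch of the FuzzedDataProvider split pattern adopted
// above (not part of the changeset; split_example is a made-up name).
#include <cstdint>
#include <string>

#include "FuzzedDataProvider.h"

static void split_example(const uint8_t *Data, size_t Size)
{
	FuzzedDataProvider provider(Data, Size);
	// Consumes up to Size bytes, but stops early at a '\' that is not
	// doubled, so the split point is fuzzer-controlled.
	std::string left = provider.ConsumeRandomLengthString(Size);
	// Everything the first call left behind becomes the second input.
	std::string right = provider.ConsumeRemainingBytesAsString();
	(void)left;
	(void)right;
}

Since ConsumeRandomLengthString never reads past the remaining bytes, `right` may simply be empty when the whole input was taken as `left`.
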
@@ -9,16 +9,15 b''
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PYCODETYPE *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PYCODETYPE *)Py_CompileString(R"py(
18 from parsers import dirs
19 try:
18 try:
20 files = mdata.split('\n')
19 files = mdata.split('\n')
21 d = dirs(files)
20 d = parsers.dirs(files)
22 list(d)
21 list(d)
23 'a' in d
22 'a' in d
24 if files:
23 if files:
@@ -29,7 +28,7 b' except Exception as e:'
29 # to debug failures.
28 # to debug failures.
30 # print e
29 # print e
31 )py",
30 )py",
32 "fuzzer", Py_file_input);
31 "fuzzer", Py_file_input);
33 return 0;
32 return 0;
34 }
33 }
35
34
@@ -9,24 +9,23 b''
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PYCODETYPE *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PYCODETYPE *)Py_CompileString(R"py(
18 from parsers import parse_dirstate
19 try:
18 try:
20 dmap = {}
19 dmap = {}
21 copymap = {}
20 copymap = {}
22 p = parse_dirstate(dmap, copymap, data)
21 p = parsers.parse_dirstate(dmap, copymap, data)
23 except Exception as e:
22 except Exception as e:
24 pass
23 pass
25 # uncomment this print if you're editing this Python code
24 # uncomment this print if you're editing this Python code
26 # to debug failures.
25 # to debug failures.
27 # print e
26 # print e
28 )py",
27 )py",
29 "fuzzer", Py_file_input);
28 "fuzzer", Py_file_input);
30 return 0;
29 return 0;
31 }
30 }
32
31
@@ -13,5 +13,5 b" dirstate = os.path.join(reporoot, '.hg',"
13
13
14 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
14 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
15 if os.path.exists(dirstate):
15 if os.path.exists(dirstate):
16 with open(dirstate) as f:
16 with open(dirstate, 'rb') as f:
17 zf.writestr("dirstate", f.read())
17 zf.writestr("dirstate", f.read())
@@ -9,13 +9,12 b''
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PYCODETYPE *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PYCODETYPE *)Py_CompileString(R"py(
18 from parsers import fm1readmarkers
19 def maybeint(s, default):
18 def maybeint(s, default):
20 try:
19 try:
21 return int(s)
20 return int(s)
@@ -31,14 +30,14 b' try:'
31 else:
30 else:
32 offset = stop = 0
31 offset = stop = 0
33 offset, stop = maybeint(offset, 0), maybeint(stop, len(data))
32 offset, stop = maybeint(offset, 0), maybeint(stop, len(data))
34 fm1readmarkers(data, offset, stop)
33 parsers.fm1readmarkers(data, offset, stop)
35 except Exception as e:
34 except Exception as e:
36 pass
35 pass
37 # uncomment this print if you're editing this Python code
36 # uncomment this print if you're editing this Python code
38 # to debug failures.
37 # to debug failures.
39 # print e
38 # print e
40 )py",
39 )py",
41 "fuzzer", Py_file_input);
40 "fuzzer", Py_file_input);
42 return 0;
41 return 0;
43 }
42 }
44
43
@@ -10,29 +10,20 b''
10
10
11 extern "C" {
11 extern "C" {
12
12
13 static PyCodeObject *code;
13 static PYCODETYPE *code;
14
14
15 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
16 {
16 {
17 contrib::initpy(*argv[0]);
17 contrib::initpy(*argv[0]);
18 code = (PyCodeObject *)Py_CompileString(R"py(
18 code = (PYCODETYPE *)Py_CompileString(R"py(
19 from parsers import (
20 isasciistr,
21 asciilower,
22 asciiupper,
23 encodedir,
24 pathencode,
25 lowerencode,
26 )
27
28 try:
19 try:
29 for fn in (
20 for fn in (
30 isasciistr,
21 parsers.isasciistr,
31 asciilower,
22 parsers.asciilower,
32 asciiupper,
23 parsers.asciiupper,
33 encodedir,
24 parsers.encodedir,
34 pathencode,
25 parsers.pathencode,
35 lowerencode,
26 parsers.lowerencode,
36 ):
27 ):
37 try:
28 try:
38 fn(data)
29 fn(data)
@@ -53,7 +44,7 b' except Exception as e:'
53 # to debug failures.
44 # to debug failures.
54 # print(e)
45 # print(e)
55 )py",
46 )py",
56 "fuzzer", Py_file_input);
47 "fuzzer", Py_file_input);
57 if (!code) {
48 if (!code) {
58 std::cerr << "failed to compile Python code!" << std::endl;
49 std::cerr << "failed to compile Python code!" << std::endl;
59 }
50 }
@@ -34,14 +34,4 b' using absl::optional;'
34 if (level <= DEBUG) \
34 if (level <= DEBUG) \
35 std::cout
35 std::cout
36
36
37 struct two_inputs {
38 std::unique_ptr<char[]> right;
39 size_t right_size;
40 std::unique_ptr<char[]> left;
41 size_t left_size;
42 };
43
44 /* Split a non-zero-length input into two inputs. */
45 contrib::optional<two_inputs> SplitInputs(const uint8_t *Data, size_t Size);
46
47 #endif /* CONTRIB_FUZZ_FUZZUTIL_H */
37 #endif /* CONTRIB_FUZZ_FUZZUTIL_H */
@@ -5,29 +5,27 b''
5
5
6 #include "pyutil.h"
6 #include "pyutil.h"
7
7
8 #include <fuzzer/FuzzedDataProvider.h>
9 #include <iostream>
8 #include <iostream>
10 #include <string>
9 #include <string>
10 #include "FuzzedDataProvider.h"
11
11
12 extern "C" {
12 extern "C" {
13
13
14 static PyCodeObject *code;
14 static PYCODETYPE *code;
15
15
16 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
16 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
17 {
17 {
18 contrib::initpy(*argv[0]);
18 contrib::initpy(*argv[0]);
19 code = (PyCodeObject *)Py_CompileString(R"py(
19 code = (PYCODETYPE *)Py_CompileString(R"py(
20 from parsers import jsonescapeu8fast
21
22 try:
20 try:
23 jsonescapeu8fast(data, paranoid)
21 parsers.jsonescapeu8fast(data, paranoid)
24 except Exception as e:
22 except Exception as e:
25 pass
23 pass
26 # uncomment this print if you're editing this Python code
24 # uncomment this print if you're editing this Python code
27 # to debug failures.
25 # to debug failures.
28 # print(e)
26 # print(e)
29 )py",
27 )py",
30 "fuzzer", Py_file_input);
28 "fuzzer", Py_file_input);
31 if (!code) {
29 if (!code) {
32 std::cerr << "failed to compile Python code!" << std::endl;
30 std::cerr << "failed to compile Python code!" << std::endl;
33 }
31 }
@@ -9,15 +9,14 b''
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PYCODETYPE *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PYCODETYPE *)Py_CompileString(R"py(
18 from parsers import lazymanifest
19 try:
18 try:
20 lm = lazymanifest(mdata)
19 lm = parsers.lazymanifest(mdata)
21 # iterate the whole thing, which causes the code to fully parse
20 # iterate the whole thing, which causes the code to fully parse
22 # every line in the manifest
21 # every line in the manifest
23 for e, _, _ in lm.iterentries():
22 for e, _, _ in lm.iterentries():
@@ -41,7 +40,7 b' except Exception as e:'
41 # to debug failures.
40 # to debug failures.
42 # print e
41 # print e
43 )py",
42 )py",
44 "fuzzer", Py_file_input);
43 "fuzzer", Py_file_input);
45 return 0;
44 return 0;
46 }
45 }
47
46
@@ -14,6 +14,11 b''
14
14
15 #include "fuzzutil.h"
15 #include "fuzzutil.h"
16
16
17 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
18 {
19 return 0;
20 }
21
17 // To avoid having too many OOMs from the fuzzer infrastructure, we'll
22 // To avoid having too many OOMs from the fuzzer infrastructure, we'll
18 // skip patch application if the resulting fulltext would be bigger
23 // skip patch application if the resulting fulltext would be bigger
19 // than 10MiB.
24 // than 10MiB.
@@ -106,17 +111,4 b' cleanup:'
106 return 0;
111 return 0;
107 }
112 }
108
113
109 #ifdef HG_FUZZER_INCLUDE_MAIN
110 int main(int argc, char **argv)
111 {
112 // One text, one patch.
113 const char data[] = "\x02\x00\0x1\x00\x0d"
114 // base text
115 "a"
116 // binary delta that will append a single b
117 "\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01b";
118 return LLVMFuzzerTestOneInput((const uint8_t *)data, 19);
119 }
120 #endif
121
122 } // extern "C"
114 } // extern "C"
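
The deleted HG_FUZZER_INCLUDE_MAIN block was a convenience main() for building these fuzzers without a fuzzing engine; with a no-op LLVMFuzzerInitialize now defined in each target, the engine selected by $(LIB_FUZZING_ENGINE) supplies main instead. If a standalone smoke-test driver is still wanted, a hypothetical one (not part of this changeset) could feed a file to the same entry points:

// Hypothetical standalone driver replacing the removed
// HG_FUZZER_INCLUDE_MAIN main(): read one file and run the fuzz target.
#include <cstdint>
#include <cstdio>
#include <vector>

extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv);
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size);

int main(int argc, char **argv)
{
	LLVMFuzzerInitialize(&argc, &argv);
	if (argc < 2)
		return 0;
	FILE *f = fopen(argv[1], "rb");
	if (!f)
		return 1;
	std::vector<uint8_t> buf;
	int c;
	while ((c = fgetc(f)) != EOF)
		buf.push_back(static_cast<uint8_t>(c));
	fclose(f);
	return LLVMFuzzerTestOneInput(buf.data(), buf.size());
}
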
@@ -2,6 +2,7 b' from __future__ import absolute_import, '
2
2
3 import argparse
3 import argparse
4 import struct
4 import struct
5 import sys
5 import zipfile
6 import zipfile
6
7
7 from mercurial import (
8 from mercurial import (
@@ -14,34 +15,48 b' ap.add_argument("out", metavar="some.zip'
14 args = ap.parse_args()
15 args = ap.parse_args()
15
16
16
17
17 class deltafrag(object):
18 if sys.version_info[0] < 3:
19
20 class py2reprhack(object):
21 def __repr__(self):
22 """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
23 return self.__bytes__()
24
25
26 else:
27
28 class py2reprhack(object):
29 """Not needed on py3."""
30
31
32 class deltafrag(py2reprhack):
18 def __init__(self, start, end, data):
33 def __init__(self, start, end, data):
19 self.start = start
34 self.start = start
20 self.end = end
35 self.end = end
21 self.data = data
36 self.data = data
22
37
23 def __str__(self):
38 def __bytes__(self):
24 return (
39 return (
25 struct.pack(">lll", self.start, self.end, len(self.data))
40 struct.pack(">lll", self.start, self.end, len(self.data))
26 + self.data
41 + self.data
27 )
42 )
28
43
29
44
30 class delta(object):
45 class delta(py2reprhack):
31 def __init__(self, frags):
46 def __init__(self, frags):
32 self.frags = frags
47 self.frags = frags
33
48
34 def __str__(self):
49 def __bytes__(self):
35 return ''.join(str(f) for f in self.frags)
50 return b''.join(bytes(f) for f in self.frags)
36
51
37
52
38 class corpus(object):
53 class corpus(py2reprhack):
39 def __init__(self, base, deltas):
54 def __init__(self, base, deltas):
40 self.base = base
55 self.base = base
41 self.deltas = deltas
56 self.deltas = deltas
42
57
43 def __str__(self):
58 def __bytes__(self):
44 deltas = [str(d) for d in self.deltas]
59 deltas = [bytes(d) for d in self.deltas]
45 parts = (
60 parts = (
46 [
61 [
47 struct.pack(">B", len(deltas) + 1),
62 struct.pack(">B", len(deltas) + 1),
@@ -51,300 +66,301 b' class corpus(object):'
51 + [self.base]
66 + [self.base]
52 + deltas
67 + deltas
53 )
68 )
54 return "".join(parts)
69 return b''.join(parts)
55
70
56
71
57 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
72 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
58 # Manually constructed entries
73 # Manually constructed entries
59 zf.writestr(
74 zf.writestr(
60 "one_delta_applies", str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
75 "one_delta_applies",
76 bytes(corpus(b'a', [delta([deltafrag(0, 1, b'b')])])),
61 )
77 )
62 zf.writestr(
78 zf.writestr(
63 "one_delta_starts_late",
79 "one_delta_starts_late",
64 str(corpus('a', [delta([deltafrag(3, 1, 'b')])])),
80 bytes(corpus(b'a', [delta([deltafrag(3, 1, b'b')])])),
65 )
81 )
66 zf.writestr(
82 zf.writestr(
67 "one_delta_ends_late",
83 "one_delta_ends_late",
68 str(corpus('a', [delta([deltafrag(0, 20, 'b')])])),
84 bytes(corpus(b'a', [delta([deltafrag(0, 20, b'b')])])),
69 )
85 )
70
86
71 try:
87 try:
72 # Generated from repo data
88 # Generated from repo data
73 r = hg.repository(uimod.ui(), '../..')
89 r = hg.repository(uimod.ui(), b'../..')
74 fl = r.file('mercurial/manifest.py')
90 fl = r.file(b'mercurial/manifest.py')
75 rl = getattr(fl, '_revlog', fl)
91 rl = getattr(fl, '_revlog', fl)
76 bins = rl._chunks(rl._deltachain(10)[0])
92 bins = rl._chunks(rl._deltachain(10)[0])
77 zf.writestr('manifest_py_rev_10', str(corpus(bins[0], bins[1:])))
93 zf.writestr('manifest_py_rev_10', bytes(corpus(bins[0], bins[1:])))
78 except: # skip this, so no re-raises
94 except: # skip this, so no re-raises
79 print('skipping seed file from repo data')
95 print('skipping seed file from repo data')
80 # Automatically discovered by running the fuzzer
96 # Automatically discovered by running the fuzzer
81 zf.writestr(
97 zf.writestr(
82 "mpatch_decode_old_overread", "\x02\x00\x00\x00\x02\x00\x00\x00"
98 "mpatch_decode_old_overread", b"\x02\x00\x00\x00\x02\x00\x00\x00"
83 )
99 )
84 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
100 # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
85 zf.writestr(
101 zf.writestr(
86 "mpatch_ossfuzz_getbe32_ubsan",
102 "mpatch_ossfuzz_getbe32_ubsan",
87 "\x02\x00\x00\x00\x0c \xff\xff\xff\xff ",
103 b"\x02\x00\x00\x00\x0c \xff\xff\xff\xff ",
88 )
104 )
89 zf.writestr(
105 zf.writestr(
90 "mpatch_apply_over_memcpy",
106 "mpatch_apply_over_memcpy",
91 '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
107 b'\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
92 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
108 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
93 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00'
109 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00'
94 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
110 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
95 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
111 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
96 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
112 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
97 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
113 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
98 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
114 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
99 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
100 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
116 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
101 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
117 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
102 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
118 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
103 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
119 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
104 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
120 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
105 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
121 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
106 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
122 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
107 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
123 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
108 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
124 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
109 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
125 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
110 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
126 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
111 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
127 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
112 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
128 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
113 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
129 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
114 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
130 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
115 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
131 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
116 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
132 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
117 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
133 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
118 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
134 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
119 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
135 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
120 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
136 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
121 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00'
137 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00'
122 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
138 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
123 '\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00'
139 b'\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00'
124 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
140 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
125 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
141 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
126 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
142 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
127 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
143 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
128 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
144 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
129 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
145 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
130 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
146 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
131 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
147 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
132 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
148 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
133 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
149 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
134 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
150 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
135 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
151 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
136 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
152 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
137 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
153 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
138 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
154 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
139 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
155 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
140 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
156 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
141 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
157 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
142 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
158 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
143 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
159 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
144 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
160 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
145 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
161 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
146 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
162 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
147 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
163 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
148 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
164 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
149 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
165 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
150 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
166 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
151 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
167 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
152 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
168 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
153 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
169 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
154 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
170 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
155 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
171 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
156 '\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00'
172 b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00'
157 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
173 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
158 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
174 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
159 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
175 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
160 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
176 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
161 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
177 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
162 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
178 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
163 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
179 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
164 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
180 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
165 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
181 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
166 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
182 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
167 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
183 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
168 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
184 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
169 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
185 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
170 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
186 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
171 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
187 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
172 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
188 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
173 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
189 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
174 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
190 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
175 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
191 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
176 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
192 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
177 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
193 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
178 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
194 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
179 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
195 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
180 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
196 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
181 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18'
197 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18'
182 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
198 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
183 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
199 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
184 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
200 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
185 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
201 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
186 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
202 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
187 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
203 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
188 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
204 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
189 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
205 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
190 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
206 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
191 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
207 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
192 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
208 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
193 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
209 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
194 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
210 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
195 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
211 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
196 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
212 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
197 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
213 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
198 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
214 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
199 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
215 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
200 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
216 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
201 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
217 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
202 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
218 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
203 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
219 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
204 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
220 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
205 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
221 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
206 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
222 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
207 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
223 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
208 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
224 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
209 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
225 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
210 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
226 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
211 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
227 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
212 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
228 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
213 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
229 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
214 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
230 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
215 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
231 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
216 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
232 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
217 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
233 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
218 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
234 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
219 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
235 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
220 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
236 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
221 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
237 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
222 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
238 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
223 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
239 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
224 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
240 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
225 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
241 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
226 '\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
242 b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
227 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
243 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
228 '\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00'
244 b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00'
229 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
245 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
230 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
246 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
231 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
247 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
232 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
248 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
233 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
249 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
234 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
250 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
235 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
251 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
236 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
252 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
237 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
253 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
238 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
254 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
239 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
255 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
240 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
256 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
241 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
257 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
242 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
258 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
243 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
259 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
244 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
260 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
245 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
261 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
246 '\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
262 b'\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
247 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
263 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
248 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
264 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
249 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
265 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
250 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
266 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
251 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
267 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
252 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
268 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
253 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
269 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
254 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
270 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
255 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
271 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
256 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
272 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
257 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
273 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
258 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
274 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
259 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
275 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
260 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
276 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
261 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
277 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
262 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
278 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
263 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
279 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
264 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
280 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
265 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
281 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
266 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
282 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
267 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
283 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
268 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
284 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
269 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
285 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
270 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
286 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
271 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
287 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
272 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
288 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
273 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
289 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
274 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
290 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
275 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
291 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
276 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
292 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
277 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
293 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
278 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
294 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
279 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
295 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
280 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
296 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
281 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
297 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
282 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
298 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
283 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
299 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
284 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
300 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
285 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
301 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
286 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
302 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
287 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
303 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
288 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
304 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
289 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
305 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
290 '\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
306 b'\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
291 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
307 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
292 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00'
308 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00'
293 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
309 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
294 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
310 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
295 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
311 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
296 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
312 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
297 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
313 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
298 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
314 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
299 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
315 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
300 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
316 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
301 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
317 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
302 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
318 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
303 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
319 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
304 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
320 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
305 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
321 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
306 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
322 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
307 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
323 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
308 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
324 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
309 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
325 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
310 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
326 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
311 '\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00'
327 b'\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00'
312 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
328 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
313 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
329 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
314 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
330 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
315 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
331 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
316 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
332 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
317 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
333 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
318 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
334 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
319 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
335 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
320 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
336 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
321 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
337 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
322 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
338 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
323 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
339 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
324 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
340 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
325 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
341 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
326 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'
342 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'
327 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
328 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
344 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
329 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
330 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
346 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
331 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00'
347 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00'
332 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
348 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
333 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
349 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
334 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
350 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
335 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
351 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
336 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
352 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
337 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
353 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
338 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
354 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
339 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
355 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
340 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
356 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
341 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
357 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
342 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
358 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
343 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
359 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
344 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
360 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
345 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
361 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
346 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
362 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
347 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
363 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
348 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
364 b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
349 '\x00\x00\x00\x00',
365 b'\x00\x00\x00\x00',
350 )
366 )
@@ -1,21 +1,31 b''
1 #include "pyutil.h"
1 #include "pyutil.h"
2
2
3 #include <iostream>
3 #include <string>
4 #include <string>
4
5
5 namespace contrib
6 namespace contrib
6 {
7 {
7
8
9 #if PY_MAJOR_VERSION >= 3
10 #define HG_FUZZER_PY3 1
11 PyMODINIT_FUNC PyInit_parsers(void);
12 #else
13 PyMODINIT_FUNC initparsers(void);
14 #endif
15
8 static char cpypath[8192] = "\0";
16 static char cpypath[8192] = "\0";
9
17
10 static PyObject *mainmod;
18 static PyObject *mainmod;
11 static PyObject *globals;
19 static PyObject *globals;
12
20
13 /* TODO: use Python 3 for this fuzzing? */
14 PyMODINIT_FUNC initparsers(void);
15
16 void initpy(const char *cselfpath)
21 void initpy(const char *cselfpath)
17 {
22 {
23 #ifdef HG_FUZZER_PY3
24 const std::string subdir = "/sanpy/lib/python3.7";
25 #else
18 const std::string subdir = "/sanpy/lib/python2.7";
26 const std::string subdir = "/sanpy/lib/python2.7";
27 #endif
28
19 /* HACK ALERT: we need a full Python installation built without
29 /* HACK ALERT: we need a full Python installation built without
20 pymalloc and with ASAN, so we dump one in
30 pymalloc and with ASAN, so we dump one in
21 $OUT/sanpy/lib/python2.7. This helps us wire that up. */
31 $OUT/sanpy/lib/python2.7. This helps us wire that up. */
@@ -24,7 +34,11 b' void initpy(const char *cselfpath)'
24 auto pos = selfpath.rfind("/");
34 auto pos = selfpath.rfind("/");
25 if (pos == std::string::npos) {
35 if (pos == std::string::npos) {
26 char wd[8192];
36 char wd[8192];
27 getcwd(wd, 8192);
37 if (!getcwd(wd, 8192)) {
38 std::cerr << "Failed to call getcwd: errno " << errno
39 << std::endl;
40 exit(1);
41 }
28 pypath = std::string(wd) + subdir;
42 pypath = std::string(wd) + subdir;
29 } else {
43 } else {
30 pypath = selfpath.substr(0, pos) + subdir;
44 pypath = selfpath.substr(0, pos) + subdir;
@@ -34,11 +48,24 b' void initpy(const char *cselfpath)'
34 setenv("PYTHONNOUSERSITE", "1", 1);
48 setenv("PYTHONNOUSERSITE", "1", 1);
35 /* prevent Python from looking up users in the fuzz environment */
49 /* prevent Python from looking up users in the fuzz environment */
36 setenv("PYTHONUSERBASE", cpypath, 1);
50 setenv("PYTHONUSERBASE", cpypath, 1);
51 #ifdef HG_FUZZER_PY3
52 std::wstring wcpypath(pypath.begin(), pypath.end());
53 Py_SetPythonHome(wcpypath.c_str());
54 #else
37 Py_SetPythonHome(cpypath);
55 Py_SetPythonHome(cpypath);
56 #endif
38 Py_InitializeEx(0);
57 Py_InitializeEx(0);
39 mainmod = PyImport_AddModule("__main__");
58 mainmod = PyImport_AddModule("__main__");
40 globals = PyModule_GetDict(mainmod);
59 globals = PyModule_GetDict(mainmod);
60
61 #ifdef HG_FUZZER_PY3
62 PyObject *mod = PyInit_parsers();
63 #else
41 initparsers();
64 initparsers();
65 PyObject *mod = PyImport_ImportModule("parsers");
66 #endif
67
68 PyDict_SetItemString(globals, "parsers", mod);
42 }
69 }
43
70
44 PyObject *pyglobals()
71 PyObject *pyglobals()
@@ -1,5 +1,11 b''
1 #include <Python.h>
1 #include <Python.h>
2
2
3 #if PY_MAJOR_VERSION >= 3
4 #define PYCODETYPE PyObject
5 #else
6 #define PYCODETYPE PyCodeObject
7 #endif
8
3 namespace contrib
9 namespace contrib
4 {
10 {
5
11
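
The PYCODETYPE alias papers over a CPython API difference: Py_CompileString returns, and PyEval_EvalCode consumes, a PyCodeObject * on Python 2 but a plain PyObject * on Python 3. An illustrative Python 3-only sketch of evaluating such a compiled snippet against the fuzz input (the helper name and the `data` local are assumptions for the sketch, not code from this changeset):

// Illustrative Python 3 path only; `code` is the object compiled in
// LLVMFuzzerInitialize. Names below are assumptions for the sketch.
#include <Python.h>
#include <cstdint>

#include "pyutil.h"

static void eval_compiled_snippet(PyObject *code, const uint8_t *Data,
                                  size_t Size)
{
	PyObject *input =
	    PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
	PyObject *locals = PyDict_New();
	PyDict_SetItemString(locals, "data", input);
	// On Python 3 the first argument is a PyObject *; Python 2 wanted a
	// PyCodeObject *, which is why the PYCODETYPE alias is needed.
	PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
	if (!res)
		PyErr_Print();
	Py_XDECREF(res);
	Py_DECREF(locals);
	Py_DECREF(input);
}
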
@@ -9,16 +9,15 b''
9
9
10 extern "C" {
10 extern "C" {
11
11
12 static PyCodeObject *code;
12 static PYCODETYPE *code;
13
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
15 {
16 contrib::initpy(*argv[0]);
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PYCODETYPE *)Py_CompileString(R"py(
18 from parsers import parse_index2
19 for inline in (True, False):
18 for inline in (True, False):
20 try:
19 try:
21 index, cache = parse_index2(data, inline)
20 index, cache = parsers.parse_index2(data, inline)
22 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
21 index.slicechunktodensity(list(range(len(index))), 0.5, 262144)
23 index.stats()
22 index.stats()
24 index.findsnapshots({}, 0)
23 index.findsnapshots({}, 0)
@@ -35,7 +34,7 b' for inline in (True, False):'
35 # to debug failures.
34 # to debug failures.
36 # print e
35 # print e
37 )py",
36 )py",
38 "fuzzer", Py_file_input);
37 "fuzzer", Py_file_input);
39 return 0;
38 return 0;
40 }
39 }
41
40
@@ -1,4 +1,4 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import
2
2
3 import argparse
3 import argparse
4 import os
4 import os
@@ -16,13 +16,10 b' contributing = os.path.join('
16 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i'
16 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i'
17 )
17 )
18
18
19 print(changelog, os.path.exists(changelog))
20 print(contributing, os.path.exists(contributing))
21
22 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
19 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
23 if os.path.exists(changelog):
20 if os.path.exists(changelog):
24 with open(changelog) as f:
21 with open(changelog, 'rb') as f:
25 zf.writestr("00changelog.i", f.read())
22 zf.writestr("00changelog.i", f.read())
26 if os.path.exists(contributing):
23 if os.path.exists(contributing):
27 with open(contributing) as f:
24 with open(contributing, 'rb') as f:
28 zf.writestr("contributing.i", f.read())
25 zf.writestr("contributing.i", f.read())
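The corpus-dump hunk above switches to binary-mode reads so the revlog bytes reach the zip archive unmodified under Python 3. A minimal self-contained sketch of the same pattern (the paths and archive name here are illustrative, not the script's real arguments):

    import os
    import zipfile

    # Illustrative inputs; the real script derives these from a repository root.
    sources = {
        "00changelog.i": "/tmp/repo/.hg/store/00changelog.i",
        "contributing.i": "/tmp/repo/.hg/store/data/contrib/fuzz/mpatch.cc.i",
    }

    with zipfile.ZipFile("corpus.zip", "w", zipfile.ZIP_STORED) as zf:
        for name, path in sources.items():
            if os.path.exists(path):
                # 'rb' keeps the revlog data as raw bytes; text mode would try
                # to decode it and can corrupt the corpus on Python 3.
                with open(path, "rb") as f:
                    zf.writestr(name, f.read())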
@@ -10,10 +10,15 b''
10 #include <inttypes.h>
10 #include <inttypes.h>
11 #include <stdlib.h>
11 #include <stdlib.h>
12
12
13 #include "fuzzutil.h"
13 #include "FuzzedDataProvider.h"
14
14
15 extern "C" {
15 extern "C" {
16
16
17 int LLVMFuzzerInitialize(int *argc, char ***argv)
18 {
19 return 0;
20 }
21
17 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
22 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
18 {
23 {
19 // TODO: probably also test returning -1 from this when things break?
24 // TODO: probably also test returning -1 from this when things break?
@@ -27,17 +32,15 b' int LLVMFuzzerTestOneInput(const uint8_t'
27 if (Size > 100000) {
32 if (Size > 100000) {
28 return 0;
33 return 0;
29 }
34 }
30 auto maybe_inputs = SplitInputs(Data, Size);
35 FuzzedDataProvider provider(Data, Size);
31 if (!maybe_inputs) {
36 std::string left = provider.ConsumeRandomLengthString(Size);
32 return 0;
37 std::string right = provider.ConsumeRemainingBytesAsString();
33 }
34 auto inputs = std::move(maybe_inputs.value());
35 mmfile_t a, b;
38 mmfile_t a, b;
36
39
37 a.ptr = inputs.left.get();
40 a.ptr = (char *)left.c_str();
38 a.size = inputs.left_size;
41 a.size = left.size();
39 b.ptr = inputs.right.get();
42 b.ptr = (char *)right.c_str();
40 b.size = inputs.right_size;
43 b.size = right.size();
41 xpparam_t xpp = {
44 xpparam_t xpp = {
42 XDF_INDENT_HEURISTIC, /* flags */
45 XDF_INDENT_HEURISTIC, /* flags */
43 };
46 };
@@ -52,12 +55,4 b' int LLVMFuzzerTestOneInput(const uint8_t'
52 return 0; // Non-zero return values are reserved for future use.
55 return 0; // Non-zero return values are reserved for future use.
53 }
56 }
54
57
55 #ifdef HG_FUZZER_INCLUDE_MAIN
56 int main(int argc, char **argv)
57 {
58 const char data[] = "asdf";
59 return LLVMFuzzerTestOneInput((const uint8_t *)data, 4);
60 }
61 #endif
62
63 } // extern "C"
58 } // extern "C"
@@ -35,7 +35,9 b' import shlex'
35 import sys
35 import sys
36
36
37 # enable importing on demand to reduce startup time
37 # enable importing on demand to reduce startup time
38 import hgdemandimport ; hgdemandimport.enable()
38 import hgdemandimport
39
40 hgdemandimport.enable()
39
41
40 from mercurial import (
42 from mercurial import (
41 dispatch,
43 dispatch,
@@ -43,6 +45,7 b' from mercurial import ('
43 ui as uimod,
45 ui as uimod,
44 )
46 )
45
47
48
46 def main():
49 def main():
47 # Prevent insertion/deletion of CRs
50 # Prevent insertion/deletion of CRs
48 dispatch.initstdio()
51 dispatch.initstdio()
@@ -56,9 +59,10 b' def main():'
56 args.pop(0)
59 args.pop(0)
57 else:
60 else:
58 break
61 break
59 allowed_paths = [os.path.normpath(os.path.join(cwd,
62 allowed_paths = [
60 os.path.expanduser(path)))
63 os.path.normpath(os.path.join(cwd, os.path.expanduser(path)))
61 for path in args]
64 for path in args
65 ]
62 orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?')
66 orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?')
63 try:
67 try:
64 cmdargv = shlex.split(orig_cmd)
68 cmdargv = shlex.split(orig_cmd)
@@ -75,10 +79,18 b' def main():'
75 if readonly:
79 if readonly:
76 if not req.ui:
80 if not req.ui:
77 req.ui = uimod.ui.load()
81 req.ui = uimod.ui.load()
78 req.ui.setconfig(b'hooks', b'pretxnopen.hg-ssh',
82 req.ui.setconfig(
79 b'python:__main__.rejectpush', b'hg-ssh')
83 b'hooks',
80 req.ui.setconfig(b'hooks', b'prepushkey.hg-ssh',
84 b'pretxnopen.hg-ssh',
81 b'python:__main__.rejectpush', b'hg-ssh')
85 b'python:__main__.rejectpush',
86 b'hg-ssh',
87 )
88 req.ui.setconfig(
89 b'hooks',
90 b'prepushkey.hg-ssh',
91 b'python:__main__.rejectpush',
92 b'hg-ssh',
93 )
82 dispatch.dispatch(req)
94 dispatch.dispatch(req)
83 else:
95 else:
84 sys.stderr.write('Illegal repository "%s"\n' % repo)
96 sys.stderr.write('Illegal repository "%s"\n' % repo)
@@ -87,11 +99,13 b' def main():'
87 sys.stderr.write('Illegal command "%s"\n' % orig_cmd)
99 sys.stderr.write('Illegal command "%s"\n' % orig_cmd)
88 sys.exit(255)
100 sys.exit(255)
89
101
102
90 def rejectpush(ui, **kwargs):
103 def rejectpush(ui, **kwargs):
91 ui.warn((b"Permission denied\n"))
104 ui.warn((b"Permission denied\n"))
92 # mercurial hooks use unix process conventions for hook return values
105 # mercurial hooks use unix process conventions for hook return values
93 # so a truthy return means failure
106 # so a truthy return means failure
94 return True
107 return True
95
108
109
96 if __name__ == '__main__':
110 if __name__ == '__main__':
97 main()
111 main()
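The hg-ssh hunk above normalizes every allowed path before comparing it against the requested repository, and enforces read-only mode by installing a hook that always fails (Mercurial hooks follow Unix conventions, so a truthy return value aborts the operation). A small sketch of just the path-normalization step, with made-up paths:

    import os

    def normalize_allowed(paths, cwd):
        # Mirrors hg-ssh's handling: expand ~, resolve relative to the current
        # directory, and normalize so later equality checks are exact.
        return [
            os.path.normpath(os.path.join(cwd, os.path.expanduser(p)))
            for p in paths
        ]

    print(normalize_allowed(["~/repos/hg", "work/../work/repo"], os.getcwd()))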
@@ -39,7 +39,7 b' def connectpipe(path=None, extraargs=())'
39 cmdline.extend(extraargs)
39 cmdline.extend(extraargs)
40
40
41 def tonative(cmdline):
41 def tonative(cmdline):
42 if os.name != r'nt':
42 if os.name != 'nt':
43 return cmdline
43 return cmdline
44 return [arg.decode("utf-8") for arg in cmdline]
44 return [arg.decode("utf-8") for arg in cmdline]
45
45
@@ -37,18 +37,24 b" libdir = '@LIBDIR@'"
37
37
38 if libdir != '@' 'LIBDIR' '@':
38 if libdir != '@' 'LIBDIR' '@':
39 if not os.path.isabs(libdir):
39 if not os.path.isabs(libdir):
40 libdir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
40 libdir = os.path.join(
41 libdir)
41 os.path.dirname(os.path.realpath(__file__)), libdir
42 )
42 libdir = os.path.abspath(libdir)
43 libdir = os.path.abspath(libdir)
43 sys.path.insert(0, libdir)
44 sys.path.insert(0, libdir)
44
45
45 # enable importing on demand to reduce startup time
46 # enable importing on demand to reduce startup time
46 try:
47 try:
47 from mercurial import demandimport; demandimport.enable()
48 from mercurial import demandimport
49
50 demandimport.enable()
48 except ImportError:
51 except ImportError:
49 import sys
52 import sys
50 sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" %
53
51 ' '.join(sys.path))
54 sys.stderr.write(
55 "abort: couldn't find mercurial libraries in [%s]\n"
56 % ' '.join(sys.path)
57 )
52 sys.stderr.write("(check your install and PYTHONPATH)\n")
58 sys.stderr.write("(check your install and PYTHONPATH)\n")
53 sys.exit(-1)
59 sys.exit(-1)
54
60
@@ -57,6 +63,7 b' from mercurial import ('
57 util,
63 util,
58 )
64 )
59
65
66
60 def timer(func, title=None):
67 def timer(func, title=None):
61 results = []
68 results = []
62 begin = util.timer()
69 begin = util.timer()
@@ -69,7 +76,7 b' def timer(func, title=None):'
69 ostop = os.times()
76 ostop = os.times()
70 count += 1
77 count += 1
71 a, b = ostart, ostop
78 a, b = ostart, ostop
72 results.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
79 results.append((cstop - cstart, b[0] - a[0], b[1] - a[1]))
73 if cstop - begin > 3 and count >= 100:
80 if cstop - begin > 3 and count >= 100:
74 break
81 break
75 if cstop - begin > 10 and count >= 3:
82 if cstop - begin > 10 and count >= 3:
@@ -79,19 +86,27 b' def timer(func, title=None):'
79 if r:
86 if r:
80 sys.stderr.write("! result: %s\n" % r)
87 sys.stderr.write("! result: %s\n" % r)
81 m = min(results)
88 m = min(results)
82 sys.stderr.write("! wall %f comb %f user %f sys %f (best of %d)\n"
89 sys.stderr.write(
83 % (m[0], m[1] + m[2], m[1], m[2], count))
90 "! wall %f comb %f user %f sys %f (best of %d)\n"
91 % (m[0], m[1] + m[2], m[1], m[2], count)
92 )
93
84
94
85 orgruncommand = dispatch.runcommand
95 orgruncommand = dispatch.runcommand
86
96
97
87 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
98 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
88 ui.pushbuffer()
99 ui.pushbuffer()
89 lui.pushbuffer()
100 lui.pushbuffer()
90 timer(lambda : orgruncommand(lui, repo, cmd, fullargs, ui,
101 timer(
91 options, d, cmdpats, cmdoptions))
102 lambda: orgruncommand(
103 lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions
104 )
105 )
92 ui.popbuffer()
106 ui.popbuffer()
93 lui.popbuffer()
107 lui.popbuffer()
94
108
109
95 dispatch.runcommand = runcommand
110 dispatch.runcommand = runcommand
96
111
97 dispatch.run()
112 dispatch.run()
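The ``timer`` helper above records a (wall, user, sys) triple per run and reports the best one. A simplified, self-contained variant of that loop, substituting ``time.perf_counter`` for Mercurial's ``util.timer`` and using made-up stop thresholds:

    import os
    import time

    def timer(func, limit=3.0, max_runs=100):
        results = []
        begin = time.perf_counter()
        count = 0
        while True:
            ostart = os.times()
            cstart = time.perf_counter()
            func()
            cstop = time.perf_counter()
            ostop = os.times()
            count += 1
            # os.times() index 0 is user CPU time, index 1 is system CPU time.
            results.append((cstop - cstart,
                            ostop[0] - ostart[0],
                            ostop[1] - ostart[1]))
            if count >= max_runs or (cstop - begin > limit and count >= 3):
                break
        m = min(results)
        print("! wall %f comb %f user %f sys %f (best of %d)"
              % (m[0], m[1] + m[2], m[1], m[2], count))

    timer(lambda: sum(range(100000)))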
@@ -7,13 +7,16 b' config = "/path/to/repo/or/config"'
7
7
8 # Uncomment and adjust if Mercurial is not installed system-wide
8 # Uncomment and adjust if Mercurial is not installed system-wide
9 # (consult "installed modules" path from 'hg debuginstall'):
9 # (consult "installed modules" path from 'hg debuginstall'):
10 #import sys; sys.path.insert(0, "/path/to/python/lib")
10 # import sys; sys.path.insert(0, "/path/to/python/lib")
11
11
12 # Uncomment to send python tracebacks to the browser if an error occurs:
12 # Uncomment to send python tracebacks to the browser if an error occurs:
13 #import cgitb; cgitb.enable()
13 # import cgitb; cgitb.enable()
14
14
15 from mercurial import demandimport; demandimport.enable()
15 from mercurial import demandimport
16
17 demandimport.enable()
16 from mercurial.hgweb import hgweb
18 from mercurial.hgweb import hgweb
17 from flup.server.fcgi import WSGIServer
19 from flup.server.fcgi import WSGIServer
20
18 application = hgweb(config)
21 application = hgweb(config)
19 WSGIServer(application).run()
22 WSGIServer(application).run()
@@ -535,6 +535,8 b' def verify_modern_convention(module, roo'
535 if fullname != '__future__':
535 if fullname != '__future__':
536 if not fullname or (
536 if not fullname or (
537 fullname in stdlib_modules
537 fullname in stdlib_modules
538 # allow standard 'from typing import ...' style
539 and fullname.startswith('.')
538 and fullname not in localmods
540 and fullname not in localmods
539 and fullname + '.__init__' not in localmods
541 and fullname + '.__init__' not in localmods
540 ):
542 ):
@@ -10,7 +10,8 b' RUN apt-get update && apt-get install -y'
10 dh-python \
10 dh-python \
11 less \
11 less \
12 python \
12 python \
13 python-all-dev \
13 python3-all \
14 python-docutils \
14 python3-all-dev \
15 python3-docutils \
15 unzip \
16 unzip \
16 zip
17 zip
@@ -11,6 +11,7 b' import shutil'
11 import subprocess
11 import subprocess
12 import sys
12 import sys
13
13
14
14 def get_docker() -> str:
15 def get_docker() -> str:
15 docker = shutil.which('docker.io') or shutil.which('docker')
16 docker = shutil.which('docker.io') or shutil.which('docker')
16 if not docker:
17 if not docker:
@@ -21,15 +22,16 b' def get_docker() -> str:'
21 out = subprocess.check_output([docker, '-h'], stderr=subprocess.STDOUT)
22 out = subprocess.check_output([docker, '-h'], stderr=subprocess.STDOUT)
22
23
23 if b'Jansens' in out:
24 if b'Jansens' in out:
24 print('%s is the Docking System Tray; try installing docker.io' %
25 print(
25 docker)
26 '%s is the Docking System Tray; try installing docker.io'
27 % docker
28 )
26 sys.exit(1)
29 sys.exit(1)
27 except subprocess.CalledProcessError as e:
30 except subprocess.CalledProcessError as e:
28 print('error calling `%s -h`: %s' % (docker, e.output))
31 print('error calling `%s -h`: %s' % (docker, e.output))
29 sys.exit(1)
32 sys.exit(1)
30
33
31 out = subprocess.check_output([docker, 'version'],
34 out = subprocess.check_output([docker, 'version'], stderr=subprocess.STDOUT)
32 stderr=subprocess.STDOUT)
33
35
34 lines = out.splitlines()
36 lines = out.splitlines()
35 if not any(l.startswith((b'Client:', b'Client version:')) for l in lines):
37 if not any(l.startswith((b'Client:', b'Client version:')) for l in lines):
@@ -42,6 +44,7 b' def get_docker() -> str:'
42
44
43 return docker
45 return docker
44
46
47
45 def get_dockerfile(path: pathlib.Path, args: list) -> bytes:
48 def get_dockerfile(path: pathlib.Path, args: list) -> bytes:
46 with path.open('rb') as fh:
49 with path.open('rb') as fh:
47 df = fh.read()
50 df = fh.read()
@@ -51,6 +54,7 b' def get_dockerfile(path: pathlib.Path, a'
51
54
52 return df
55 return df
53
56
57
54 def build_docker_image(dockerfile: pathlib.Path, params: list, tag: str):
58 def build_docker_image(dockerfile: pathlib.Path, params: list, tag: str):
55 """Build a Docker image from a templatized Dockerfile."""
59 """Build a Docker image from a templatized Dockerfile."""
56 docker = get_docker()
60 docker = get_docker()
@@ -65,9 +69,12 b' def build_docker_image(dockerfile: pathl'
65 args = [
69 args = [
66 docker,
70 docker,
67 'build',
71 'build',
68 '--build-arg', 'http_proxy',
72 '--build-arg',
69 '--build-arg', 'https_proxy',
73 'http_proxy',
70 '--tag', tag,
74 '--build-arg',
75 'https_proxy',
76 '--tag',
77 tag,
71 '-',
78 '-',
72 ]
79 ]
73
80
@@ -76,8 +83,10 b' def build_docker_image(dockerfile: pathl'
76 p.communicate(input=dockerfile)
83 p.communicate(input=dockerfile)
77 if p.returncode:
84 if p.returncode:
78 raise subprocess.CalledProcessException(
85 raise subprocess.CalledProcessException(
79 p.returncode, 'failed to build docker image: %s %s'
86 p.returncode,
80 % (p.stdout, p.stderr))
87 'failed to build docker image: %s %s' % (p.stdout, p.stderr),
88 )
89
81
90
82 def command_build(args):
91 def command_build(args):
83 build_args = []
92 build_args = []
@@ -85,13 +94,13 b' def command_build(args):'
85 k, v = arg.split('=', 1)
94 k, v = arg.split('=', 1)
86 build_args.append((k.encode('utf-8'), v.encode('utf-8')))
95 build_args.append((k.encode('utf-8'), v.encode('utf-8')))
87
96
88 build_docker_image(pathlib.Path(args.dockerfile),
97 build_docker_image(pathlib.Path(args.dockerfile), build_args, args.tag)
89 build_args,
98
90 args.tag)
91
99
92 def command_docker(args):
100 def command_docker(args):
93 print(get_docker())
101 print(get_docker())
94
102
103
95 def main() -> int:
104 def main() -> int:
96 parser = argparse.ArgumentParser()
105 parser = argparse.ArgumentParser()
97
106
@@ -99,9 +108,12 b' def main() -> int:'
99
108
100 build = subparsers.add_parser('build', help='Build a Docker image')
109 build = subparsers.add_parser('build', help='Build a Docker image')
101 build.set_defaults(func=command_build)
110 build.set_defaults(func=command_build)
102 build.add_argument('--build-arg', action='append', default=[],
111 build.add_argument(
103 help='Substitution to perform in Dockerfile; '
112 '--build-arg',
104 'format: key=value')
113 action='append',
114 default=[],
115 help='Substitution to perform in Dockerfile; ' 'format: key=value',
116 )
105 build.add_argument('dockerfile', help='path to Dockerfile to use')
117 build.add_argument('dockerfile', help='path to Dockerfile to use')
106 build.add_argument('tag', help='Tag to apply to created image')
118 build.add_argument('tag', help='Tag to apply to created image')
107
119
@@ -112,5 +124,6 b' def main() -> int:'
112
124
113 return args.func(args)
125 return args.func(args)
114
126
127
115 if __name__ == '__main__':
128 if __name__ == '__main__':
116 sys.exit(main())
129 sys.exit(main())
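The ``--build-arg`` handling above turns each ``key=value`` pair into a bytes tuple used for Dockerfile template substitution. A short sketch of that parsing step in isolation (the argument value is an example):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--build-arg', action='append', default=[],
                        help='Substitution to perform in Dockerfile; format: key=value')
    args = parser.parse_args(['--build-arg', 'PYTHON=python3'])

    build_args = []
    for arg in args.build_arg:
        # Split on the first '=' only, so values may themselves contain '='.
        k, v = arg.split('=', 1)
        build_args.append((k.encode('utf-8'), v.encode('utf-8')))

    print(build_args)  # [(b'PYTHON', b'python3')]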
@@ -12,9 +12,16 b' import pathlib'
12 import shutil
12 import shutil
13 import subprocess
13 import subprocess
14
14
15 from .py2exe import build_py2exe
15 import jinja2
16 from .util import find_vc_runtime_files
17
16
17 from .py2exe import (
18 build_py2exe,
19 stage_install,
20 )
21 from .util import (
22 find_vc_runtime_files,
23 read_version_py,
24 )
18
25
19 EXTRA_PACKAGES = {
26 EXTRA_PACKAGES = {
20 'dulwich',
27 'dulwich',
@@ -23,6 +30,10 b' EXTRA_PACKAGES = {'
23 'win32ctypes',
30 'win32ctypes',
24 }
31 }
25
32
33 PACKAGE_FILES_METADATA = {
34 'ReadMe.html': 'Flags: isreadme',
35 }
36
26
37
27 def build(
38 def build(
28 source_dir: pathlib.Path,
39 source_dir: pathlib.Path,
@@ -43,11 +54,17 b' def build('
43 raise Exception('%s does not exist' % iscc_exe)
54 raise Exception('%s does not exist' % iscc_exe)
44
55
45 vc_x64 = r'\x64' in os.environ.get('LIB', '')
56 vc_x64 = r'\x64' in os.environ.get('LIB', '')
57 arch = 'x64' if vc_x64 else 'x86'
58 inno_source_dir = source_dir / 'contrib' / 'packaging' / 'inno'
59 inno_build_dir = build_dir / ('inno-%s' % arch)
60 staging_dir = inno_build_dir / 'stage'
46
61
47 requirements_txt = (
62 requirements_txt = (
48 source_dir / 'contrib' / 'packaging' / 'inno' / 'requirements.txt'
63 source_dir / 'contrib' / 'packaging' / 'inno' / 'requirements.txt'
49 )
64 )
50
65
66 inno_build_dir.mkdir(parents=True, exist_ok=True)
67
51 build_py2exe(
68 build_py2exe(
52 source_dir,
69 source_dir,
53 build_dir,
70 build_dir,
@@ -57,6 +74,15 b' def build('
57 extra_packages=EXTRA_PACKAGES,
74 extra_packages=EXTRA_PACKAGES,
58 )
75 )
59
76
77 # Purge the staging directory for every build so packaging is
78 # pristine.
79 if staging_dir.exists():
80 print('purging %s' % staging_dir)
81 shutil.rmtree(staging_dir)
82
83 # Now assemble all the packaged files into the staging directory.
84 stage_install(source_dir, staging_dir)
85
60 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
86 # hg.exe depends on VC9 runtime DLLs. Copy those into place.
61 for f in find_vc_runtime_files(vc_x64):
87 for f in find_vc_runtime_files(vc_x64):
62 if f.name.endswith('.manifest'):
88 if f.name.endswith('.manifest'):
@@ -64,22 +90,74 b' def build('
64 else:
90 else:
65 basename = f.name
91 basename = f.name
66
92
67 dest_path = source_dir / 'dist' / basename
93 dest_path = staging_dir / basename
68
94
69 print('copying %s to %s' % (f, dest_path))
95 print('copying %s to %s' % (f, dest_path))
70 shutil.copyfile(f, dest_path)
96 shutil.copyfile(f, dest_path)
71
97
98 # The final package layout is simply a mirror of the staging directory.
99 package_files = []
100 for root, dirs, files in os.walk(staging_dir):
101 dirs.sort()
102
103 root = pathlib.Path(root)
104
105 for f in sorted(files):
106 full = root / f
107 rel = full.relative_to(staging_dir)
108 if str(rel.parent) == '.':
109 dest_dir = '{app}'
110 else:
111 dest_dir = '{app}\\%s' % rel.parent
112
113 package_files.append(
114 {
115 'source': rel,
116 'dest_dir': dest_dir,
117 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None),
118 }
119 )
120
72 print('creating installer')
121 print('creating installer')
73
122
123 # Install Inno files by rendering a template.
124 jinja_env = jinja2.Environment(
125 loader=jinja2.FileSystemLoader(str(inno_source_dir)),
126 # Need to change these to prevent conflict with Inno Setup.
127 comment_start_string='{##',
128 comment_end_string='##}',
129 )
130
131 try:
132 template = jinja_env.get_template('mercurial.iss')
133 except jinja2.TemplateSyntaxError as e:
134 raise Exception(
135 'template syntax error at %s:%d: %s'
136 % (e.name, e.lineno, e.message,)
137 )
138
139 content = template.render(package_files=package_files)
140
141 with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh:
142 fh.write(content)
143
144 # Copy additional files used by Inno.
145 for p in ('mercurial.ico', 'postinstall.txt'):
146 shutil.copyfile(
147 source_dir / 'contrib' / 'win32' / p, inno_build_dir / p
148 )
149
74 args = [str(iscc_exe)]
150 args = [str(iscc_exe)]
75
151
76 if vc_x64:
152 if vc_x64:
77 args.append('/dARCH=x64')
153 args.append('/dARCH=x64')
78
154
79 if version:
155 if not version:
80 args.append('/dVERSION=%s' % version)
156 version = read_version_py(source_dir)
157
158 args.append('/dVERSION=%s' % version)
81
159
82 args.append('/Odist')
160 args.append('/Odist')
83 args.append('contrib/packaging/inno/mercurial.iss')
161 args.append(str(inno_build_dir / 'mercurial.iss'))
84
162
85 subprocess.run(args, cwd=str(source_dir), check=True)
163 subprocess.run(args, cwd=str(source_dir), check=True)
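The Inno build above renders ``mercurial.iss`` through Jinja2 with non-default comment delimiters so the template syntax does not collide with Inno Setup's own ``{#...}`` preprocessor directives. A minimal sketch of that rendering step, using an in-memory template and made-up package entries rather than the real ``mercurial.iss``:

    import jinja2

    env = jinja2.Environment(
        loader=jinja2.DictLoader({
            'mercurial.iss': (
                '{## rendered by the packaging script ##}\n'
                '[Files]\n'
                '{% for entry in package_files -%}\n'
                'Source: {{ entry.source }}; DestDir: {{ entry.dest_dir }}\n'
                '{% endfor %}'
            ),
        }),
        # Keep {# ... #} free for Inno Setup by moving Jinja comments.
        comment_start_string='{##',
        comment_end_string='##}',
    )

    content = env.get_template('mercurial.iss').render(
        package_files=[
            {'source': 'hg.exe', 'dest_dir': '{app}'},
            {'source': 'lib/library.zip', 'dest_dir': '{app}\\lib'},
        ]
    )
    print(content)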
@@ -15,10 +15,48 b' from .downloads import download_entry'
15 from .util import (
15 from .util import (
16 extract_tar_to_directory,
16 extract_tar_to_directory,
17 extract_zip_to_directory,
17 extract_zip_to_directory,
18 process_install_rules,
18 python_exe_info,
19 python_exe_info,
19 )
20 )
20
21
21
22
23 STAGING_RULES = [
24 ('contrib/bash_completion', 'Contrib/'),
25 ('contrib/hgk', 'Contrib/hgk.tcl'),
26 ('contrib/hgweb.fcgi', 'Contrib/'),
27 ('contrib/hgweb.wsgi', 'Contrib/'),
28 ('contrib/logo-droplets.svg', 'Contrib/'),
29 ('contrib/mercurial.el', 'Contrib/'),
30 ('contrib/mq.el', 'Contrib/'),
31 ('contrib/tcsh_completion', 'Contrib/'),
32 ('contrib/tcsh_completion_build.sh', 'Contrib/'),
33 ('contrib/vim/*', 'Contrib/Vim/'),
34 ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'),
35 ('contrib/win32/ReadMe.html', 'ReadMe.html'),
36 ('contrib/xml.rnc', 'Contrib/'),
37 ('contrib/zsh_completion', 'Contrib/'),
38 ('dist/hg.exe', './'),
39 ('dist/lib/*.dll', 'lib/'),
40 ('dist/lib/*.pyd', 'lib/'),
41 ('dist/lib/library.zip', 'lib/'),
42 ('dist/Microsoft.VC*.CRT.manifest', './'),
43 ('dist/msvc*.dll', './'),
44 ('dist/python*.dll', './'),
45 ('doc/*.html', 'doc/'),
46 ('doc/style.css', 'doc/'),
47 ('mercurial/helptext/**/*.txt', 'helptext/'),
48 ('mercurial/defaultrc/*.rc', 'hgrc.d/'),
49 ('mercurial/locale/**/*', 'locale/'),
50 ('mercurial/templates/**/*', 'Templates/'),
51 ('COPYING', 'Copying.txt'),
52 ]
53
54 # List of paths to exclude from the staging area.
55 STAGING_EXCLUDES = [
56 'doc/hg-ssh.8.html',
57 ]
58
59
22 def build_py2exe(
60 def build_py2exe(
23 source_dir: pathlib.Path,
61 source_dir: pathlib.Path,
24 build_dir: pathlib.Path,
62 build_dir: pathlib.Path,
@@ -169,3 +207,39 b' def build_py2exe('
169 env=env,
207 env=env,
170 check=True,
208 check=True,
171 )
209 )
210
211
212 def stage_install(
213 source_dir: pathlib.Path, staging_dir: pathlib.Path, lower_case=False
214 ):
215 """Copy all files to be installed to a directory.
216
217 This allows packaging to simply walk a directory tree to find source
218 files.
219 """
220 if lower_case:
221 rules = []
222 for source, dest in STAGING_RULES:
223 # Only lower directory names.
224 if '/' in dest:
225 parent, leaf = dest.rsplit('/', 1)
226 dest = '%s/%s' % (parent.lower(), leaf)
227 rules.append((source, dest))
228 else:
229 rules = STAGING_RULES
230
231 process_install_rules(rules, source_dir, staging_dir)
232
233 # Write out a default editor.rc file to configure notepad as the
234 # default editor.
235 with (staging_dir / 'hgrc.d' / 'editor.rc').open(
236 'w', encoding='utf-8'
237 ) as fh:
238 fh.write('[ui]\neditor = notepad\n')
239
240 # Purge any files we don't want to be there.
241 for f in STAGING_EXCLUDES:
242 p = staging_dir / f
243 if p.exists():
244 print('removing %s' % p)
245 p.unlink()
@@ -9,8 +9,11 b''
9
9
10 import distutils.version
10 import distutils.version
11 import getpass
11 import getpass
12 import glob
12 import os
13 import os
13 import pathlib
14 import pathlib
15 import re
16 import shutil
14 import subprocess
17 import subprocess
15 import tarfile
18 import tarfile
16 import zipfile
19 import zipfile
@@ -164,3 +167,60 b' def python_exe_info(python_exe: pathlib.'
164 'version': version,
167 'version': version,
165 'py3': version >= distutils.version.LooseVersion('3'),
168 'py3': version >= distutils.version.LooseVersion('3'),
166 }
169 }
170
171
172 def process_install_rules(
173 rules: list, source_dir: pathlib.Path, dest_dir: pathlib.Path
174 ):
175 for source, dest in rules:
176 if '*' in source:
177 if not dest.endswith('/'):
178 raise ValueError('destination must end in / when globbing')
179
180 # We strip off the source path component before the first glob
181 # character to construct the relative install path.
182 prefix_end_index = source[: source.index('*')].rindex('/')
183 relative_prefix = source_dir / source[0:prefix_end_index]
184
185 for res in glob.glob(str(source_dir / source), recursive=True):
186 source_path = pathlib.Path(res)
187
188 if source_path.is_dir():
189 continue
190
191 rel_path = source_path.relative_to(relative_prefix)
192
193 dest_path = dest_dir / dest[:-1] / rel_path
194
195 dest_path.parent.mkdir(parents=True, exist_ok=True)
196 print('copying %s to %s' % (source_path, dest_path))
197 shutil.copy(source_path, dest_path)
198
199 # Simple file case.
200 else:
201 source_path = pathlib.Path(source)
202
203 if dest.endswith('/'):
204 dest_path = pathlib.Path(dest) / source_path.name
205 else:
206 dest_path = pathlib.Path(dest)
207
208 full_source_path = source_dir / source_path
209 full_dest_path = dest_dir / dest_path
210
211 full_dest_path.parent.mkdir(parents=True, exist_ok=True)
212 shutil.copy(full_source_path, full_dest_path)
213 print('copying %s to %s' % (full_source_path, full_dest_path))
214
215
216 def read_version_py(source_dir):
217 """Read the mercurial/__version__.py file to resolve the version string."""
218 p = source_dir / 'mercurial' / '__version__.py'
219
220 with p.open('r', encoding='utf-8') as fh:
221 m = re.search('version = b"([^"]+)"', fh.read(), re.MULTILINE)
222
223 if not m:
224 raise Exception('could not parse %s' % p)
225
226 return m.group(1)
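``process_install_rules`` above maps a globbed source pattern onto a destination directory by stripping the path prefix before the first ``*``. A worked example of that mapping, with illustrative directories and a single pretend glob match:

    import pathlib

    source = 'dist/lib/*.dll'          # staging rule source pattern
    dest = 'lib/'                      # staging rule destination (must end in /)
    source_dir = pathlib.Path('/src/hg')
    dest_dir = pathlib.Path('/staging')

    # Strip the source path component before the first glob character.
    prefix_end_index = source[: source.index('*')].rindex('/')
    relative_prefix = source_dir / source[0:prefix_end_index]   # /src/hg/dist/lib

    matched = pathlib.Path('/src/hg/dist/lib/python27.dll')     # one glob hit
    rel_path = matched.relative_to(relative_prefix)             # python27.dll
    dest_path = dest_dir / dest[:-1] / rel_path

    print(dest_path)  # /staging/lib/python27.dll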
@@ -7,39 +7,60 b''
7
7
8 # no-check-code because Python 3 native.
8 # no-check-code because Python 3 native.
9
9
10 import collections
10 import os
11 import os
11 import pathlib
12 import pathlib
12 import re
13 import re
14 import shutil
13 import subprocess
15 import subprocess
14 import tempfile
15 import typing
16 import typing
17 import uuid
16 import xml.dom.minidom
18 import xml.dom.minidom
17
19
18 from .downloads import download_entry
20 from .downloads import download_entry
19 from .py2exe import build_py2exe
21 from .py2exe import (
22 build_py2exe,
23 stage_install,
24 )
20 from .util import (
25 from .util import (
21 extract_zip_to_directory,
26 extract_zip_to_directory,
27 process_install_rules,
22 sign_with_signtool,
28 sign_with_signtool,
23 )
29 )
24
30
25
31
26 SUPPORT_WXS = [
27 ('contrib.wxs', r'contrib'),
28 ('dist.wxs', r'dist'),
29 ('doc.wxs', r'doc'),
30 ('help.wxs', r'mercurial\help'),
31 ('i18n.wxs', r'i18n'),
32 ('locale.wxs', r'mercurial\locale'),
33 ('templates.wxs', r'mercurial\templates'),
34 ]
35
36
37 EXTRA_PACKAGES = {
32 EXTRA_PACKAGES = {
38 'distutils',
33 'distutils',
39 'pygments',
34 'pygments',
40 }
35 }
41
36
42
37
38 EXTRA_INSTALL_RULES = [
39 ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'),
40 ('contrib/win32/mercurial.ini', 'hgrc.d/mercurial.rc'),
41 ]
42
43 STAGING_REMOVE_FILES = [
44 # We use the RTF variant.
45 'copying.txt',
46 ]
47
48 SHORTCUTS = {
49 # hg.1.html'
50 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': {
51 'Name': 'Mercurial Command Reference',
52 },
53 # hgignore.5.html
54 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': {
55 'Name': 'Mercurial Ignore Files',
56 },
57 # hgrc.5.html
58 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': {
59 'Name': 'Mercurial Configuration Files',
60 },
61 }
62
63
43 def find_version(source_dir: pathlib.Path):
64 def find_version(source_dir: pathlib.Path):
44 version_py = source_dir / 'mercurial' / '__version__.py'
65 version_py = source_dir / 'mercurial' / '__version__.py'
45
66
@@ -148,49 +169,165 b' def make_post_build_signing_fn('
148 return post_build_sign
169 return post_build_sign
149
170
150
171
151 LIBRARIES_XML = '''
172 def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str:
152 <?xml version="1.0" encoding="utf-8"?>
173 """Create XML string listing every file to be installed."""
153 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
154
155 <?include {wix_dir}/guids.wxi ?>
156 <?include {wix_dir}/defines.wxi ?>
157
174
158 <Fragment>
175 # We derive GUIDs from a deterministic file path identifier.
159 <DirectoryRef Id="INSTALLDIR" FileSource="$(var.SourceDir)">
176 # We shoehorn the name into something that looks like a URL because
160 <Directory Id="libdir" Name="lib" FileSource="$(var.SourceDir)/lib">
177 # the UUID namespaces are supposed to work that way (even though
161 <Component Id="libOutput" Guid="$(var.lib.guid)" Win64='$(var.IsX64)'>
178 # the input data probably is never validated).
162 </Component>
163 </Directory>
164 </DirectoryRef>
165 </Fragment>
166 </Wix>
167 '''.lstrip()
168
179
169
170 def make_libraries_xml(wix_dir: pathlib.Path, dist_dir: pathlib.Path):
171 """Make XML data for library components WXS."""
172 # We can't use ElementTree because it doesn't handle the
173 # <?include ?> directives.
174 doc = xml.dom.minidom.parseString(
180 doc = xml.dom.minidom.parseString(
175 LIBRARIES_XML.format(wix_dir=str(wix_dir))
181 '<?xml version="1.0" encoding="utf-8"?>'
182 '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">'
183 '</Wix>'
176 )
184 )
177
185
178 component = doc.getElementsByTagName('Component')[0]
186 # Assemble the install layout by directory. This makes it easier to
187 # emit XML, since each directory has separate entities.
188 manifest = collections.defaultdict(dict)
189
190 for root, dirs, files in os.walk(staging_dir):
191 dirs.sort()
192
193 root = pathlib.Path(root)
194 rel_dir = root.relative_to(staging_dir)
195
196 for i in range(len(rel_dir.parts)):
197 parent = '/'.join(rel_dir.parts[0 : i + 1])
198 manifest.setdefault(parent, {})
199
200 for f in sorted(files):
201 full = root / f
202 manifest[str(rel_dir).replace('\\', '/')][full.name] = full
203
204 component_groups = collections.defaultdict(list)
205
206 # Now emit a <Fragment> for each directory.
207 # Each directory is composed of a <DirectoryRef> pointing to its parent
208 # and defines child <Directory>'s and a <Component> with all the files.
209 for dir_name, entries in sorted(manifest.items()):
210 # The directory id is derived from the path. But the root directory
211 # is special.
212 if dir_name == '.':
213 parent_directory_id = 'INSTALLDIR'
214 else:
215 parent_directory_id = 'hg.dir.%s' % dir_name.replace('/', '.')
179
216
180 f = doc.createElement('File')
217 fragment = doc.createElement('Fragment')
181 f.setAttribute('Name', 'library.zip')
218 directory_ref = doc.createElement('DirectoryRef')
182 f.setAttribute('KeyPath', 'yes')
219 directory_ref.setAttribute('Id', parent_directory_id)
183 component.appendChild(f)
220
221 # Add <Directory> entries for immediate children directories.
222 for possible_child in sorted(manifest.keys()):
223 if (
224 dir_name == '.'
225 and '/' not in possible_child
226 and possible_child != '.'
227 ):
228 child_directory_id = 'hg.dir.%s' % possible_child
229 name = possible_child
230 else:
231 if not possible_child.startswith('%s/' % dir_name):
232 continue
233 name = possible_child[len(dir_name) + 1 :]
234 if '/' in name:
235 continue
236
237 child_directory_id = 'hg.dir.%s' % possible_child.replace(
238 '/', '.'
239 )
240
241 directory = doc.createElement('Directory')
242 directory.setAttribute('Id', child_directory_id)
243 directory.setAttribute('Name', name)
244 directory_ref.appendChild(directory)
245
246 # Add <Component>s for files in this directory.
247 for rel, source_path in sorted(entries.items()):
248 if dir_name == '.':
249 full_rel = rel
250 else:
251 full_rel = '%s/%s' % (dir_name, rel)
184
252
185 lib_dir = dist_dir / 'lib'
253 component_unique_id = (
254 'https://www.mercurial-scm.org/wix-installer/0/component/%s'
255 % full_rel
256 )
257 component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id)
258 component_id = 'hg.component.%s' % str(component_guid).replace(
259 '-', '_'
260 )
261
262 component = doc.createElement('Component')
263
264 component.setAttribute('Id', component_id)
265 component.setAttribute('Guid', str(component_guid).upper())
266 component.setAttribute('Win64', 'yes' if is_x64 else 'no')
267
268 # Assign this component to a top-level group.
269 if dir_name == '.':
270 component_groups['ROOT'].append(component_id)
271 elif '/' in dir_name:
272 component_groups[dir_name[0 : dir_name.index('/')]].append(
273 component_id
274 )
275 else:
276 component_groups[dir_name].append(component_id)
277
278 unique_id = (
279 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel
280 )
281 file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
282
283 # IDs have length limits. So use GUID to derive them.
284 file_guid_normalized = str(file_guid).replace('-', '_')
285 file_id = 'hg.file.%s' % file_guid_normalized
186
286
187 for p in sorted(lib_dir.iterdir()):
287 file_element = doc.createElement('File')
188 if not p.name.endswith(('.dll', '.pyd')):
288 file_element.setAttribute('Id', file_id)
189 continue
289 file_element.setAttribute('Source', str(source_path))
290 file_element.setAttribute('KeyPath', 'yes')
291 file_element.setAttribute('ReadOnly', 'yes')
292
293 component.appendChild(file_element)
294 directory_ref.appendChild(component)
295
296 fragment.appendChild(directory_ref)
297 doc.documentElement.appendChild(fragment)
298
299 for group, component_ids in sorted(component_groups.items()):
300 fragment = doc.createElement('Fragment')
301 component_group = doc.createElement('ComponentGroup')
302 component_group.setAttribute('Id', 'hg.group.%s' % group)
303
304 for component_id in component_ids:
305 component_ref = doc.createElement('ComponentRef')
306 component_ref.setAttribute('Id', component_id)
307 component_group.appendChild(component_ref)
190
308
191 f = doc.createElement('File')
309 fragment.appendChild(component_group)
192 f.setAttribute('Name', p.name)
310 doc.documentElement.appendChild(fragment)
193 component.appendChild(f)
311
312 # Add <Shortcut> to files that have it defined.
313 for file_id, metadata in sorted(SHORTCUTS.items()):
314 els = doc.getElementsByTagName('File')
315 els = [el for el in els if el.getAttribute('Id') == file_id]
316
317 if not els:
318 raise Exception('could not find File[Id=%s]' % file_id)
319
320 for el in els:
321 shortcut = doc.createElement('Shortcut')
322 shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id)
323 shortcut.setAttribute('Directory', 'ProgramMenuDir')
324 shortcut.setAttribute('Icon', 'hgIcon.ico')
325 shortcut.setAttribute('IconIndex', '0')
326 shortcut.setAttribute('Advertise', 'yes')
327 for k, v in sorted(metadata.items()):
328 shortcut.setAttribute(k, v)
329
330 el.appendChild(shortcut)
194
331
195 return doc.toprettyxml()
332 return doc.toprettyxml()
196
333
@@ -249,9 +386,27 b' def build_installer('
249 post_build_fn(source_dir, hg_build_dir, dist_dir, version)
386 post_build_fn(source_dir, hg_build_dir, dist_dir, version)
250
387
251 build_dir = hg_build_dir / ('wix-%s' % arch)
388 build_dir = hg_build_dir / ('wix-%s' % arch)
389 staging_dir = build_dir / 'stage'
252
390
253 build_dir.mkdir(exist_ok=True)
391 build_dir.mkdir(exist_ok=True)
254
392
393 # Purge the staging directory for every build so packaging is pristine.
394 if staging_dir.exists():
395 print('purging %s' % staging_dir)
396 shutil.rmtree(staging_dir)
397
398 stage_install(source_dir, staging_dir, lower_case=True)
399
400 # We also install some extra files.
401 process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir)
402
403 # And remove some files we don't want.
404 for f in STAGING_REMOVE_FILES:
405 p = staging_dir / f
406 if p.exists():
407 print('removing %s' % p)
408 p.unlink()
409
255 wix_pkg, wix_entry = download_entry('wix', hg_build_dir)
410 wix_pkg, wix_entry = download_entry('wix', hg_build_dir)
256 wix_path = hg_build_dir / ('wix-%s' % wix_entry['version'])
411 wix_path = hg_build_dir / ('wix-%s' % wix_entry['version'])
257
412
@@ -264,25 +419,16 b' def build_installer('
264
419
265 defines = {'Platform': arch}
420 defines = {'Platform': arch}
266
421
267 for wxs, rel_path in SUPPORT_WXS:
422 # Derive a .wxs file with the staged files.
268 wxs = wix_dir / wxs
423 manifest_wxs = build_dir / 'stage.wxs'
269 wxs_source_dir = source_dir / rel_path
424 with manifest_wxs.open('w', encoding='utf-8') as fh:
270 run_candle(wix_path, build_dir, wxs, wxs_source_dir, defines=defines)
425 fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64'))
426
427 run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines)
271
428
272 for source, rel_path in sorted((extra_wxs or {}).items()):
429 for source, rel_path in sorted((extra_wxs or {}).items()):
273 run_candle(wix_path, build_dir, source, rel_path, defines=defines)
430 run_candle(wix_path, build_dir, source, rel_path, defines=defines)
274
431
275 # candle.exe doesn't like when we have an open handle on the file.
276 # So use TemporaryDirectory() instead of NamedTemporaryFile().
277 with tempfile.TemporaryDirectory() as td:
278 td = pathlib.Path(td)
279
280 tf = td / 'library.wxs'
281 with tf.open('w') as fh:
282 fh.write(make_libraries_xml(wix_dir, dist_dir))
283
284 run_candle(wix_path, build_dir, tf, dist_dir, defines=defines)
285
286 source = wix_dir / 'mercurial.wxs'
432 source = wix_dir / 'mercurial.wxs'
287 defines['Version'] = version
433 defines['Version'] = version
288 defines['Comments'] = 'Installs Mercurial version %s' % version
434 defines['Comments'] = 'Installs Mercurial version %s' % version
@@ -308,20 +454,13 b' def build_installer('
308 str(msi_path),
454 str(msi_path),
309 ]
455 ]
310
456
311 for source, rel_path in SUPPORT_WXS:
312 assert source.endswith('.wxs')
313 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
314
315 for source, rel_path in sorted((extra_wxs or {}).items()):
457 for source, rel_path in sorted((extra_wxs or {}).items()):
316 assert source.endswith('.wxs')
458 assert source.endswith('.wxs')
317 source = os.path.basename(source)
459 source = os.path.basename(source)
318 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
460 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
319
461
320 args.extend(
462 args.extend(
321 [
463 [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
322 str(build_dir / 'library.wixobj'),
323 str(build_dir / 'mercurial.wixobj'),
324 ]
325 )
464 )
326
465
327 subprocess.run(args, cwd=str(source_dir), check=True)
466 subprocess.run(args, cwd=str(source_dir), check=True)
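``make_files_xml`` above derives WiX component GUIDs from stable, URL-shaped identifiers so that the same installed file receives the same GUID on every build. A minimal sketch of that scheme (the relative path is an example):

    import uuid

    def component_guid(rel_path):
        unique_id = (
            'https://www.mercurial-scm.org/wix-installer/0/component/%s'
            % rel_path
        )
        # uuid5 is a deterministic, namespaced hash, so repeated builds agree.
        return uuid.uuid5(uuid.NAMESPACE_URL, unique_id)

    guid = component_guid('lib/library.zip')
    print(guid)                                        # identical on every run
    print('hg.component.%s' % str(guid).replace('-', '_'))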
@@ -1,21 +1,6 b''
1 ; Script generated by the Inno Setup Script Wizard.
1 ; Script generated by the Inno Setup Script Wizard.
2 ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
2 ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
3
3
4 #ifndef VERSION
5 #define FileHandle
6 #define FileLine
7 #define VERSION = "unknown"
8 #if FileHandle = FileOpen(SourcePath + "\..\..\..\mercurial\__version__.py")
9 #expr FileLine = FileRead(FileHandle)
10 #expr FileLine = FileRead(FileHandle)
11 #define VERSION = Copy(FileLine, Pos('"', FileLine)+1, Len(FileLine)-Pos('"', FileLine)-1)
12 #endif
13 #if FileHandle
14 #expr FileClose(FileHandle)
15 #endif
16 #pragma message "Detected Version: " + VERSION
17 #endif
18
19 #ifndef ARCH
4 #ifndef ARCH
20 #define ARCH = "x86"
5 #define ARCH = "x86"
21 #endif
6 #endif
@@ -33,68 +18,40 b' ArchitecturesInstallIn64BitMode=x64'
33 AppVerName=Mercurial {#VERSION}
18 AppVerName=Mercurial {#VERSION}
34 OutputBaseFilename=Mercurial-{#VERSION}
19 OutputBaseFilename=Mercurial-{#VERSION}
35 #endif
20 #endif
36 InfoAfterFile=contrib/win32/postinstall.txt
21 InfoAfterFile=../postinstall.txt
37 LicenseFile=COPYING
22 LicenseFile=Copying.txt
38 ShowLanguageDialog=yes
23 ShowLanguageDialog=yes
39 AppPublisher=Matt Mackall and others
24 AppPublisher=Matt Mackall and others
40 AppPublisherURL=https://mercurial-scm.org/
25 AppPublisherURL=https://mercurial-scm.org/
41 AppSupportURL=https://mercurial-scm.org/
26 AppSupportURL=https://mercurial-scm.org/
42 AppUpdatesURL=https://mercurial-scm.org/
27 AppUpdatesURL=https://mercurial-scm.org/
43 AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}
28 {{ 'AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3}' }}
44 AppContact=mercurial@mercurial-scm.org
29 AppContact=mercurial@mercurial-scm.org
45 DefaultDirName={pf}\Mercurial
30 DefaultDirName={pf}\Mercurial
46 SourceDir=..\..\..
31 SourceDir=stage
47 VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
32 VersionInfoDescription=Mercurial distributed SCM (version {#VERSION})
48 VersionInfoCopyright=Copyright 2005-2019 Matt Mackall and others
33 VersionInfoCopyright=Copyright 2005-2019 Matt Mackall and others
49 VersionInfoCompany=Matt Mackall and others
34 VersionInfoCompany=Matt Mackall and others
50 InternalCompressLevel=max
35 InternalCompressLevel=max
51 SolidCompression=true
36 SolidCompression=true
52 SetupIconFile=contrib\win32\mercurial.ico
37 SetupIconFile=../mercurial.ico
53 AllowNoIcons=true
38 AllowNoIcons=true
54 DefaultGroupName=Mercurial
39 DefaultGroupName=Mercurial
55 PrivilegesRequired=none
40 PrivilegesRequired=none
56 ChangesEnvironment=true
41 ChangesEnvironment=true
57
42
58 [Files]
43 [Files]
59 Source: contrib\mercurial.el; DestDir: {app}/Contrib
44 {% for entry in package_files -%}
60 Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim
45 Source: {{ entry.source }}; DestDir: {{ entry.dest_dir }}
61 Source: contrib\zsh_completion; DestDir: {app}/Contrib
46 {%- if entry.metadata %}; {{ entry.metadata }}{% endif %}
62 Source: contrib\bash_completion; DestDir: {app}/Contrib
47 {% endfor %}
63 Source: contrib\tcsh_completion; DestDir: {app}/Contrib
64 Source: contrib\tcsh_completion_build.sh; DestDir: {app}/Contrib
65 Source: contrib\hgk; DestDir: {app}/Contrib; DestName: hgk.tcl
66 Source: contrib\xml.rnc; DestDir: {app}/Contrib
67 Source: contrib\mercurial.el; DestDir: {app}/Contrib
68 Source: contrib\mq.el; DestDir: {app}/Contrib
69 Source: contrib\hgweb.fcgi; DestDir: {app}/Contrib
70 Source: contrib\hgweb.wsgi; DestDir: {app}/Contrib
71 Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme
72 Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt
73 Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local')
74 Source: dist\lib\*.dll; Destdir: {app}\lib
75 Source: dist\lib\*.pyd; Destdir: {app}\lib
76 Source: dist\python*.dll; Destdir: {app}; Flags: skipifsourcedoesntexist
77 Source: dist\msvc*.dll; DestDir: {app}; Flags: skipifsourcedoesntexist
78 Source: dist\Microsoft.VC*.CRT.manifest; DestDir: {app}; Flags: skipifsourcedoesntexist
79 Source: dist\lib\library.zip; DestDir: {app}\lib
80 Source: doc\*.html; DestDir: {app}\Docs
81 Source: doc\style.css; DestDir: {app}\Docs
82 Source: mercurial\help\*.txt; DestDir: {app}\help
83 Source: mercurial\help\internals\*.txt; DestDir: {app}\help\internals
84 Source: mercurial\default.d\*.rc; DestDir: {app}\default.d
85 Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs createallsubdirs skipifsourcedoesntexist
86 Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs
87 Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt
88 Source: COPYING; DestDir: {app}; DestName: Copying.txt
89
48
90 [INI]
49 [INI]
91 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/
50 Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/
92 Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad
93
51
94 [UninstallDelete]
52 [UninstallDelete]
95 Type: files; Name: {app}\Mercurial.url
53 Type: files; Name: {app}\Mercurial.url
96 Type: filesandordirs; Name: {app}\default.d
54 Type: filesandordirs; Name: {app}\hgrc.d
97 Type: files; Name: "{app}\hg.exe.local"
98
55
99 [Icons]
56 [Icons]
100 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
57 Name: {group}\Uninstall Mercurial; Filename: {uninstallexe}
@@ -121,4 +78,5 b' begin'
121 setArrayLength(Result, 1)
78 setArrayLength(Result, 1)
122 Result[0] := ExpandConstant('{app}');
79 Result[0] := ExpandConstant('{app}');
123 end;
80 end;
124 #include "modpath.iss"
81
82 {% include 'modpath.iss' %}
@@ -68,79 +68,42 b' begin'
68 for d := 0 to GetArrayLength(pathdir)-1 do begin
68 for d := 0 to GetArrayLength(pathdir)-1 do begin
69 updatepath := true;
69 updatepath := true;
70
70
71 // Modify WinNT path
71 // Get current path, split into an array
72 if UsingWinNT() = true then begin
72 RegQueryStringValue(regroot, regpath, 'Path', oldpath);
73
73 oldpath := oldpath + ';';
74 // Get current path, split into an array
74 i := 0;
75 RegQueryStringValue(regroot, regpath, 'Path', oldpath);
76 oldpath := oldpath + ';';
77 i := 0;
78
79 while (Pos(';', oldpath) > 0) do begin
80 SetArrayLength(pathArr, i+1);
81 pathArr[i] := Copy(oldpath, 0, Pos(';', oldpath)-1);
82 oldpath := Copy(oldpath, Pos(';', oldpath)+1, Length(oldpath));
83 i := i + 1;
84
75
85 // Check if current directory matches app dir
76 while (Pos(';', oldpath) > 0) do begin
86 if pathdir[d] = pathArr[i-1] then begin
77 SetArrayLength(pathArr, i+1);
87 // if uninstalling, remove dir from path
78 pathArr[i] := Copy(oldpath, 0, Pos(';', oldpath)-1);
88 if IsUninstaller() = true then begin
79 oldpath := Copy(oldpath, Pos(';', oldpath)+1, Length(oldpath));
89 continue;
80 i := i + 1;
90 // if installing, flag that dir already exists in path
91 end else begin
92 updatepath := false;
93 end;
94 end;
95
81
96 // Add current directory to new path
82 // Check if current directory matches app dir
97 if i = 1 then begin
83 if pathdir[d] = pathArr[i-1] then begin
98 newpath := pathArr[i-1];
84 // if uninstalling, remove dir from path
85 if IsUninstaller() = true then begin
86 continue;
87 // if installing, flag that dir already exists in path
99 end else begin
88 end else begin
100 newpath := newpath + ';' + pathArr[i-1];
89 updatepath := false;
101 end;
90 end;
102 end;
91 end;
103
92
104 // Append app dir to path if not already included
93 // Add current directory to new path
105 if (IsUninstaller() = false) AND (updatepath = true) then
94 if i = 1 then begin
106 newpath := newpath + ';' + pathdir[d];
95 newpath := pathArr[i-1];
107
108 // Write new path
109 RegWriteStringValue(regroot, regpath, 'Path', newpath);
110
111 // Modify Win9x path
112 end else begin
113
114 // Convert to shortened dirname
115 pathdir[d] := GetShortName(pathdir[d]);
116
117 // If autoexec.bat exists, check if app dir already exists in path
118 aExecFile := 'C:\AUTOEXEC.BAT';
119 if FileExists(aExecFile) then begin
120 LoadStringsFromFile(aExecFile, aExecArr);
121 for i := 0 to GetArrayLength(aExecArr)-1 do begin
122 if IsUninstaller() = false then begin
123 // If app dir already exists while installing, skip add
124 if (Pos(pathdir[d], aExecArr[i]) > 0) then
125 updatepath := false;
126 break;
127 end else begin
128 // If app dir exists and = what we originally set, then delete at uninstall
129 if aExecArr[i] = 'SET PATH=%PATH%;' + pathdir[d] then
130 aExecArr[i] := '';
131 end;
132 end;
133 end;
134
135 // If app dir not found, or autoexec.bat didn't exist, then (create and) append to current path
136 if (IsUninstaller() = false) AND (updatepath = true) then begin
137 SaveStringToFile(aExecFile, #13#10 + 'SET PATH=%PATH%;' + pathdir[d], True);
138
139 // If uninstalling, write the full autoexec out
140 end else begin
96 end else begin
141 SaveStringsToFile(aExecFile, aExecArr, False);
97 newpath := newpath + ';' + pathArr[i-1];
142 end;
98 end;
143 end;
99 end;
100
101 // Append app dir to path if not already included
102 if (IsUninstaller() = false) AND (updatepath = true) then
103 newpath := newpath + ';' + pathdir[d];
104
105 // Write new path
106 RegWriteStringValue(regroot, regpath, 'Path', newpath);
144 end;
107 end;
145 end;
108 end;
146
109
@@ -207,13 +170,6 b' begin'
207 end;
170 end;
208
171
209 function NeedRestart(): Boolean;
172 function NeedRestart(): Boolean;
210 var
211 taskname: String;
212 begin
173 begin
213 taskname := ModPathName;
174 Result := False;
214 if IsTaskSelected(taskname) and not UsingWinNT() then begin
215 Result := True;
216 end else begin
217 Result := False;
218 end;
219 end;
175 end;
@@ -11,12 +11,12 b' The following system dependencies must b'
11 * Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
11 * Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
12 Be sure to install the optional Inno Setup Preprocessor feature,
12 Be sure to install the optional Inno Setup Preprocessor feature,
13 which is required.
13 which is required.
14 * Python 3.5+ (to run the ``build.py`` script)
14 * Python 3.5+ (to run the ``packaging.py`` script)
15
15
16 Building
16 Building
17 ========
17 ========
18
18
19 The ``build.py`` script automates the process of producing an
19 The ``packaging.py`` script automates the process of producing an
20 Inno installer. It manages fetching and configuring the
20 Inno installer. It manages fetching and configuring the
21 non-system dependencies (such as py2exe, gettext, and various
21 non-system dependencies (such as py2exe, gettext, and various
22 Python packages).
22 Python packages).
@@ -31,11 +31,11 b' either ``Visual C++ 2008 32-bit Command '
31 From the prompt, change to the Mercurial source directory. e.g.
31 From the prompt, change to the Mercurial source directory. e.g.
32 ``cd c:\src\hg``.
32 ``cd c:\src\hg``.
33
33
34 Next, invoke ``build.py`` to produce an Inno installer. You will
34 Next, invoke ``packaging.py`` to produce an Inno installer. You will
35 need to supply the path to the Python interpreter to use.::
35 need to supply the path to the Python interpreter to use.::
36
36
37 $ python3.exe contrib\packaging\inno\build.py \
37 $ python3.exe contrib\packaging\packaging.py \
38 --python c:\python27\python.exe
38 inno --python c:\python27\python.exe
39
39
40 .. note::
40 .. note::
41
41
@@ -49,13 +49,13 b' configured into the ``build`` sub-direct'
49 and an installer placed in the ``dist`` sub-directory. The final
49 and an installer placed in the ``dist`` sub-directory. The final
50 line of output should print the name of the generated installer.
50 line of output should print the name of the generated installer.
51
51
52 Additional options may be configured. Run ``build.py --help`` to
52 Additional options may be configured. Run
53 see a list of program flags.
53 ``packaging.py inno --help`` to see a list of program flags.
54
54
55 MinGW
55 MinGW
56 =====
56 =====
57
57
58 It is theoretically possible to generate an installer that uses
58 It is theoretically possible to generate an installer that uses
59 MinGW. This isn't well tested, and ``packaging.py`` may not properly
59 MinGW. This isn't well tested and ``packaging.py`` and may properly
60 support it. See old versions of this file in version control for
60 support it. See old versions of this file in version control for
61 potentially useful hints as to how to achieve this.
61 potentially useful hints as to how to achieve this.
@@ -8,65 +8,6 b' certifi==2019.9.11 \\'
8 --hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \
8 --hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \
9 --hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef \
9 --hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef \
10 # via dulwich
10 # via dulwich
11 cffi==1.13.1 \
12 --hash=sha256:00d890313797d9fe4420506613384b43099ad7d2b905c0752dbcc3a6f14d80fa \
13 --hash=sha256:0cf9e550ac6c5e57b713437e2f4ac2d7fd0cd10336525a27224f5fc1ec2ee59a \
14 --hash=sha256:0ea23c9c0cdd6778146a50d867d6405693ac3b80a68829966c98dd5e1bbae400 \
15 --hash=sha256:193697c2918ecdb3865acf6557cddf5076bb39f1f654975e087b67efdff83365 \
16 --hash=sha256:1ae14b542bf3b35e5229439c35653d2ef7d8316c1fffb980f9b7647e544baa98 \
17 --hash=sha256:1e389e069450609c6ffa37f21f40cce36f9be7643bbe5051ab1de99d5a779526 \
18 --hash=sha256:263242b6ace7f9cd4ea401428d2d45066b49a700852334fd55311bde36dcda14 \
19 --hash=sha256:33142ae9807665fa6511cfa9857132b2c3ee6ddffb012b3f0933fc11e1e830d5 \
20 --hash=sha256:364f8404034ae1b232335d8c7f7b57deac566f148f7222cef78cf8ae28ef764e \
21 --hash=sha256:47368f69fe6529f8f49a5d146ddee713fc9057e31d61e8b6dc86a6a5e38cecc1 \
22 --hash=sha256:4895640844f17bec32943995dc8c96989226974dfeb9dd121cc45d36e0d0c434 \
23 --hash=sha256:558b3afef987cf4b17abd849e7bedf64ee12b28175d564d05b628a0f9355599b \
24 --hash=sha256:5ba86e1d80d458b338bda676fd9f9d68cb4e7a03819632969cf6d46b01a26730 \
25 --hash=sha256:63424daa6955e6b4c70dc2755897f5be1d719eabe71b2625948b222775ed5c43 \
26 --hash=sha256:6381a7d8b1ebd0bc27c3bc85bc1bfadbb6e6f756b4d4db0aa1425c3719ba26b4 \
27 --hash=sha256:6381ab708158c4e1639da1f2a7679a9bbe3e5a776fc6d1fd808076f0e3145331 \
28 --hash=sha256:6fd58366747debfa5e6163ada468a90788411f10c92597d3b0a912d07e580c36 \
29 --hash=sha256:728ec653964655d65408949b07f9b2219df78badd601d6c49e28d604efe40599 \
30 --hash=sha256:7cfcfda59ef1f95b9f729c56fe8a4041899f96b72685d36ef16a3440a0f85da8 \
31 --hash=sha256:819f8d5197c2684524637f940445c06e003c4a541f9983fd30d6deaa2a5487d8 \
32 --hash=sha256:825ecffd9574557590e3225560a8a9d751f6ffe4a49e3c40918c9969b93395fa \
33 --hash=sha256:9009e917d8f5ef780c2626e29b6bc126f4cb2a4d43ca67aa2b40f2a5d6385e78 \
34 --hash=sha256:9c77564a51d4d914ed5af096cd9843d90c45b784b511723bd46a8a9d09cf16fc \
35 --hash=sha256:a19089fa74ed19c4fe96502a291cfdb89223a9705b1d73b3005df4256976142e \
36 --hash=sha256:a40ed527bffa2b7ebe07acc5a3f782da072e262ca994b4f2085100b5a444bbb2 \
37 --hash=sha256:bb75ba21d5716abc41af16eac1145ab2e471deedde1f22c6f99bd9f995504df0 \
38 --hash=sha256:e22a00c0c81ffcecaf07c2bfb3672fa372c50e2bd1024ffee0da191c1b27fc71 \
39 --hash=sha256:e55b5a746fb77f10c83e8af081979351722f6ea48facea79d470b3731c7b2891 \
40 --hash=sha256:ec2fa3ee81707a5232bf2dfbd6623fdb278e070d596effc7e2d788f2ada71a05 \
41 --hash=sha256:fd82eb4694be712fcae03c717ca2e0fc720657ac226b80bbb597e971fc6928c2 \
42 # via cryptography
43 configparser==4.0.2 \
44 --hash=sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c \
45 --hash=sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df \
46 # via entrypoints
47 cryptography==2.8 \
48 --hash=sha256:02079a6addc7b5140ba0825f542c0869ff4df9a69c360e339ecead5baefa843c \
49 --hash=sha256:1df22371fbf2004c6f64e927668734070a8953362cd8370ddd336774d6743595 \
50 --hash=sha256:369d2346db5934345787451504853ad9d342d7f721ae82d098083e1f49a582ad \
51 --hash=sha256:3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651 \
52 --hash=sha256:44ff04138935882fef7c686878e1c8fd80a723161ad6a98da31e14b7553170c2 \
53 --hash=sha256:4b1030728872c59687badcca1e225a9103440e467c17d6d1730ab3d2d64bfeff \
54 --hash=sha256:58363dbd966afb4f89b3b11dfb8ff200058fbc3b947507675c19ceb46104b48d \
55 --hash=sha256:6ec280fb24d27e3d97aa731e16207d58bd8ae94ef6eab97249a2afe4ba643d42 \
56 --hash=sha256:7270a6c29199adc1297776937a05b59720e8a782531f1f122f2eb8467f9aab4d \
57 --hash=sha256:73fd30c57fa2d0a1d7a49c561c40c2f79c7d6c374cc7750e9ac7c99176f6428e \
58 --hash=sha256:7f09806ed4fbea8f51585231ba742b58cbcfbfe823ea197d8c89a5e433c7e912 \
59 --hash=sha256:90df0cc93e1f8d2fba8365fb59a858f51a11a394d64dbf3ef844f783844cc793 \
60 --hash=sha256:971221ed40f058f5662a604bd1ae6e4521d84e6cad0b7b170564cc34169c8f13 \
61 --hash=sha256:a518c153a2b5ed6b8cc03f7ae79d5ffad7315ad4569b2d5333a13c38d64bd8d7 \
62 --hash=sha256:b0de590a8b0979649ebeef8bb9f54394d3a41f66c5584fff4220901739b6b2f0 \
63 --hash=sha256:b43f53f29816ba1db8525f006fa6f49292e9b029554b3eb56a189a70f2a40879 \
64 --hash=sha256:d31402aad60ed889c7e57934a03477b572a03af7794fa8fb1780f21ea8f6551f \
65 --hash=sha256:de96157ec73458a7f14e3d26f17f8128c959084931e8997b9e655a39c8fde9f9 \
66 --hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 \
67 --hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \
68 --hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8 \
69 # via secretstorage
70 docutils==0.15.2 \
11 docutils==0.15.2 \
71 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
12 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
72 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
13 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
@@ -85,35 +26,16 b' entrypoints==0.3 \\'
85 --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \
26 --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \
86 --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \
27 --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \
87 # via keyring
28 # via keyring
88 enum34==1.1.6 \
89 --hash=sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850 \
90 --hash=sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a \
91 --hash=sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79 \
92 --hash=sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1 \
93 # via cryptography
94 ipaddress==1.0.23 \
95 --hash=sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc \
96 --hash=sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2 \
97 # via cryptography
98 keyring==18.0.1 \
29 keyring==18.0.1 \
99 --hash=sha256:67d6cc0132bd77922725fae9f18366bb314fd8f95ff4d323a4df41890a96a838 \
30 --hash=sha256:67d6cc0132bd77922725fae9f18366bb314fd8f95ff4d323a4df41890a96a838 \
100 --hash=sha256:7b29ebfcf8678c4da531b2478a912eea01e80007e5ddca9ee0c7038cb3489ec6
31 --hash=sha256:7b29ebfcf8678c4da531b2478a912eea01e80007e5ddca9ee0c7038cb3489ec6
101 pycparser==2.19 \
102 --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \
103 # via cffi
104 pygments==2.4.2 \
32 pygments==2.4.2 \
105 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
33 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
106 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
34 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
107 pywin32-ctypes==0.2.0 \
35 pywin32-ctypes==0.2.0 \
108 --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
36 --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
109 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
37 --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \
110 secretstorage==2.3.1 \
111 --hash=sha256:3af65c87765323e6f64c83575b05393f9e003431959c9395d1791d51497f29b6 \
112 # via keyring
38 # via keyring
113 six==1.12.0 \
114 --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
115 --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
116 # via cryptography
117 urllib3==1.25.6 \
39 urllib3==1.25.6 \
118 --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \
40 --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \
119 --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \
41 --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \
@@ -4,49 +4,9 b''
4 and replace 'Mercurial' in this notice with the name of
4 and replace 'Mercurial' in this notice with the name of
5 your project. Component GUIDs have global namespace! -->
5 your project. Component GUIDs have global namespace! -->
6
6
7 <!-- contrib.wxs -->
8 <?define contrib.guid = {4E11FFC2-E2F7-482A-8460-9394B5489F02} ?>
9 <?define contrib.vim.guid = {BB04903A-652D-4C4F-9590-2BD07A2304F2} ?>
10
11 <!-- dist.wxs -->
12 <?define dist.guid = {CE405FE6-CD1E-4873-9C9A-7683AE5A3D90} ?>
13 <?define lib.guid = {877633b5-0b7e-4b46-8f1c-224a61733297} ?>
14
15 <!-- doc.wxs -->
16 <?define doc.hg.1.html.guid = {AAAA3FDA-EDC5-4220-B59D-D342722358A2} ?>
17 <?define doc.hgignore.5.html.guid = {AA9118C4-F3A0-4429-A5F4-5A1906B2D67F} ?>
18 <?define doc.hgrc.5.html = {E0CEA1EB-FA01-408c-844B-EE5965165BAE} ?>
19 <?define doc.style.css = {172F8262-98E0-4711-BD39-4DAE0D77EF05} ?>
20
21 <!-- help.wxs -->
22 <?define help.root.guid = {9FA957DB-6DFE-44f2-AD03-293B2791CF17} ?>
23 <?define help.internals.guid = {2DD7669D-0DB8-4C39-9806-78E6475E7ACC} ?>
24
25 <!-- i18n.wxs -->
26 <?define i18nFolder.guid = {1BF8026D-CF7C-4174-AEE6-D6B7BF119248} ?>
27
28 <!-- templates.wxs -->
29 <?define templates.root.guid = {437FD55C-7756-4EA0-87E5-FDBE75DC8595} ?>
30 <?define templates.atom.guid = {D30E14A5-8AF0-4268-8B00-00BEE9E09E39} ?>
31 <?define templates.coal.guid = {B63CCAAB-4EAF-43b4-901E-4BD13F5B78FC} ?>
32 <?define templates.gitweb.guid = {827334AF-1EFD-421B-962C-5660A068F612} ?>
33 <?define templates.json.guid = {F535BE7A-EC34-46E0-B9BE-013F3DBAFB19} ?>
34 <?define templates.monoblue.guid = {8060A1E4-BD4C-453E-92CB-9536DC44A9E3} ?>
35 <?define templates.paper.guid = {61AB1DE9-645F-46ED-8AF8-0CF02267FFBB} ?>
36 <?define templates.raw.guid = {834DF8D7-9784-43A6-851D-A96CE1B3575B} ?>
37 <?define templates.rss.guid = {9338FA09-E128-4B1C-B723-1142DBD09E14} ?>
38 <?define templates.spartan.guid = {80222625-FA8F-44b1-86CE-1781EF375D09} ?>
39 <?define templates.static.guid = {6B3D7C24-98DA-4B67-9F18-35F77357B0B4} ?>
40
41 <!-- mercurial.wxs -->
7 <!-- mercurial.wxs -->
42 <?define ProductUpgradeCode = {A1CC6134-E945-4399-BE36-EB0017FDF7CF} ?>
8 <?define ProductUpgradeCode = {A1CC6134-E945-4399-BE36-EB0017FDF7CF} ?>
43
44 <?define ComponentMainExecutableGUID = {D102B8FA-059B-4ACC-9FA3-8C78C3B58EEF} ?>
9 <?define ComponentMainExecutableGUID = {D102B8FA-059B-4ACC-9FA3-8C78C3B58EEF} ?>
45
46 <?define ReadMe.guid = {56A8E372-991D-4DCA-B91D-93D775974CF5} ?>
47 <?define COPYING.guid = {B7801DBA-1C49-4BF4-91AD-33C65F5C7895} ?>
48 <?define mercurial.rc.guid = {1D5FAEEE-7E6E-43B1-9F7F-802714316B15} ?>
49 <?define mergetools.rc.guid = {E8A1DC29-FF40-4B5F-BD12-80B9F7BF0CCD} ?>
50 <?define ProgramMenuDir.guid = {D5A63320-1238-489B-B68B-CF053E9577CA} ?>
10 <?define ProgramMenuDir.guid = {D5A63320-1238-489B-B68B-CF053E9577CA} ?>
51
11
52 </Include>
12 </Include>
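Aside, not taken from the changeset: the hand-maintained component GUIDs removed above become unnecessary once the packaging tooling derives them deterministically from the installed paths. A minimal Python sketch of that idea (the namespace UUID and example path are placeholders, not Mercurial's actual values)::

    import uuid

    # Any fixed namespace works as long as it never changes between builds;
    # this particular value is a made-up placeholder.
    _NAMESPACE = uuid.UUID("6d2e4b5a-0c1f-4f52-9d6e-3a9b8f1c2d4e")

    def component_guid(relative_install_path):
        # uuid5 hashes the path into a stable GUID, so rebuilding the
        # installer reuses the same component identity without keeping a
        # hand-edited list like the one deleted above.
        return "{%s}" % str(uuid.uuid5(_NAMESPACE, relative_install_path)).upper()

    print(component_guid("templates/atom/map"))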
@@ -60,30 +60,10 b''
60 <Directory Id='$(var.PFolder)' Name='PFiles'>
60 <Directory Id='$(var.PFolder)' Name='PFiles'>
61 <Directory Id='INSTALLDIR' Name='Mercurial'>
61 <Directory Id='INSTALLDIR' Name='Mercurial'>
62 <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'>
62 <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'>
63 <File Id='hgEXE' Name='hg.exe' Source='dist\hg.exe' KeyPath='yes' />
63 <CreateFolder />
64 <Environment Id="Environment" Name="PATH" Part="last" System="yes"
64 <Environment Id="Environment" Name="PATH" Part="last" System="yes"
65 Permanent="no" Value="[INSTALLDIR]" Action="set" />
65 Permanent="no" Value="[INSTALLDIR]" Action="set" />
66 </Component>
66 </Component>
67 <Component Id='ReadMe' Guid='$(var.ReadMe.guid)' Win64='$(var.IsX64)'>
68 <File Id='ReadMe' Name='ReadMe.html' Source='contrib\win32\ReadMe.html'
69 KeyPath='yes'/>
70 </Component>
71 <Component Id='COPYING' Guid='$(var.COPYING.guid)' Win64='$(var.IsX64)'>
72 <File Id='COPYING' Name='COPYING.rtf' Source='contrib\packaging\wix\COPYING.rtf'
73 KeyPath='yes'/>
74 </Component>
75
76 <Directory Id='HGRCD' Name='hgrc.d'>
77 <Component Id='mercurial.rc' Guid='$(var.mercurial.rc.guid)' Win64='$(var.IsX64)'>
78 <File Id='mercurial.rc' Name='Mercurial.rc' Source='contrib\win32\mercurial.ini'
79 ReadOnly='yes' KeyPath='yes'/>
80 </Component>
81 <Component Id='mergetools.rc' Guid='$(var.mergetools.rc.guid)' Win64='$(var.IsX64)'>
82 <File Id='mergetools.rc' Name='MergeTools.rc' Source='mercurial\default.d\mergetools.rc'
83 ReadOnly='yes' KeyPath='yes'/>
84 </Component>
85 </Directory>
86
87 </Directory>
67 </Directory>
88 </Directory>
68 </Directory>
89
69
@@ -117,15 +97,12 b''
117 <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app'
97 <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app'
118 Level='1' Absent='disallow' >
98 Level='1' Absent='disallow' >
119 <ComponentRef Id='MainExecutable' />
99 <ComponentRef Id='MainExecutable' />
120 <ComponentRef Id='distOutput' />
121 <ComponentRef Id='libOutput' />
122 <ComponentRef Id='ProgramMenuDir' />
100 <ComponentRef Id='ProgramMenuDir' />
123 <ComponentRef Id='ReadMe' />
101 <ComponentGroupRef Id="hg.group.ROOT" />
124 <ComponentRef Id='COPYING' />
102 <ComponentGroupRef Id="hg.group.hgrc.d" />
125 <ComponentRef Id='mercurial.rc' />
103 <ComponentGroupRef Id="hg.group.helptext" />
126 <ComponentRef Id='mergetools.rc' />
104 <ComponentGroupRef Id="hg.group.lib" />
127 <ComponentGroupRef Id='helpFolder' />
105 <ComponentGroupRef Id="hg.group.templates" />
128 <ComponentGroupRef Id='templatesFolder' />
129 <MergeRef Id='VCRuntime' />
106 <MergeRef Id='VCRuntime' />
130 <MergeRef Id='VCRuntimePolicy' />
107 <MergeRef Id='VCRuntimePolicy' />
131 </Feature>
108 </Feature>
@@ -135,14 +112,13 b''
135 <?endforeach?>
112 <?endforeach?>
136 <?endif?>
113 <?endif?>
137 <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'>
114 <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'>
138 <ComponentGroupRef Id='localeFolder' />
115 <ComponentGroupRef Id="hg.group.locale" />
139 <ComponentRef Id='i18nFolder' />
140 </Feature>
116 </Feature>
141 <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'>
117 <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'>
142 <ComponentGroupRef Id='docFolder' />
118 <ComponentGroupRef Id="hg.group.doc" />
143 </Feature>
119 </Feature>
144 <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'>
120 <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'>
145 <ComponentGroupRef Id='contribFolder' />
121 <ComponentGroupRef Id="hg.group.contrib" />
146 </Feature>
122 </Feature>
147 </Feature>
123 </Feature>
148
124
@@ -18,12 +18,12 b' dependencies must be installed:'
18 * Python 2.7 (download from https://www.python.org/downloads/)
18 * Python 2.7 (download from https://www.python.org/downloads/)
19 * Microsoft Visual C++ Compiler for Python 2.7
19 * Microsoft Visual C++ Compiler for Python 2.7
20 (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
20 (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
21 * Python 3.5+ (to run the ``build.py`` script)
21 * Python 3.5+ (to run the ``packaging.py`` script)
22
22
23 Building
23 Building
24 ========
24 ========
25
25
26 The ``build.py`` script automates the process of producing an MSI
26 The ``packaging.py`` script automates the process of producing an MSI
27 installer. It manages fetching and configuring non-system dependencies
27 installer. It manages fetching and configuring non-system dependencies
28 (such as py2exe, gettext, and various Python packages).
28 (such as py2exe, gettext, and various Python packages).
29
29
@@ -37,11 +37,11 b' launch either ``Visual C++ 2008 32-bit C'
37 From the prompt, change to the Mercurial source directory. e.g.
37 From the prompt, change to the Mercurial source directory. e.g.
38 ``cd c:\src\hg``.
38 ``cd c:\src\hg``.
39
39
40 Next, invoke ``build.py`` to produce an MSI installer. You will need
40 Next, invoke ``packaging.py`` to produce an MSI installer. You will need
41 to supply the path to the Python interpreter to use.::
41 to supply the path to the Python interpreter to use.::
42
42
43 $ python3 contrib\packaging\wix\build.py \
43 $ python3 contrib\packaging\packaging.py \
44 --python c:\python27\python.exe
44 wix --python c:\python27\python.exe
45
45
46 .. note::
46 .. note::
47
47
@@ -54,8 +54,8 b' configured into the ``build`` sub-direct'
54 and an installer placed in the ``dist`` sub-directory. The final line
54 and an installer placed in the ``dist`` sub-directory. The final line
55 of output should print the name of the generated installer.
55 of output should print the name of the generated installer.
56
56
57 Additional options may be configured. Run ``build.py --help`` to see
57 Additional options may be configured. Run ``packaging.py wix --help`` to
58 a list of program flags.
58 see a list of program flags.
59
59
60 Relationship to TortoiseHG
60 Relationship to TortoiseHG
61 ==========================
61 ==========================
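Aside, not part of the changeset: the build step documented above can also be driven from a small Python script, which is convenient for CI jobs; the interpreter paths below are examples only::

    import subprocess

    # Mirrors the command shown in the readme hunk above; adjust the
    # Python 2.7 path for the machine doing the build, and run from the
    # Mercurial source directory.
    subprocess.check_call([
        "python3", r"contrib\packaging\packaging.py",
        "wix", "--python", r"c:\python27\python.exe",
    ])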
@@ -1,13 +1,13 b''
1 #
1 #
2 # This file is autogenerated by pip-compile
2 # This file is autogenerated by pip-compile
3 # To update, run:
3 # To update, run:
4 #
4 #
5 # pip-compile --generate-hashes --output-file=contrib/packaging/wix/requirements.txt contrib/packaging/wix/requirements.txt.in
5 # pip-compile --generate-hashes --output-file=contrib/packaging/wix/requirements.txt contrib/packaging/wix/requirements.txt.in
6 #
6 #
7 docutils==0.15.2 \
7 docutils==0.15.2 \
8 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
8 --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
9 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
9 --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
10 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
10 --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
11 pygments==2.4.2 \
11 pygments==2.4.2 \
12 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
12 --hash=sha256:71e430bc85c88a430f000ac1d9b331d2407f681d6f6aec95e8bcfbc3df5b0127 \
13 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
13 --hash=sha256:881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297
@@ -1,2 +1,2 b''
1 docutils
1 docutils
2 pygments
2 pygments
@@ -726,8 +726,8 b' def clearfilecache(obj, attrname):'
726
726
727 def clearchangelog(repo):
727 def clearchangelog(repo):
728 if repo is not repo.unfiltered():
728 if repo is not repo.unfiltered():
729 object.__setattr__(repo, r'_clcachekey', None)
729 object.__setattr__(repo, '_clcachekey', None)
730 object.__setattr__(repo, r'_clcache', None)
730 object.__setattr__(repo, '_clcache', None)
731 clearfilecache(repo.unfiltered(), 'changelog')
731 clearfilecache(repo.unfiltered(), 'changelog')
732
732
733
733
@@ -760,7 +760,10 b' def perfannotate(ui, repo, f, **opts):'
760
760
761 @command(
761 @command(
762 b'perfstatus',
762 b'perfstatus',
763 [(b'u', b'unknown', False, b'ask status to look for unknown files')]
763 [
764 (b'u', b'unknown', False, b'ask status to look for unknown files'),
765 (b'', b'dirstate', False, b'benchmark the internal dirstate call'),
766 ]
764 + formatteropts,
767 + formatteropts,
765 )
768 )
766 def perfstatus(ui, repo, **opts):
769 def perfstatus(ui, repo, **opts):
@@ -776,7 +779,20 b' def perfstatus(ui, repo, **opts):'
776 # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
779 # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
777 # False))))
780 # False))))
778 timer, fm = gettimer(ui, opts)
781 timer, fm = gettimer(ui, opts)
779 timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown']))))
782 if opts[b'dirstate']:
783 dirstate = repo.dirstate
784 m = scmutil.matchall(repo)
785 unknown = opts[b'unknown']
786
787 def status_dirstate():
788 s = dirstate.status(
789 m, subrepos=[], ignored=False, clean=False, unknown=unknown
790 )
791 sum(map(bool, s))
792
793 timer(status_dirstate)
794 else:
795 timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown']))))
780 fm.end()
796 fm.end()
781
797
782
798
@@ -804,6 +820,7 b' def clearcaches(cl):'
804 if util.safehasattr(cl, b'clearcaches'):
820 if util.safehasattr(cl, b'clearcaches'):
805 cl.clearcaches()
821 cl.clearcaches()
806 elif util.safehasattr(cl, b'_nodecache'):
822 elif util.safehasattr(cl, b'_nodecache'):
823 # <= hg-5.2
807 from mercurial.node import nullid, nullrev
824 from mercurial.node import nullid, nullrev
808
825
809 cl._nodecache = {nullid: nullrev}
826 cl._nodecache = {nullid: nullrev}
@@ -1404,13 +1421,15 b' def perfphasesremote(ui, repo, dest=None'
1404 else:
1421 else:
1405 ui.statusnoi18n(b'publishing: no\n')
1422 ui.statusnoi18n(b'publishing: no\n')
1406
1423
1407 nodemap = repo.changelog.nodemap
1424 has_node = getattr(repo.changelog.index, 'has_node', None)
1425 if has_node is None:
1426 has_node = repo.changelog.nodemap.__contains__
1408 nonpublishroots = 0
1427 nonpublishroots = 0
1409 for nhex, phase in remotephases.iteritems():
1428 for nhex, phase in remotephases.iteritems():
1410 if nhex == b'publishing': # ignore data related to publish option
1429 if nhex == b'publishing': # ignore data related to publish option
1411 continue
1430 continue
1412 node = bin(nhex)
1431 node = bin(nhex)
1413 if node in nodemap and int(phase):
1432 if has_node(node) and int(phase):
1414 nonpublishroots += 1
1433 nonpublishroots += 1
1415 ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases))
1434 ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases))
1416 ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots)
1435 ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots)
@@ -1610,7 +1629,11 b' def perfnodemap(ui, repo, **opts):'
1610 def setnodeget():
1629 def setnodeget():
1611 # probably not necessary, but for good measure
1630 # probably not necessary, but for good measure
1612 clearchangelog(unfi)
1631 clearchangelog(unfi)
1613 nodeget[0] = makecl(unfi).nodemap.get
1632 cl = makecl(unfi)
1633 if util.safehasattr(cl.index, 'get_rev'):
1634 nodeget[0] = cl.index.get_rev
1635 else:
1636 nodeget[0] = cl.nodemap.get
1614
1637
1615 def d():
1638 def d():
1616 get = nodeget[0]
1639 get = nodeget[0]
@@ -1636,13 +1659,13 b' def perfstartup(ui, repo, **opts):'
1636 timer, fm = gettimer(ui, opts)
1659 timer, fm = gettimer(ui, opts)
1637
1660
1638 def d():
1661 def d():
1639 if os.name != r'nt':
1662 if os.name != 'nt':
1640 os.system(
1663 os.system(
1641 b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0])
1664 b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0])
1642 )
1665 )
1643 else:
1666 else:
1644 os.environ[r'HGRCPATH'] = r' '
1667 os.environ['HGRCPATH'] = r' '
1645 os.system(r"%s version -q > NUL" % sys.argv[0])
1668 os.system("%s version -q > NUL" % sys.argv[0])
1646
1669
1647 timer(d)
1670 timer(d)
1648 fm.end()
1671 fm.end()
@@ -1828,7 +1851,7 b' def perftemplating(ui, repo, testedtempl'
1828 opts = _byteskwargs(opts)
1851 opts = _byteskwargs(opts)
1829
1852
1830 nullui = ui.copy()
1853 nullui = ui.copy()
1831 nullui.fout = open(os.devnull, r'wb')
1854 nullui.fout = open(os.devnull, 'wb')
1832 nullui.disablepager()
1855 nullui.disablepager()
1833 revs = opts.get(b'rev')
1856 revs = opts.get(b'rev')
1834 if not revs:
1857 if not revs:
@@ -1855,7 +1878,6 b' def perftemplating(ui, repo, testedtempl'
1855
1878
1856
1879
1857 def _displaystats(ui, opts, entries, data):
1880 def _displaystats(ui, opts, entries, data):
1858 pass
1859 # use a second formatter because the data are quite different, not sure
1881 # use a second formatter because the data are quite different, not sure
1860 # how it flies with the templater.
1882 # how it flies with the templater.
1861 fm = ui.formatter(b'perf-stats', opts)
1883 fm = ui.formatter(b'perf-stats', opts)
@@ -2025,8 +2047,8 b' def perfhelpermergecopies(ui, repo, revs'
2025 data['p1.time'] = end - begin
2047 data['p1.time'] = end - begin
2026 begin = util.timer()
2048 begin = util.timer()
2027 p2renames = copies.pathcopies(b, p2)
2049 p2renames = copies.pathcopies(b, p2)
2050 end = util.timer()
2028 data['p2.time'] = end - begin
2051 data['p2.time'] = end - begin
2029 end = util.timer()
2030 data['p1.renamedfiles'] = len(p1renames)
2052 data['p1.renamedfiles'] = len(p1renames)
2031 data['p2.renamedfiles'] = len(p2renames)
2053 data['p2.renamedfiles'] = len(p2renames)
2032
2054
@@ -2198,9 +2220,6 b' def perfhelperpathcopies(ui, repo, revs='
2198
2220
2199 fm.end()
2221 fm.end()
2200 if dostats:
2222 if dostats:
2201 # use a second formatter because the data are quite different, not sure
2202 # how it flies with the templater.
2203 fm = ui.formatter(b'perf', opts)
2204 entries = [
2223 entries = [
2205 ('nbrevs', 'number of revision covered'),
2224 ('nbrevs', 'number of revision covered'),
2206 ('nbmissingfiles', 'number of missing files at head'),
2225 ('nbmissingfiles', 'number of missing files at head'),
@@ -2576,25 +2595,38 b' def perfrevlogindex(ui, repo, file_=None'
2576 index[rev]
2595 index[rev]
2577
2596
2578 def resolvenode(node):
2597 def resolvenode(node):
2579 nodemap = revlogio.parseindex(data, inline)[1]
2598 index = revlogio.parseindex(data, inline)[0]
2580 # This only works for the C code.
2599 rev = getattr(index, 'rev', None)
2581 if nodemap is None:
2600 if rev is None:
2582 return
2601 nodemap = getattr(
2602 revlogio.parseindex(data, inline)[0], 'nodemap', None
2603 )
2604 # This only works for the C code.
2605 if nodemap is None:
2606 return
2607 rev = nodemap.__getitem__
2583
2608
2584 try:
2609 try:
2585 nodemap[node]
2610 rev(node)
2586 except error.RevlogError:
2611 except error.RevlogError:
2587 pass
2612 pass
2588
2613
2589 def resolvenodes(nodes, count=1):
2614 def resolvenodes(nodes, count=1):
2590 nodemap = revlogio.parseindex(data, inline)[1]
2615 index = revlogio.parseindex(data, inline)[0]
2591 if nodemap is None:
2616 rev = getattr(index, 'rev', None)
2592 return
2617 if rev is None:
2618 nodemap = getattr(
2619 revlogio.parseindex(data, inline)[0], 'nodemap', None
2620 )
2621 # This only works for the C code.
2622 if nodemap is None:
2623 return
2624 rev = nodemap.__getitem__
2593
2625
2594 for i in range(count):
2626 for i in range(count):
2595 for node in nodes:
2627 for node in nodes:
2596 try:
2628 try:
2597 nodemap[node]
2629 rev(node)
2598 except error.RevlogError:
2630 except error.RevlogError:
2599 pass
2631 pass
2600
2632
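Aside, not code from the changeset: the perf.py hunks above all apply the same compatibility pattern, probing the post-5.3 revlog index API with getattr() and falling back to the older nodemap when it is absent. Condensed into a single standalone helper::

    def node_lookup_funcs(changelog):
        """Return (has_node, get_rev) callables that work on old and new hg."""
        # hg >= 5.3 exposes membership and lookup directly on the index object.
        has_node = getattr(changelog.index, 'has_node', None)
        get_rev = getattr(changelog.index, 'get_rev', None)
        if has_node is None:
            # <= hg-5.2: fall back to the nodemap mapping.
            has_node = changelog.nodemap.__contains__
        if get_rev is None:
            get_rev = changelog.nodemap.get
        return has_node, get_rev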
@@ -43,13 +43,18 b' Actions Blocking Release'
43 * Support modifying compression parameters mid operation when supported by
43 * Support modifying compression parameters mid operation when supported by
44 zstd API.
44 zstd API.
45 * Expose ``ZSTD_CLEVEL_DEFAULT`` constant.
45 * Expose ``ZSTD_CLEVEL_DEFAULT`` constant.
46 * Expose ``ZSTD_SRCSIZEHINT_{MIN,MAX}`` constants.
46 * Support ``ZSTD_p_forceAttachDict`` compression parameter.
47 * Support ``ZSTD_p_forceAttachDict`` compression parameter.
47 * Support ``ZSTD_c_literalCompressionMode `` compression parameter.
48 * Support ``ZSTD_dictForceLoad`` dictionary compression parameter.
49 * Support ``ZSTD_c_targetCBlockSize`` compression parameter.
50 * Support ``ZSTD_c_literalCompressionMode`` compression parameter.
51 * Support ``ZSTD_c_srcSizeHint`` compression parameter.
48 * Use ``ZSTD_CCtx_getParameter()``/``ZSTD_CCtxParam_getParameter()`` for retrieving
52 * Use ``ZSTD_CCtx_getParameter()``/``ZSTD_CCtxParam_getParameter()`` for retrieving
49 compression parameters.
53 compression parameters.
50 * Consider exposing ``ZSTDMT_toFlushNow()``.
54 * Consider exposing ``ZSTDMT_toFlushNow()``.
51 * Expose ``ZDICT_trainFromBuffer_fastCover()``,
55 * Expose ``ZDICT_trainFromBuffer_fastCover()``,
52 ``ZDICT_optimizeTrainFromBuffer_fastCover``.
56 ``ZDICT_optimizeTrainFromBuffer_fastCover``.
57 * Expose ``ZSTD_Sequence`` struct and related ``ZSTD_getSequences()`` API.
53 * Expose and enforce ``ZSTD_minCLevel()`` for minimum compression level.
58 * Expose and enforce ``ZSTD_minCLevel()`` for minimum compression level.
54 * Consider a ``chunker()`` API for decompression.
59 * Consider a ``chunker()`` API for decompression.
55 * Consider stats for ``chunker()`` API, including finding the last consumed
60 * Consider stats for ``chunker()`` API, including finding the last consumed
@@ -67,6 +72,20 b' Other Actions Not Blocking Release'
67 * API for ensuring max memory ceiling isn't exceeded.
72 * API for ensuring max memory ceiling isn't exceeded.
68 * Move off nose for testing.
73 * Move off nose for testing.
69
74
75 0.13.0 (released 2019-12-28)
76 ============================
77
78 Changes
79 -------
80
81 * ``pytest-xdist`` ``pytest`` extension is now installed so tests can be
82 run in parallel.
83 * CI now builds ``manylinux2010`` and ``manylinux2014`` binary wheels
84 instead of a mix of ``manylinux2010`` and ``manylinux1``.
85 * Official support for Python 3.8 has been added.
86 * Bundled zstandard library upgraded from 1.4.3 to 1.4.4.
87 * Python code has been reformatted with black.
88
70 0.12.0 (released 2019-09-15)
89 0.12.0 (released 2019-09-15)
71 ============================
90 ============================
72
91
@@ -20,7 +20,7 b' https://github.com/indygreg/python-zstan'
20 Requirements
20 Requirements
21 ============
21 ============
22
22
23 This extension is designed to run with Python 2.7, 3.4, 3.5, 3.6, and 3.7
23 This extension is designed to run with Python 2.7, 3.5, 3.6, 3.7, and 3.8
24 on common platforms (Linux, Windows, and OS X). On PyPy (both PyPy2 and PyPy3) we support version 6.0.0 and above.
24 on common platforms (Linux, Windows, and OS X). On PyPy (both PyPy2 and PyPy3) we support version 6.0.0 and above.
25 x86 and x86_64 are well-tested on Windows. Only x86_64 is well-tested on Linux and macOS.
25 x86 and x86_64 are well-tested on Windows. Only x86_64 is well-tested on Linux and macOS.
26
26
@@ -16,7 +16,7 b''
16 #include <zdict.h>
16 #include <zdict.h>
17
17
18 /* Remember to change the string in zstandard/__init__ as well */
18 /* Remember to change the string in zstandard/__init__ as well */
19 #define PYTHON_ZSTANDARD_VERSION "0.12.0"
19 #define PYTHON_ZSTANDARD_VERSION "0.13.0"
20
20
21 typedef enum {
21 typedef enum {
22 compressorobj_flush_finish,
22 compressorobj_flush_finish,
@@ -16,80 +16,82 b' import tempfile'
16
16
17 HERE = os.path.abspath(os.path.dirname(__file__))
17 HERE = os.path.abspath(os.path.dirname(__file__))
18
18
19 SOURCES = ['zstd/%s' % p for p in (
19 SOURCES = [
20 'common/debug.c',
20 "zstd/%s" % p
21 'common/entropy_common.c',
21 for p in (
22 'common/error_private.c',
22 "common/debug.c",
23 'common/fse_decompress.c',
23 "common/entropy_common.c",
24 'common/pool.c',
24 "common/error_private.c",
25 'common/threading.c',
25 "common/fse_decompress.c",
26 'common/xxhash.c',
26 "common/pool.c",
27 'common/zstd_common.c',
27 "common/threading.c",
28 'compress/fse_compress.c',
28 "common/xxhash.c",
29 'compress/hist.c',
29 "common/zstd_common.c",
30 'compress/huf_compress.c',
30 "compress/fse_compress.c",
31 'compress/zstd_compress.c',
31 "compress/hist.c",
32 'compress/zstd_compress_literals.c',
32 "compress/huf_compress.c",
33 'compress/zstd_compress_sequences.c',
33 "compress/zstd_compress.c",
34 'compress/zstd_double_fast.c',
34 "compress/zstd_compress_literals.c",
35 'compress/zstd_fast.c',
35 "compress/zstd_compress_sequences.c",
36 'compress/zstd_lazy.c',
36 "compress/zstd_double_fast.c",
37 'compress/zstd_ldm.c',
37 "compress/zstd_fast.c",
38 'compress/zstd_opt.c',
38 "compress/zstd_lazy.c",
39 'compress/zstdmt_compress.c',
39 "compress/zstd_ldm.c",
40 'decompress/huf_decompress.c',
40 "compress/zstd_opt.c",
41 'decompress/zstd_ddict.c',
41 "compress/zstdmt_compress.c",
42 'decompress/zstd_decompress.c',
42 "decompress/huf_decompress.c",
43 'decompress/zstd_decompress_block.c',
43 "decompress/zstd_ddict.c",
44 'dictBuilder/cover.c',
44 "decompress/zstd_decompress.c",
45 'dictBuilder/fastcover.c',
45 "decompress/zstd_decompress_block.c",
46 'dictBuilder/divsufsort.c',
46 "dictBuilder/cover.c",
47 'dictBuilder/zdict.c',
47 "dictBuilder/fastcover.c",
48 )]
48 "dictBuilder/divsufsort.c",
49 "dictBuilder/zdict.c",
50 )
51 ]
49
52
50 # Headers whose preprocessed output will be fed into cdef().
53 # Headers whose preprocessed output will be fed into cdef().
51 HEADERS = [os.path.join(HERE, 'zstd', *p) for p in (
54 HEADERS = [
52 ('zstd.h',),
55 os.path.join(HERE, "zstd", *p) for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
53 ('dictBuilder', 'zdict.h'),
56 ]
54 )]
55
57
56 INCLUDE_DIRS = [os.path.join(HERE, d) for d in (
58 INCLUDE_DIRS = [
57 'zstd',
59 os.path.join(HERE, d)
58 'zstd/common',
60 for d in (
59 'zstd/compress',
61 "zstd",
60 'zstd/decompress',
62 "zstd/common",
61 'zstd/dictBuilder',
63 "zstd/compress",
62 )]
64 "zstd/decompress",
65 "zstd/dictBuilder",
66 )
67 ]
63
68
64 # cffi can't parse some of the primitives in zstd.h. So we invoke the
69 # cffi can't parse some of the primitives in zstd.h. So we invoke the
65 # preprocessor and feed its output into cffi.
70 # preprocessor and feed its output into cffi.
66 compiler = distutils.ccompiler.new_compiler()
71 compiler = distutils.ccompiler.new_compiler()
67
72
68 # Needed for MSVC.
73 # Needed for MSVC.
69 if hasattr(compiler, 'initialize'):
74 if hasattr(compiler, "initialize"):
70 compiler.initialize()
75 compiler.initialize()
71
76
72 # Distutils doesn't set compiler.preprocessor, so invoke the preprocessor
77 # Distutils doesn't set compiler.preprocessor, so invoke the preprocessor
73 # manually.
78 # manually.
74 if compiler.compiler_type == 'unix':
79 if compiler.compiler_type == "unix":
75 args = list(compiler.executables['compiler'])
80 args = list(compiler.executables["compiler"])
76 args.extend([
81 args.extend(
77 '-E',
82 ["-E", "-DZSTD_STATIC_LINKING_ONLY", "-DZDICT_STATIC_LINKING_ONLY",]
78 '-DZSTD_STATIC_LINKING_ONLY',
83 )
79 '-DZDICT_STATIC_LINKING_ONLY',
84 elif compiler.compiler_type == "msvc":
80 ])
81 elif compiler.compiler_type == 'msvc':
82 args = [compiler.cc]
85 args = [compiler.cc]
83 args.extend([
86 args.extend(
84 '/EP',
87 ["/EP", "/DZSTD_STATIC_LINKING_ONLY", "/DZDICT_STATIC_LINKING_ONLY",]
85 '/DZSTD_STATIC_LINKING_ONLY',
88 )
86 '/DZDICT_STATIC_LINKING_ONLY',
87 ])
88 else:
89 else:
89 raise Exception('unsupported compiler type: %s' % compiler.compiler_type)
90 raise Exception("unsupported compiler type: %s" % compiler.compiler_type)
91
90
92
91 def preprocess(path):
93 def preprocess(path):
92 with open(path, 'rb') as fh:
94 with open(path, "rb") as fh:
93 lines = []
95 lines = []
94 it = iter(fh)
96 it = iter(fh)
95
97
@@ -104,32 +106,44 b' def preprocess(path):'
104 # We define ZSTD_STATIC_LINKING_ONLY, which is redundant with the inline
106 # We define ZSTD_STATIC_LINKING_ONLY, which is redundant with the inline
105 # #define in zstdmt_compress.h and results in a compiler warning. So drop
107 # #define in zstdmt_compress.h and results in a compiler warning. So drop
106 # the inline #define.
108 # the inline #define.
107 if l.startswith((b'#include <stddef.h>',
109 if l.startswith(
108 b'#include "zstd.h"',
110 (
109 b'#define ZSTD_STATIC_LINKING_ONLY')):
111 b"#include <stddef.h>",
112 b'#include "zstd.h"',
113 b"#define ZSTD_STATIC_LINKING_ONLY",
114 )
115 ):
110 continue
116 continue
111
117
118 # The preprocessor environment on Windows doesn't define include
119 # paths, so the #include of limits.h fails. We work around this
120 # by removing that import and defining INT_MAX ourselves. This is
121 # a bit hacky. But it gets the job done.
122 # TODO make limits.h work on Windows so we ensure INT_MAX is
123 # correct.
124 if l.startswith(b"#include <limits.h>"):
125 l = b"#define INT_MAX 2147483647\n"
126
112 # ZSTDLIB_API may not be defined if we dropped zstd.h. It isn't
127 # ZSTDLIB_API may not be defined if we dropped zstd.h. It isn't
113 # important so just filter it out.
128 # important so just filter it out.
114 if l.startswith(b'ZSTDLIB_API'):
129 if l.startswith(b"ZSTDLIB_API"):
115 l = l[len(b'ZSTDLIB_API '):]
130 l = l[len(b"ZSTDLIB_API ") :]
116
131
117 lines.append(l)
132 lines.append(l)
118
133
119 fd, input_file = tempfile.mkstemp(suffix='.h')
134 fd, input_file = tempfile.mkstemp(suffix=".h")
120 os.write(fd, b''.join(lines))
135 os.write(fd, b"".join(lines))
121 os.close(fd)
136 os.close(fd)
122
137
123 try:
138 try:
124 env = dict(os.environ)
139 env = dict(os.environ)
125 if getattr(compiler, '_paths', None):
140 if getattr(compiler, "_paths", None):
126 env['PATH'] = compiler._paths
141 env["PATH"] = compiler._paths
127 process = subprocess.Popen(args + [input_file], stdout=subprocess.PIPE,
142 process = subprocess.Popen(args + [input_file], stdout=subprocess.PIPE, env=env)
128 env=env)
129 output = process.communicate()[0]
143 output = process.communicate()[0]
130 ret = process.poll()
144 ret = process.poll()
131 if ret:
145 if ret:
132 raise Exception('preprocessor exited with error')
146 raise Exception("preprocessor exited with error")
133
147
134 return output
148 return output
135 finally:
149 finally:
@@ -141,16 +155,16 b' def normalize_output(output):'
141 for line in output.splitlines():
155 for line in output.splitlines():
142 # CFFI's parser doesn't like __attribute__ on UNIX compilers.
156 # CFFI's parser doesn't like __attribute__ on UNIX compilers.
143 if line.startswith(b'__attribute__ ((visibility ("default"))) '):
157 if line.startswith(b'__attribute__ ((visibility ("default"))) '):
144 line = line[len(b'__attribute__ ((visibility ("default"))) '):]
158 line = line[len(b'__attribute__ ((visibility ("default"))) ') :]
145
159
146 if line.startswith(b'__attribute__((deprecated('):
160 if line.startswith(b"__attribute__((deprecated("):
147 continue
161 continue
148 elif b'__declspec(deprecated(' in line:
162 elif b"__declspec(deprecated(" in line:
149 continue
163 continue
150
164
151 lines.append(line)
165 lines.append(line)
152
166
153 return b'\n'.join(lines)
167 return b"\n".join(lines)
154
168
155
169
156 ffi = cffi.FFI()
170 ffi = cffi.FFI()
@@ -159,18 +173,22 b' ffi = cffi.FFI()'
159 # *_DISABLE_DEPRECATE_WARNINGS prevents the compiler from emitting a warning
173 # *_DISABLE_DEPRECATE_WARNINGS prevents the compiler from emitting a warning
160 # when cffi uses the function. Since we statically link against zstd, even
174 # when cffi uses the function. Since we statically link against zstd, even
161 # if we use the deprecated functions it shouldn't be a huge problem.
175 # if we use the deprecated functions it shouldn't be a huge problem.
162 ffi.set_source('_zstd_cffi', '''
176 ffi.set_source(
177 "_zstd_cffi",
178 """
163 #define MIN(a,b) ((a)<(b) ? (a) : (b))
179 #define MIN(a,b) ((a)<(b) ? (a) : (b))
164 #define ZSTD_STATIC_LINKING_ONLY
180 #define ZSTD_STATIC_LINKING_ONLY
165 #include <zstd.h>
181 #include <zstd.h>
166 #define ZDICT_STATIC_LINKING_ONLY
182 #define ZDICT_STATIC_LINKING_ONLY
167 #define ZDICT_DISABLE_DEPRECATE_WARNINGS
183 #define ZDICT_DISABLE_DEPRECATE_WARNINGS
168 #include <zdict.h>
184 #include <zdict.h>
169 ''', sources=SOURCES,
185 """,
170 include_dirs=INCLUDE_DIRS,
186 sources=SOURCES,
171 extra_compile_args=['-DZSTD_MULTITHREAD'])
187 include_dirs=INCLUDE_DIRS,
188 extra_compile_args=["-DZSTD_MULTITHREAD"],
189 )
172
190
173 DEFINE = re.compile(b'^\\#define ([a-zA-Z0-9_]+) ')
191 DEFINE = re.compile(b"^\\#define ([a-zA-Z0-9_]+) ")
174
192
175 sources = []
193 sources = []
176
194
@@ -181,27 +199,27 b' for header in HEADERS:'
181
199
182 # #define's are effectively erased as part of going through preprocessor.
200 # #define's are effectively erased as part of going through preprocessor.
183 # So perform a manual pass to re-add those to the cdef source.
201 # So perform a manual pass to re-add those to the cdef source.
184 with open(header, 'rb') as fh:
202 with open(header, "rb") as fh:
185 for line in fh:
203 for line in fh:
186 line = line.strip()
204 line = line.strip()
187 m = DEFINE.match(line)
205 m = DEFINE.match(line)
188 if not m:
206 if not m:
189 continue
207 continue
190
208
191 if m.group(1) == b'ZSTD_STATIC_LINKING_ONLY':
209 if m.group(1) == b"ZSTD_STATIC_LINKING_ONLY":
192 continue
210 continue
193
211
194 # The parser doesn't like some constants with complex values.
212 # The parser doesn't like some constants with complex values.
195 if m.group(1) in (b'ZSTD_LIB_VERSION', b'ZSTD_VERSION_STRING'):
213 if m.group(1) in (b"ZSTD_LIB_VERSION", b"ZSTD_VERSION_STRING"):
196 continue
214 continue
197
215
198 # The ... is magic syntax by the cdef parser to resolve the
216 # The ... is magic syntax by the cdef parser to resolve the
199 # value at compile time.
217 # value at compile time.
200 sources.append(m.group(0) + b' ...')
218 sources.append(m.group(0) + b" ...")
201
219
202 cdeflines = b'\n'.join(sources).splitlines()
220 cdeflines = b"\n".join(sources).splitlines()
203 cdeflines = [l for l in cdeflines if l.strip()]
221 cdeflines = [l for l in cdeflines if l.strip()]
204 ffi.cdef(b'\n'.join(cdeflines).decode('latin1'))
222 ffi.cdef(b"\n".join(cdeflines).decode("latin1"))
205
223
206 if __name__ == '__main__':
224 if __name__ == "__main__":
207 ffi.compile()
225 ffi.compile()
@@ -16,7 +16,7 b' from setuptools import setup'
16 # (like memoryview).
16 # (like memoryview).
17 # Need feature in 1.11 for ffi.gc() to declare size of objects so we avoid
17 # Need feature in 1.11 for ffi.gc() to declare size of objects so we avoid
18 # garbage collection pitfalls.
18 # garbage collection pitfalls.
19 MINIMUM_CFFI_VERSION = '1.11'
19 MINIMUM_CFFI_VERSION = "1.11"
20
20
21 try:
21 try:
22 import cffi
22 import cffi
@@ -26,9 +26,11 b' try:'
26 # out the CFFI version here and reject CFFI if it is too old.
26 # out the CFFI version here and reject CFFI if it is too old.
27 cffi_version = LooseVersion(cffi.__version__)
27 cffi_version = LooseVersion(cffi.__version__)
28 if cffi_version < LooseVersion(MINIMUM_CFFI_VERSION):
28 if cffi_version < LooseVersion(MINIMUM_CFFI_VERSION):
29 print('CFFI 1.11 or newer required (%s found); '
29 print(
30 'not building CFFI backend' % cffi_version,
30 "CFFI 1.11 or newer required (%s found); "
31 file=sys.stderr)
31 "not building CFFI backend" % cffi_version,
32 file=sys.stderr,
33 )
32 cffi = None
34 cffi = None
33
35
34 except ImportError:
36 except ImportError:
@@ -40,73 +42,77 b' SUPPORT_LEGACY = False'
40 SYSTEM_ZSTD = False
42 SYSTEM_ZSTD = False
41 WARNINGS_AS_ERRORS = False
43 WARNINGS_AS_ERRORS = False
42
44
43 if os.environ.get('ZSTD_WARNINGS_AS_ERRORS', ''):
45 if os.environ.get("ZSTD_WARNINGS_AS_ERRORS", ""):
44 WARNINGS_AS_ERRORS = True
46 WARNINGS_AS_ERRORS = True
45
47
46 if '--legacy' in sys.argv:
48 if "--legacy" in sys.argv:
47 SUPPORT_LEGACY = True
49 SUPPORT_LEGACY = True
48 sys.argv.remove('--legacy')
50 sys.argv.remove("--legacy")
49
51
50 if '--system-zstd' in sys.argv:
52 if "--system-zstd" in sys.argv:
51 SYSTEM_ZSTD = True
53 SYSTEM_ZSTD = True
52 sys.argv.remove('--system-zstd')
54 sys.argv.remove("--system-zstd")
53
55
54 if '--warnings-as-errors' in sys.argv:
56 if "--warnings-as-errors" in sys.argv:
55 WARNINGS_AS_ERRORS = True
57 WARNINGS_AS_ERRORS = True
56 sys.argv.remove('--warning-as-errors')
58 sys.argv.remove("--warning-as-errors")
57
59
58 # Code for obtaining the Extension instance is in its own module to
60 # Code for obtaining the Extension instance is in its own module to
59 # facilitate reuse in other projects.
61 # facilitate reuse in other projects.
60 extensions = [
62 extensions = [
61 setup_zstd.get_c_extension(name='zstd',
63 setup_zstd.get_c_extension(
62 support_legacy=SUPPORT_LEGACY,
64 name="zstd",
63 system_zstd=SYSTEM_ZSTD,
65 support_legacy=SUPPORT_LEGACY,
64 warnings_as_errors=WARNINGS_AS_ERRORS),
66 system_zstd=SYSTEM_ZSTD,
67 warnings_as_errors=WARNINGS_AS_ERRORS,
68 ),
65 ]
69 ]
66
70
67 install_requires = []
71 install_requires = []
68
72
69 if cffi:
73 if cffi:
70 import make_cffi
74 import make_cffi
75
71 extensions.append(make_cffi.ffi.distutils_extension())
76 extensions.append(make_cffi.ffi.distutils_extension())
72 install_requires.append('cffi>=%s' % MINIMUM_CFFI_VERSION)
77 install_requires.append("cffi>=%s" % MINIMUM_CFFI_VERSION)
73
78
74 version = None
79 version = None
75
80
76 with open('c-ext/python-zstandard.h', 'r') as fh:
81 with open("c-ext/python-zstandard.h", "r") as fh:
77 for line in fh:
82 for line in fh:
78 if not line.startswith('#define PYTHON_ZSTANDARD_VERSION'):
83 if not line.startswith("#define PYTHON_ZSTANDARD_VERSION"):
79 continue
84 continue
80
85
81 version = line.split()[2][1:-1]
86 version = line.split()[2][1:-1]
82 break
87 break
83
88
84 if not version:
89 if not version:
85 raise Exception('could not resolve package version; '
90 raise Exception("could not resolve package version; " "this should never happen")
86 'this should never happen')
87
91
88 setup(
92 setup(
89 name='zstandard',
93 name="zstandard",
90 version=version,
94 version=version,
91 description='Zstandard bindings for Python',
95 description="Zstandard bindings for Python",
92 long_description=open('README.rst', 'r').read(),
96 long_description=open("README.rst", "r").read(),
93 url='https://github.com/indygreg/python-zstandard',
97 url="https://github.com/indygreg/python-zstandard",
94 author='Gregory Szorc',
98 author="Gregory Szorc",
95 author_email='gregory.szorc@gmail.com',
99 author_email="gregory.szorc@gmail.com",
96 license='BSD',
100 license="BSD",
97 classifiers=[
101 classifiers=[
98 'Development Status :: 4 - Beta',
102 "Development Status :: 4 - Beta",
99 'Intended Audience :: Developers',
103 "Intended Audience :: Developers",
100 'License :: OSI Approved :: BSD License',
104 "License :: OSI Approved :: BSD License",
101 'Programming Language :: C',
105 "Programming Language :: C",
102 'Programming Language :: Python :: 2.7',
106 "Programming Language :: Python :: 2.7",
103 'Programming Language :: Python :: 3.5',
107 "Programming Language :: Python :: 3.5",
104 'Programming Language :: Python :: 3.6',
108 "Programming Language :: Python :: 3.6",
105 'Programming Language :: Python :: 3.7',
109 "Programming Language :: Python :: 3.7",
110 "Programming Language :: Python :: 3.8",
106 ],
111 ],
107 keywords='zstandard zstd compression',
112 keywords="zstandard zstd compression",
108 packages=['zstandard'],
113 packages=["zstandard"],
109 ext_modules=extensions,
114 ext_modules=extensions,
110 test_suite='tests',
115 test_suite="tests",
111 install_requires=install_requires,
116 install_requires=install_requires,
117 tests_require=["hypothesis"],
112 )
118 )
@@ -10,97 +10,110 b' import os'
10 from distutils.extension import Extension
10 from distutils.extension import Extension
11
11
12
12
13 zstd_sources = ['zstd/%s' % p for p in (
13 zstd_sources = [
14 'common/debug.c',
14 "zstd/%s" % p
15 'common/entropy_common.c',
15 for p in (
16 'common/error_private.c',
16 "common/debug.c",
17 'common/fse_decompress.c',
17 "common/entropy_common.c",
18 'common/pool.c',
18 "common/error_private.c",
19 'common/threading.c',
19 "common/fse_decompress.c",
20 'common/xxhash.c',
20 "common/pool.c",
21 'common/zstd_common.c',
21 "common/threading.c",
22 'compress/fse_compress.c',
22 "common/xxhash.c",
23 'compress/hist.c',
23 "common/zstd_common.c",
24 'compress/huf_compress.c',
24 "compress/fse_compress.c",
25 'compress/zstd_compress_literals.c',
25 "compress/hist.c",
26 'compress/zstd_compress_sequences.c',
26 "compress/huf_compress.c",
27 'compress/zstd_compress.c',
27 "compress/zstd_compress_literals.c",
28 'compress/zstd_double_fast.c',
28 "compress/zstd_compress_sequences.c",
29 'compress/zstd_fast.c',
29 "compress/zstd_compress.c",
30 'compress/zstd_lazy.c',
30 "compress/zstd_double_fast.c",
31 'compress/zstd_ldm.c',
31 "compress/zstd_fast.c",
32 'compress/zstd_opt.c',
32 "compress/zstd_lazy.c",
33 'compress/zstdmt_compress.c',
33 "compress/zstd_ldm.c",
34 'decompress/huf_decompress.c',
34 "compress/zstd_opt.c",
35 'decompress/zstd_ddict.c',
35 "compress/zstdmt_compress.c",
36 'decompress/zstd_decompress.c',
36 "decompress/huf_decompress.c",
37 'decompress/zstd_decompress_block.c',
37 "decompress/zstd_ddict.c",
38 'dictBuilder/cover.c',
38 "decompress/zstd_decompress.c",
39 'dictBuilder/divsufsort.c',
39 "decompress/zstd_decompress_block.c",
40 'dictBuilder/fastcover.c',
40 "dictBuilder/cover.c",
41 'dictBuilder/zdict.c',
41 "dictBuilder/divsufsort.c",
42 )]
42 "dictBuilder/fastcover.c",
43 "dictBuilder/zdict.c",
44 )
45 ]
43
46
44 zstd_sources_legacy = ['zstd/%s' % p for p in (
47 zstd_sources_legacy = [
45 'deprecated/zbuff_common.c',
48 "zstd/%s" % p
46 'deprecated/zbuff_compress.c',
49 for p in (
47 'deprecated/zbuff_decompress.c',
50 "deprecated/zbuff_common.c",
48 'legacy/zstd_v01.c',
51 "deprecated/zbuff_compress.c",
49 'legacy/zstd_v02.c',
52 "deprecated/zbuff_decompress.c",
50 'legacy/zstd_v03.c',
53 "legacy/zstd_v01.c",
51 'legacy/zstd_v04.c',
54 "legacy/zstd_v02.c",
52 'legacy/zstd_v05.c',
55 "legacy/zstd_v03.c",
53 'legacy/zstd_v06.c',
56 "legacy/zstd_v04.c",
54 'legacy/zstd_v07.c'
57 "legacy/zstd_v05.c",
55 )]
58 "legacy/zstd_v06.c",
59 "legacy/zstd_v07.c",
60 )
61 ]
56
62
57 zstd_includes = [
63 zstd_includes = [
58 'zstd',
64 "zstd",
59 'zstd/common',
65 "zstd/common",
60 'zstd/compress',
66 "zstd/compress",
61 'zstd/decompress',
67 "zstd/decompress",
62 'zstd/dictBuilder',
68 "zstd/dictBuilder",
63 ]
69 ]
64
70
65 zstd_includes_legacy = [
71 zstd_includes_legacy = [
66 'zstd/deprecated',
72 "zstd/deprecated",
67 'zstd/legacy',
73 "zstd/legacy",
68 ]
74 ]
69
75
70 ext_includes = [
76 ext_includes = [
71 'c-ext',
77 "c-ext",
72 'zstd/common',
78 "zstd/common",
73 ]
79 ]
74
80
75 ext_sources = [
81 ext_sources = [
76 'zstd/common/pool.c',
82 "zstd/common/error_private.c",
77 'zstd/common/threading.c',
83 "zstd/common/pool.c",
78 'zstd.c',
84 "zstd/common/threading.c",
79 'c-ext/bufferutil.c',
85 "zstd/common/zstd_common.c",
80 'c-ext/compressiondict.c',
86 "zstd.c",
81 'c-ext/compressobj.c',
87 "c-ext/bufferutil.c",
82 'c-ext/compressor.c',
88 "c-ext/compressiondict.c",
83 'c-ext/compressoriterator.c',
89 "c-ext/compressobj.c",
84 'c-ext/compressionchunker.c',
90 "c-ext/compressor.c",
85 'c-ext/compressionparams.c',
91 "c-ext/compressoriterator.c",
86 'c-ext/compressionreader.c',
92 "c-ext/compressionchunker.c",
87 'c-ext/compressionwriter.c',
93 "c-ext/compressionparams.c",
88 'c-ext/constants.c',
94 "c-ext/compressionreader.c",
89 'c-ext/decompressobj.c',
95 "c-ext/compressionwriter.c",
90 'c-ext/decompressor.c',
96 "c-ext/constants.c",
91 'c-ext/decompressoriterator.c',
97 "c-ext/decompressobj.c",
92 'c-ext/decompressionreader.c',
98 "c-ext/decompressor.c",
93 'c-ext/decompressionwriter.c',
99 "c-ext/decompressoriterator.c",
94 'c-ext/frameparams.c',
100 "c-ext/decompressionreader.c",
101 "c-ext/decompressionwriter.c",
102 "c-ext/frameparams.c",
95 ]
103 ]
96
104
97 zstd_depends = [
105 zstd_depends = [
98 'c-ext/python-zstandard.h',
106 "c-ext/python-zstandard.h",
99 ]
107 ]
100
108
101
109
102 def get_c_extension(support_legacy=False, system_zstd=False, name='zstd',
110 def get_c_extension(
103 warnings_as_errors=False, root=None):
111 support_legacy=False,
112 system_zstd=False,
113 name="zstd",
114 warnings_as_errors=False,
115 root=None,
116 ):
104 """Obtain a distutils.extension.Extension for the C extension.
117 """Obtain a distutils.extension.Extension for the C extension.
105
118
106 ``support_legacy`` controls whether to compile in legacy zstd format support.
119 ``support_legacy`` controls whether to compile in legacy zstd format support.
@@ -125,17 +138,16 b' def get_c_extension(support_legacy=False'
125 if not system_zstd:
138 if not system_zstd:
126 sources.update([os.path.join(actual_root, p) for p in zstd_sources])
139 sources.update([os.path.join(actual_root, p) for p in zstd_sources])
127 if support_legacy:
140 if support_legacy:
128 sources.update([os.path.join(actual_root, p)
141 sources.update([os.path.join(actual_root, p) for p in zstd_sources_legacy])
129 for p in zstd_sources_legacy])
130 sources = list(sources)
142 sources = list(sources)
131
143
132 include_dirs = set([os.path.join(actual_root, d) for d in ext_includes])
144 include_dirs = set([os.path.join(actual_root, d) for d in ext_includes])
133 if not system_zstd:
145 if not system_zstd:
134 include_dirs.update([os.path.join(actual_root, d)
146 include_dirs.update([os.path.join(actual_root, d) for d in zstd_includes])
135 for d in zstd_includes])
136 if support_legacy:
147 if support_legacy:
137 include_dirs.update([os.path.join(actual_root, d)
148 include_dirs.update(
138 for d in zstd_includes_legacy])
149 [os.path.join(actual_root, d) for d in zstd_includes_legacy]
150 )
139 include_dirs = list(include_dirs)
151 include_dirs = list(include_dirs)
140
152
141 depends = [os.path.join(actual_root, p) for p in zstd_depends]
153 depends = [os.path.join(actual_root, p) for p in zstd_depends]
@@ -143,41 +155,40 b' def get_c_extension(support_legacy=False'
143 compiler = distutils.ccompiler.new_compiler()
155 compiler = distutils.ccompiler.new_compiler()
144
156
145 # Needed for MSVC.
157 # Needed for MSVC.
146 if hasattr(compiler, 'initialize'):
158 if hasattr(compiler, "initialize"):
147 compiler.initialize()
159 compiler.initialize()
148
160
149 if compiler.compiler_type == 'unix':
161 if compiler.compiler_type == "unix":
150 compiler_type = 'unix'
162 compiler_type = "unix"
151 elif compiler.compiler_type == 'msvc':
163 elif compiler.compiler_type == "msvc":
152 compiler_type = 'msvc'
164 compiler_type = "msvc"
153 elif compiler.compiler_type == 'mingw32':
165 elif compiler.compiler_type == "mingw32":
154 compiler_type = 'mingw32'
166 compiler_type = "mingw32"
155 else:
167 else:
156 raise Exception('unhandled compiler type: %s' %
168 raise Exception("unhandled compiler type: %s" % compiler.compiler_type)
157 compiler.compiler_type)
158
169
159 extra_args = ['-DZSTD_MULTITHREAD']
170 extra_args = ["-DZSTD_MULTITHREAD"]
160
171
161 if not system_zstd:
172 if not system_zstd:
162 extra_args.append('-DZSTDLIB_VISIBILITY=')
173 extra_args.append("-DZSTDLIB_VISIBILITY=")
163 extra_args.append('-DZDICTLIB_VISIBILITY=')
174 extra_args.append("-DZDICTLIB_VISIBILITY=")
164 extra_args.append('-DZSTDERRORLIB_VISIBILITY=')
175 extra_args.append("-DZSTDERRORLIB_VISIBILITY=")
165
176
166 if compiler_type == 'unix':
177 if compiler_type == "unix":
167 extra_args.append('-fvisibility=hidden')
178 extra_args.append("-fvisibility=hidden")
168
179
169 if not system_zstd and support_legacy:
180 if not system_zstd and support_legacy:
170 extra_args.append('-DZSTD_LEGACY_SUPPORT=1')
181 extra_args.append("-DZSTD_LEGACY_SUPPORT=1")
171
182
172 if warnings_as_errors:
183 if warnings_as_errors:
173 if compiler_type in ('unix', 'mingw32'):
184 if compiler_type in ("unix", "mingw32"):
174 extra_args.append('-Werror')
185 extra_args.append("-Werror")
175 elif compiler_type == 'msvc':
186 elif compiler_type == "msvc":
176 extra_args.append('/WX')
187 extra_args.append("/WX")
177 else:
188 else:
178 assert False
189 assert False
179
190
180 libraries = ['zstd'] if system_zstd else []
191 libraries = ["zstd"] if system_zstd else []
181
192
182 # Python 3.7 doesn't like absolute paths. So normalize to relative.
193 # Python 3.7 doesn't like absolute paths. So normalize to relative.
183 sources = [os.path.relpath(p, root) for p in sources]
194 sources = [os.path.relpath(p, root) for p in sources]
@@ -185,8 +196,11 b' def get_c_extension(support_legacy=False'
185 depends = [os.path.relpath(p, root) for p in depends]
196 depends = [os.path.relpath(p, root) for p in depends]
186
197
187 # TODO compile with optimizations.
198 # TODO compile with optimizations.
188 return Extension(name, sources,
199 return Extension(
189 include_dirs=include_dirs,
200 name,
190 depends=depends,
201 sources,
191 extra_compile_args=extra_args,
202 include_dirs=include_dirs,
192 libraries=libraries)
203 depends=depends,
204 extra_compile_args=extra_args,
205 libraries=libraries,
206 )
@@ -3,6 +3,7 b' import inspect'
3 import io
3 import io
4 import os
4 import os
5 import types
5 import types
6 import unittest
6
7
7 try:
8 try:
8 import hypothesis
9 import hypothesis
@@ -10,39 +11,46 b' except ImportError:'
10 hypothesis = None
11 hypothesis = None
11
12
12
13
14 class TestCase(unittest.TestCase):
15 if not getattr(unittest.TestCase, "assertRaisesRegex", False):
16 assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
17
18
13 def make_cffi(cls):
19 def make_cffi(cls):
14 """Decorator to add CFFI versions of each test method."""
20 """Decorator to add CFFI versions of each test method."""
15
21
16 # The module containing this class definition should
22 # The module containing this class definition should
17 # `import zstandard as zstd`. Otherwise things may blow up.
23 # `import zstandard as zstd`. Otherwise things may blow up.
18 mod = inspect.getmodule(cls)
24 mod = inspect.getmodule(cls)
19 if not hasattr(mod, 'zstd'):
25 if not hasattr(mod, "zstd"):
20 raise Exception('test module does not contain "zstd" symbol')
26 raise Exception('test module does not contain "zstd" symbol')
21
27
22 if not hasattr(mod.zstd, 'backend'):
28 if not hasattr(mod.zstd, "backend"):
23 raise Exception('zstd symbol does not have "backend" attribute; did '
29 raise Exception(
24 'you `import zstandard as zstd`?')
30 'zstd symbol does not have "backend" attribute; did '
31 "you `import zstandard as zstd`?"
32 )
25
33
26 # If `import zstandard` already chose the cffi backend, there is nothing
34 # If `import zstandard` already chose the cffi backend, there is nothing
27 # for us to do: we only add the cffi variation if the default backend
35 # for us to do: we only add the cffi variation if the default backend
28 # is the C extension.
36 # is the C extension.
29 if mod.zstd.backend == 'cffi':
37 if mod.zstd.backend == "cffi":
30 return cls
38 return cls
31
39
32 old_env = dict(os.environ)
40 old_env = dict(os.environ)
33 os.environ['PYTHON_ZSTANDARD_IMPORT_POLICY'] = 'cffi'
41 os.environ["PYTHON_ZSTANDARD_IMPORT_POLICY"] = "cffi"
34 try:
42 try:
35 try:
43 try:
36 mod_info = imp.find_module('zstandard')
44 mod_info = imp.find_module("zstandard")
37 mod = imp.load_module('zstandard_cffi', *mod_info)
45 mod = imp.load_module("zstandard_cffi", *mod_info)
38 except ImportError:
46 except ImportError:
39 return cls
47 return cls
40 finally:
48 finally:
41 os.environ.clear()
49 os.environ.clear()
42 os.environ.update(old_env)
50 os.environ.update(old_env)
43
51
44 if mod.backend != 'cffi':
52 if mod.backend != "cffi":
45 raise Exception('got the zstandard %s backend instead of cffi' % mod.backend)
53 raise Exception("got the zstandard %s backend instead of cffi" % mod.backend)
46
54
47 # If CFFI version is available, dynamically construct test methods
55 # If CFFI version is available, dynamically construct test methods
48 # that use it.
56 # that use it.
@@ -52,27 +60,31 b' def make_cffi(cls):'
52 if not inspect.ismethod(fn) and not inspect.isfunction(fn):
60 if not inspect.ismethod(fn) and not inspect.isfunction(fn):
53 continue
61 continue
54
62
55 if not fn.__name__.startswith('test_'):
63 if not fn.__name__.startswith("test_"):
56 continue
64 continue
57
65
58 name = '%s_cffi' % fn.__name__
66 name = "%s_cffi" % fn.__name__
59
67
60 # Replace the "zstd" symbol with the CFFI module instance. Then copy
68 # Replace the "zstd" symbol with the CFFI module instance. Then copy
61 # the function object and install it in a new attribute.
69 # the function object and install it in a new attribute.
62 if isinstance(fn, types.FunctionType):
70 if isinstance(fn, types.FunctionType):
63 globs = dict(fn.__globals__)
71 globs = dict(fn.__globals__)
64 globs['zstd'] = mod
72 globs["zstd"] = mod
65 new_fn = types.FunctionType(fn.__code__, globs, name,
73 new_fn = types.FunctionType(
66 fn.__defaults__, fn.__closure__)
74 fn.__code__, globs, name, fn.__defaults__, fn.__closure__
75 )
67 new_method = new_fn
76 new_method = new_fn
68 else:
77 else:
69 globs = dict(fn.__func__.func_globals)
78 globs = dict(fn.__func__.func_globals)
70 globs['zstd'] = mod
79 globs["zstd"] = mod
71 new_fn = types.FunctionType(fn.__func__.func_code, globs, name,
80 new_fn = types.FunctionType(
72 fn.__func__.func_defaults,
81 fn.__func__.func_code,
73 fn.__func__.func_closure)
82 globs,
74 new_method = types.UnboundMethodType(new_fn, fn.im_self,
83 name,
75 fn.im_class)
84 fn.__func__.func_defaults,
85 fn.__func__.func_closure,
86 )
87 new_method = types.UnboundMethodType(new_fn, fn.im_self, fn.im_class)
76
88
77 setattr(cls, name, new_method)
89 setattr(cls, name, new_method)
78
90
@@ -84,6 +96,7 b' class NonClosingBytesIO(io.BytesIO):'
84
96
85 This allows us to access written data after close().
97 This allows us to access written data after close().
86 """
98 """
99
87 def __init__(self, *args, **kwargs):
100 def __init__(self, *args, **kwargs):
88 super(NonClosingBytesIO, self).__init__(*args, **kwargs)
101 super(NonClosingBytesIO, self).__init__(*args, **kwargs)
89 self._saved_buffer = None
102 self._saved_buffer = None
@@ -135,7 +148,7 b' def random_input_data():'
135 dirs[:] = list(sorted(dirs))
148 dirs[:] = list(sorted(dirs))
136 for f in sorted(files):
149 for f in sorted(files):
137 try:
150 try:
138 with open(os.path.join(root, f), 'rb') as fh:
151 with open(os.path.join(root, f), "rb") as fh:
139 data = fh.read()
152 data = fh.read()
140 if data:
153 if data:
141 _source_files.append(data)
154 _source_files.append(data)
@@ -154,11 +167,11 b' def random_input_data():'
154
167
155 def generate_samples():
168 def generate_samples():
156 inputs = [
169 inputs = [
157 b'foo',
170 b"foo",
158 b'bar',
171 b"bar",
159 b'abcdef',
172 b"abcdef",
160 b'sometext',
173 b"sometext",
161 b'baz',
174 b"baz",
162 ]
175 ]
163
176
164 samples = []
177 samples = []
@@ -173,13 +186,12 b' def generate_samples():'
173
186
174 if hypothesis:
187 if hypothesis:
175 default_settings = hypothesis.settings(deadline=10000)
188 default_settings = hypothesis.settings(deadline=10000)
176 hypothesis.settings.register_profile('default', default_settings)
189 hypothesis.settings.register_profile("default", default_settings)
177
190
178 ci_settings = hypothesis.settings(deadline=20000, max_examples=1000)
191 ci_settings = hypothesis.settings(deadline=20000, max_examples=1000)
179 hypothesis.settings.register_profile('ci', ci_settings)
192 hypothesis.settings.register_profile("ci", ci_settings)
180
193
181 expensive_settings = hypothesis.settings(deadline=None, max_examples=10000)
194 expensive_settings = hypothesis.settings(deadline=None, max_examples=10000)
182 hypothesis.settings.register_profile('expensive', expensive_settings)
195 hypothesis.settings.register_profile("expensive", expensive_settings)
183
196
184 hypothesis.settings.load_profile(
197 hypothesis.settings.load_profile(os.environ.get("HYPOTHESIS_PROFILE", "default"))
185 os.environ.get('HYPOTHESIS_PROFILE', 'default'))
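The block above registers Hypothesis profiles with different budgets and picks one through the HYPOTHESIS_PROFILE environment variable. A short usage sketch (assuming the hypothesis package is installed; the profile names mirror those registered above):

    import os
    import hypothesis

    hypothesis.settings.register_profile("default", hypothesis.settings(deadline=10000))
    hypothesis.settings.register_profile("ci", hypothesis.settings(deadline=20000, max_examples=1000))

    # e.g. HYPOTHESIS_PROFILE=ci python -m pytest tests/
    hypothesis.settings.load_profile(os.environ.get("HYPOTHESIS_PROFILE", "default"))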
@@ -3,104 +3,114 b' import unittest'
3
3
4 import zstandard as zstd
4 import zstandard as zstd
5
5
6 ss = struct.Struct('=QQ')
6 from .common import TestCase
7
8 ss = struct.Struct("=QQ")
7
9
8
10
9 class TestBufferWithSegments(unittest.TestCase):
11 class TestBufferWithSegments(TestCase):
10 def test_arguments(self):
12 def test_arguments(self):
11 if not hasattr(zstd, 'BufferWithSegments'):
13 if not hasattr(zstd, "BufferWithSegments"):
12 self.skipTest('BufferWithSegments not available')
14 self.skipTest("BufferWithSegments not available")
13
15
14 with self.assertRaises(TypeError):
16 with self.assertRaises(TypeError):
15 zstd.BufferWithSegments()
17 zstd.BufferWithSegments()
16
18
17 with self.assertRaises(TypeError):
19 with self.assertRaises(TypeError):
18 zstd.BufferWithSegments(b'foo')
20 zstd.BufferWithSegments(b"foo")
19
21
20 # Segments data should be a multiple of 16.
22 # Segments data should be a multiple of 16.
21 with self.assertRaisesRegexp(ValueError, 'segments array size is not a multiple of 16'):
23 with self.assertRaisesRegex(
22 zstd.BufferWithSegments(b'foo', b'\x00\x00')
24 ValueError, "segments array size is not a multiple of 16"
25 ):
26 zstd.BufferWithSegments(b"foo", b"\x00\x00")
23
27
24 def test_invalid_offset(self):
28 def test_invalid_offset(self):
25 if not hasattr(zstd, 'BufferWithSegments'):
29 if not hasattr(zstd, "BufferWithSegments"):
26 self.skipTest('BufferWithSegments not available')
30 self.skipTest("BufferWithSegments not available")
27
31
28 with self.assertRaisesRegexp(ValueError, 'offset within segments array references memory'):
32 with self.assertRaisesRegex(
29 zstd.BufferWithSegments(b'foo', ss.pack(0, 4))
33 ValueError, "offset within segments array references memory"
34 ):
35 zstd.BufferWithSegments(b"foo", ss.pack(0, 4))
30
36
31 def test_invalid_getitem(self):
37 def test_invalid_getitem(self):
32 if not hasattr(zstd, 'BufferWithSegments'):
38 if not hasattr(zstd, "BufferWithSegments"):
33 self.skipTest('BufferWithSegments not available')
39 self.skipTest("BufferWithSegments not available")
34
40
35 b = zstd.BufferWithSegments(b'foo', ss.pack(0, 3))
41 b = zstd.BufferWithSegments(b"foo", ss.pack(0, 3))
36
42
37 with self.assertRaisesRegexp(IndexError, 'offset must be non-negative'):
43 with self.assertRaisesRegex(IndexError, "offset must be non-negative"):
38 test = b[-10]
44 test = b[-10]
39
45
40 with self.assertRaisesRegexp(IndexError, 'offset must be less than 1'):
46 with self.assertRaisesRegex(IndexError, "offset must be less than 1"):
41 test = b[1]
47 test = b[1]
42
48
43 with self.assertRaisesRegexp(IndexError, 'offset must be less than 1'):
49 with self.assertRaisesRegex(IndexError, "offset must be less than 1"):
44 test = b[2]
50 test = b[2]
45
51
46 def test_single(self):
52 def test_single(self):
47 if not hasattr(zstd, 'BufferWithSegments'):
53 if not hasattr(zstd, "BufferWithSegments"):
48 self.skipTest('BufferWithSegments not available')
54 self.skipTest("BufferWithSegments not available")
49
55
50 b = zstd.BufferWithSegments(b'foo', ss.pack(0, 3))
56 b = zstd.BufferWithSegments(b"foo", ss.pack(0, 3))
51 self.assertEqual(len(b), 1)
57 self.assertEqual(len(b), 1)
52 self.assertEqual(b.size, 3)
58 self.assertEqual(b.size, 3)
53 self.assertEqual(b.tobytes(), b'foo')
59 self.assertEqual(b.tobytes(), b"foo")
54
60
55 self.assertEqual(len(b[0]), 3)
61 self.assertEqual(len(b[0]), 3)
56 self.assertEqual(b[0].offset, 0)
62 self.assertEqual(b[0].offset, 0)
57 self.assertEqual(b[0].tobytes(), b'foo')
63 self.assertEqual(b[0].tobytes(), b"foo")
58
64
59 def test_multiple(self):
65 def test_multiple(self):
60 if not hasattr(zstd, 'BufferWithSegments'):
66 if not hasattr(zstd, "BufferWithSegments"):
61 self.skipTest('BufferWithSegments not available')
67 self.skipTest("BufferWithSegments not available")
62
68
63 b = zstd.BufferWithSegments(b'foofooxfooxy', b''.join([ss.pack(0, 3),
69 b = zstd.BufferWithSegments(
64 ss.pack(3, 4),
70 b"foofooxfooxy", b"".join([ss.pack(0, 3), ss.pack(3, 4), ss.pack(7, 5)])
65 ss.pack(7, 5)]))
71 )
66 self.assertEqual(len(b), 3)
72 self.assertEqual(len(b), 3)
67 self.assertEqual(b.size, 12)
73 self.assertEqual(b.size, 12)
68 self.assertEqual(b.tobytes(), b'foofooxfooxy')
74 self.assertEqual(b.tobytes(), b"foofooxfooxy")
69
75
70 self.assertEqual(b[0].tobytes(), b'foo')
76 self.assertEqual(b[0].tobytes(), b"foo")
71 self.assertEqual(b[1].tobytes(), b'foox')
77 self.assertEqual(b[1].tobytes(), b"foox")
72 self.assertEqual(b[2].tobytes(), b'fooxy')
78 self.assertEqual(b[2].tobytes(), b"fooxy")
73
79
74
80
75 class TestBufferWithSegmentsCollection(unittest.TestCase):
81 class TestBufferWithSegmentsCollection(TestCase):
76 def test_empty_constructor(self):
82 def test_empty_constructor(self):
77 if not hasattr(zstd, 'BufferWithSegmentsCollection'):
83 if not hasattr(zstd, "BufferWithSegmentsCollection"):
78 self.skipTest('BufferWithSegmentsCollection not available')
84 self.skipTest("BufferWithSegmentsCollection not available")
79
85
80 with self.assertRaisesRegexp(ValueError, 'must pass at least 1 argument'):
86 with self.assertRaisesRegex(ValueError, "must pass at least 1 argument"):
81 zstd.BufferWithSegmentsCollection()
87 zstd.BufferWithSegmentsCollection()
82
88
83 def test_argument_validation(self):
89 def test_argument_validation(self):
84 if not hasattr(zstd, 'BufferWithSegmentsCollection'):
90 if not hasattr(zstd, "BufferWithSegmentsCollection"):
85 self.skipTest('BufferWithSegmentsCollection not available')
91 self.skipTest("BufferWithSegmentsCollection not available")
86
92
87 with self.assertRaisesRegexp(TypeError, 'arguments must be BufferWithSegments'):
93 with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"):
88 zstd.BufferWithSegmentsCollection(None)
94 zstd.BufferWithSegmentsCollection(None)
89
95
90 with self.assertRaisesRegexp(TypeError, 'arguments must be BufferWithSegments'):
96 with self.assertRaisesRegex(TypeError, "arguments must be BufferWithSegments"):
91 zstd.BufferWithSegmentsCollection(zstd.BufferWithSegments(b'foo', ss.pack(0, 3)),
97 zstd.BufferWithSegmentsCollection(
92 None)
98 zstd.BufferWithSegments(b"foo", ss.pack(0, 3)), None
99 )
93
100
94 with self.assertRaisesRegexp(ValueError, 'ZstdBufferWithSegments cannot be empty'):
101 with self.assertRaisesRegex(
95 zstd.BufferWithSegmentsCollection(zstd.BufferWithSegments(b'', b''))
102 ValueError, "ZstdBufferWithSegments cannot be empty"
103 ):
104 zstd.BufferWithSegmentsCollection(zstd.BufferWithSegments(b"", b""))
96
105
97 def test_length(self):
106 def test_length(self):
98 if not hasattr(zstd, 'BufferWithSegmentsCollection'):
107 if not hasattr(zstd, "BufferWithSegmentsCollection"):
99 self.skipTest('BufferWithSegmentsCollection not available')
108 self.skipTest("BufferWithSegmentsCollection not available")
100
109
101 b1 = zstd.BufferWithSegments(b'foo', ss.pack(0, 3))
110 b1 = zstd.BufferWithSegments(b"foo", ss.pack(0, 3))
102 b2 = zstd.BufferWithSegments(b'barbaz', b''.join([ss.pack(0, 3),
111 b2 = zstd.BufferWithSegments(
103 ss.pack(3, 3)]))
112 b"barbaz", b"".join([ss.pack(0, 3), ss.pack(3, 3)])
113 )
104
114
105 c = zstd.BufferWithSegmentsCollection(b1)
115 c = zstd.BufferWithSegmentsCollection(b1)
106 self.assertEqual(len(c), 1)
116 self.assertEqual(len(c), 1)
@@ -115,21 +125,22 b' class TestBufferWithSegmentsCollection(u'
115 self.assertEqual(c.size(), 9)
125 self.assertEqual(c.size(), 9)
116
126
117 def test_getitem(self):
127 def test_getitem(self):
118 if not hasattr(zstd, 'BufferWithSegmentsCollection'):
128 if not hasattr(zstd, "BufferWithSegmentsCollection"):
119 self.skipTest('BufferWithSegmentsCollection not available')
129 self.skipTest("BufferWithSegmentsCollection not available")
120
130
121 b1 = zstd.BufferWithSegments(b'foo', ss.pack(0, 3))
131 b1 = zstd.BufferWithSegments(b"foo", ss.pack(0, 3))
122 b2 = zstd.BufferWithSegments(b'barbaz', b''.join([ss.pack(0, 3),
132 b2 = zstd.BufferWithSegments(
123 ss.pack(3, 3)]))
133 b"barbaz", b"".join([ss.pack(0, 3), ss.pack(3, 3)])
134 )
124
135
125 c = zstd.BufferWithSegmentsCollection(b1, b2)
136 c = zstd.BufferWithSegmentsCollection(b1, b2)
126
137
127 with self.assertRaisesRegexp(IndexError, 'offset must be less than 3'):
138 with self.assertRaisesRegex(IndexError, "offset must be less than 3"):
128 c[3]
139 c[3]
129
140
130 with self.assertRaisesRegexp(IndexError, 'offset must be less than 3'):
141 with self.assertRaisesRegex(IndexError, "offset must be less than 3"):
131 c[4]
142 c[4]
132
143
133 self.assertEqual(c[0].tobytes(), b'foo')
144 self.assertEqual(c[0].tobytes(), b"foo")
134 self.assertEqual(c[1].tobytes(), b'bar')
145 self.assertEqual(c[1].tobytes(), b"bar")
135 self.assertEqual(c[2].tobytes(), b'baz')
146 self.assertEqual(c[2].tobytes(), b"baz")
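These tests drive the zero-copy buffer types with segment tables packed as native-endian (offset, length) uint64 pairs, matching ss = struct.Struct("=QQ"). A hedged sketch of both types, guarded the same way the tests are because only the C backend provides them:

    import struct
    import zstandard as zstd

    ss = struct.Struct("=QQ")  # one 16-byte (offset, length) record per segment

    if hasattr(zstd, "BufferWithSegmentsCollection"):
        b1 = zstd.BufferWithSegments(b"foo", ss.pack(0, 3))
        b2 = zstd.BufferWithSegments(
            b"barbaz", b"".join([ss.pack(0, 3), ss.pack(3, 3)])
        )

        c = zstd.BufferWithSegmentsCollection(b1, b2)
        # The collection indexes segments across its members: 0 from b1, 1-2 from b2.
        assert [c[i].tobytes() for i in range(3)] == [b"foo", b"bar", b"baz"]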
This diff has been collapsed as it changes many lines (811 lines changed).
@@ -13,6 +13,7 b' from .common import ('
13 make_cffi,
13 make_cffi,
14 NonClosingBytesIO,
14 NonClosingBytesIO,
15 OpCountingBytesIO,
15 OpCountingBytesIO,
16 TestCase,
16 )
17 )
17
18
18
19
@@ -23,14 +24,13 b' else:'
23
24
24
25
25 def multithreaded_chunk_size(level, source_size=0):
26 def multithreaded_chunk_size(level, source_size=0):
26 params = zstd.ZstdCompressionParameters.from_level(level,
27 params = zstd.ZstdCompressionParameters.from_level(level, source_size=source_size)
27 source_size=source_size)
28
28
29 return 1 << (params.window_log + 2)
29 return 1 << (params.window_log + 2)
30
30
31
31
32 @make_cffi
32 @make_cffi
33 class TestCompressor(unittest.TestCase):
33 class TestCompressor(TestCase):
34 def test_level_bounds(self):
34 def test_level_bounds(self):
35 with self.assertRaises(ValueError):
35 with self.assertRaises(ValueError):
36 zstd.ZstdCompressor(level=23)
36 zstd.ZstdCompressor(level=23)
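The multithreaded_chunk_size() helper defined a few lines up sizes test inputs from the window log that from_level() would pick, so that multithreaded compression actually splits work across jobs. A small illustration (the concrete window_log depends on the bundled zstd version, so the printed size is only indicative):

    import zstandard as zstd

    def multithreaded_chunk_size(level, source_size=0):
        params = zstd.ZstdCompressionParameters.from_level(level, source_size=source_size)
        # Four times the window size (1 << window_log) keeps several worker jobs busy.
        return 1 << (params.window_log + 2)

    print(multithreaded_chunk_size(1))  # e.g. 2097152 when window_log is 19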
@@ -41,11 +41,11 b' class TestCompressor(unittest.TestCase):'
41
41
42
42
43 @make_cffi
43 @make_cffi
44 class TestCompressor_compress(unittest.TestCase):
44 class TestCompressor_compress(TestCase):
45 def test_compress_empty(self):
45 def test_compress_empty(self):
46 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
46 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
47 result = cctx.compress(b'')
47 result = cctx.compress(b"")
48 self.assertEqual(result, b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
48 self.assertEqual(result, b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
49 params = zstd.get_frame_parameters(result)
49 params = zstd.get_frame_parameters(result)
50 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
50 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
51 self.assertEqual(params.window_size, 524288)
51 self.assertEqual(params.window_size, 524288)
@@ -53,21 +53,21 b' class TestCompressor_compress(unittest.T'
53 self.assertFalse(params.has_checksum, 0)
53 self.assertFalse(params.has_checksum, 0)
54
54
55 cctx = zstd.ZstdCompressor()
55 cctx = zstd.ZstdCompressor()
56 result = cctx.compress(b'')
56 result = cctx.compress(b"")
57 self.assertEqual(result, b'\x28\xb5\x2f\xfd\x20\x00\x01\x00\x00')
57 self.assertEqual(result, b"\x28\xb5\x2f\xfd\x20\x00\x01\x00\x00")
58 params = zstd.get_frame_parameters(result)
58 params = zstd.get_frame_parameters(result)
59 self.assertEqual(params.content_size, 0)
59 self.assertEqual(params.content_size, 0)
60
60
61 def test_input_types(self):
61 def test_input_types(self):
62 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
62 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
63 expected = b'\x28\xb5\x2f\xfd\x00\x00\x19\x00\x00\x66\x6f\x6f'
63 expected = b"\x28\xb5\x2f\xfd\x00\x00\x19\x00\x00\x66\x6f\x6f"
64
64
65 mutable_array = bytearray(3)
65 mutable_array = bytearray(3)
66 mutable_array[:] = b'foo'
66 mutable_array[:] = b"foo"
67
67
68 sources = [
68 sources = [
69 memoryview(b'foo'),
69 memoryview(b"foo"),
70 bytearray(b'foo'),
70 bytearray(b"foo"),
71 mutable_array,
71 mutable_array,
72 ]
72 ]
73
73
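test_input_types above feeds compress() several buffer-protocol objects and asserts on the raw frame bytes. A hedged sketch of the same idea, checking equivalence and inspecting the frame header instead of hard-coding bytes:

    import zstandard as zstd

    cctx = zstd.ZstdCompressor(level=1)
    baseline = cctx.compress(b"foo")

    for source in (memoryview(b"foo"), bytearray(b"foo")):
        assert cctx.compress(source) == baseline

    params = zstd.get_frame_parameters(baseline)
    assert params.content_size == 3      # compress() knows the input size
    assert params.dict_id == 0           # no dictionary used
    assert not params.has_checksum       # checksums are opt-in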
@@ -77,43 +77,46 b' class TestCompressor_compress(unittest.T'
77 def test_compress_large(self):
77 def test_compress_large(self):
78 chunks = []
78 chunks = []
79 for i in range(255):
79 for i in range(255):
80 chunks.append(struct.Struct('>B').pack(i) * 16384)
80 chunks.append(struct.Struct(">B").pack(i) * 16384)
81
81
82 cctx = zstd.ZstdCompressor(level=3, write_content_size=False)
82 cctx = zstd.ZstdCompressor(level=3, write_content_size=False)
83 result = cctx.compress(b''.join(chunks))
83 result = cctx.compress(b"".join(chunks))
84 self.assertEqual(len(result), 999)
84 self.assertEqual(len(result), 999)
85 self.assertEqual(result[0:4], b'\x28\xb5\x2f\xfd')
85 self.assertEqual(result[0:4], b"\x28\xb5\x2f\xfd")
86
86
87 # This matches the test for read_to_iter() below.
87 # This matches the test for read_to_iter() below.
88 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
88 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
89 result = cctx.compress(b'f' * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b'o')
89 result = cctx.compress(b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE + b"o")
90 self.assertEqual(result, b'\x28\xb5\x2f\xfd\x00\x40\x54\x00\x00'
90 self.assertEqual(
91 b'\x10\x66\x66\x01\x00\xfb\xff\x39\xc0'
91 result,
92 b'\x02\x09\x00\x00\x6f')
92 b"\x28\xb5\x2f\xfd\x00\x40\x54\x00\x00"
93 b"\x10\x66\x66\x01\x00\xfb\xff\x39\xc0"
94 b"\x02\x09\x00\x00\x6f",
95 )
93
96
94 def test_negative_level(self):
97 def test_negative_level(self):
95 cctx = zstd.ZstdCompressor(level=-4)
98 cctx = zstd.ZstdCompressor(level=-4)
96 result = cctx.compress(b'foo' * 256)
99 result = cctx.compress(b"foo" * 256)
97
100
98 def test_no_magic(self):
101 def test_no_magic(self):
99 params = zstd.ZstdCompressionParameters.from_level(
102 params = zstd.ZstdCompressionParameters.from_level(1, format=zstd.FORMAT_ZSTD1)
100 1, format=zstd.FORMAT_ZSTD1)
101 cctx = zstd.ZstdCompressor(compression_params=params)
103 cctx = zstd.ZstdCompressor(compression_params=params)
102 magic = cctx.compress(b'foobar')
104 magic = cctx.compress(b"foobar")
103
105
104 params = zstd.ZstdCompressionParameters.from_level(
106 params = zstd.ZstdCompressionParameters.from_level(
105 1, format=zstd.FORMAT_ZSTD1_MAGICLESS)
107 1, format=zstd.FORMAT_ZSTD1_MAGICLESS
108 )
106 cctx = zstd.ZstdCompressor(compression_params=params)
109 cctx = zstd.ZstdCompressor(compression_params=params)
107 no_magic = cctx.compress(b'foobar')
110 no_magic = cctx.compress(b"foobar")
108
111
109 self.assertEqual(magic[0:4], b'\x28\xb5\x2f\xfd')
112 self.assertEqual(magic[0:4], b"\x28\xb5\x2f\xfd")
110 self.assertEqual(magic[4:], no_magic)
113 self.assertEqual(magic[4:], no_magic)
111
114
112 def test_write_checksum(self):
115 def test_write_checksum(self):
113 cctx = zstd.ZstdCompressor(level=1)
116 cctx = zstd.ZstdCompressor(level=1)
114 no_checksum = cctx.compress(b'foobar')
117 no_checksum = cctx.compress(b"foobar")
115 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
118 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
116 with_checksum = cctx.compress(b'foobar')
119 with_checksum = cctx.compress(b"foobar")
117
120
118 self.assertEqual(len(with_checksum), len(no_checksum) + 4)
121 self.assertEqual(len(with_checksum), len(no_checksum) + 4)
119
122
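test_no_magic above shows FORMAT_ZSTD1_MAGICLESS shaving the 4-byte magic off the frame. A hedged round-trip sketch; note the decompressor must also be told to expect the magicless variant:

    import zstandard as zstd

    params = zstd.ZstdCompressionParameters.from_level(
        1, format=zstd.FORMAT_ZSTD1_MAGICLESS
    )
    cctx = zstd.ZstdCompressor(compression_params=params)
    frame = cctx.compress(b"foobar")
    assert not frame.startswith(b"\x28\xb5\x2f\xfd")

    dctx = zstd.ZstdDecompressor(format=zstd.FORMAT_ZSTD1_MAGICLESS)
    assert dctx.decompress(frame) == b"foobar"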
@@ -125,9 +128,9 b' class TestCompressor_compress(unittest.T'
125
128
126 def test_write_content_size(self):
129 def test_write_content_size(self):
127 cctx = zstd.ZstdCompressor(level=1)
130 cctx = zstd.ZstdCompressor(level=1)
128 with_size = cctx.compress(b'foobar' * 256)
131 with_size = cctx.compress(b"foobar" * 256)
129 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
132 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
130 no_size = cctx.compress(b'foobar' * 256)
133 no_size = cctx.compress(b"foobar" * 256)
131
134
132 self.assertEqual(len(with_size), len(no_size) + 1)
135 self.assertEqual(len(with_size), len(no_size) + 1)
133
136
@@ -139,17 +142,17 b' class TestCompressor_compress(unittest.T'
139 def test_no_dict_id(self):
142 def test_no_dict_id(self):
140 samples = []
143 samples = []
141 for i in range(128):
144 for i in range(128):
142 samples.append(b'foo' * 64)
145 samples.append(b"foo" * 64)
143 samples.append(b'bar' * 64)
146 samples.append(b"bar" * 64)
144 samples.append(b'foobar' * 64)
147 samples.append(b"foobar" * 64)
145
148
146 d = zstd.train_dictionary(1024, samples)
149 d = zstd.train_dictionary(1024, samples)
147
150
148 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
151 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
149 with_dict_id = cctx.compress(b'foobarfoobar')
152 with_dict_id = cctx.compress(b"foobarfoobar")
150
153
151 cctx = zstd.ZstdCompressor(level=1, dict_data=d, write_dict_id=False)
154 cctx = zstd.ZstdCompressor(level=1, dict_data=d, write_dict_id=False)
152 no_dict_id = cctx.compress(b'foobarfoobar')
155 no_dict_id = cctx.compress(b"foobarfoobar")
153
156
154 self.assertEqual(len(with_dict_id), len(no_dict_id) + 4)
157 self.assertEqual(len(with_dict_id), len(no_dict_id) + 4)
155
158
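The dictionary tests above train on small repetitive samples and then compare frames with and without the embedded dictionary ID. A hedged end-to-end sketch of the same flow:

    import zstandard as zstd

    samples = [b"foo" * 64, b"bar" * 64, b"foobar" * 64] * 128
    d = zstd.train_dictionary(1024, samples)

    cctx = zstd.ZstdCompressor(level=1, dict_data=d)
    frame = cctx.compress(b"foobarfoobar")
    assert zstd.get_frame_parameters(frame).dict_id == d.dict_id()

    dctx = zstd.ZstdDecompressor(dict_data=d)
    assert dctx.decompress(frame) == b"foobarfoobar"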
@@ -161,23 +164,23 b' class TestCompressor_compress(unittest.T'
161 def test_compress_dict_multiple(self):
164 def test_compress_dict_multiple(self):
162 samples = []
165 samples = []
163 for i in range(128):
166 for i in range(128):
164 samples.append(b'foo' * 64)
167 samples.append(b"foo" * 64)
165 samples.append(b'bar' * 64)
168 samples.append(b"bar" * 64)
166 samples.append(b'foobar' * 64)
169 samples.append(b"foobar" * 64)
167
170
168 d = zstd.train_dictionary(8192, samples)
171 d = zstd.train_dictionary(8192, samples)
169
172
170 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
173 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
171
174
172 for i in range(32):
175 for i in range(32):
173 cctx.compress(b'foo bar foobar foo bar foobar')
176 cctx.compress(b"foo bar foobar foo bar foobar")
174
177
175 def test_dict_precompute(self):
178 def test_dict_precompute(self):
176 samples = []
179 samples = []
177 for i in range(128):
180 for i in range(128):
178 samples.append(b'foo' * 64)
181 samples.append(b"foo" * 64)
179 samples.append(b'bar' * 64)
182 samples.append(b"bar" * 64)
180 samples.append(b'foobar' * 64)
183 samples.append(b"foobar" * 64)
181
184
182 d = zstd.train_dictionary(8192, samples)
185 d = zstd.train_dictionary(8192, samples)
183 d.precompute_compress(level=1)
186 d.precompute_compress(level=1)
@@ -185,11 +188,11 b' class TestCompressor_compress(unittest.T'
185 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
188 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
186
189
187 for i in range(32):
190 for i in range(32):
188 cctx.compress(b'foo bar foobar foo bar foobar')
191 cctx.compress(b"foo bar foobar foo bar foobar")
189
192
190 def test_multithreaded(self):
193 def test_multithreaded(self):
191 chunk_size = multithreaded_chunk_size(1)
194 chunk_size = multithreaded_chunk_size(1)
192 source = b''.join([b'x' * chunk_size, b'y' * chunk_size])
195 source = b"".join([b"x" * chunk_size, b"y" * chunk_size])
193
196
194 cctx = zstd.ZstdCompressor(level=1, threads=2)
197 cctx = zstd.ZstdCompressor(level=1, threads=2)
195 compressed = cctx.compress(source)
198 compressed = cctx.compress(source)
@@ -205,73 +208,72 b' class TestCompressor_compress(unittest.T'
205 def test_multithreaded_dict(self):
208 def test_multithreaded_dict(self):
206 samples = []
209 samples = []
207 for i in range(128):
210 for i in range(128):
208 samples.append(b'foo' * 64)
211 samples.append(b"foo" * 64)
209 samples.append(b'bar' * 64)
212 samples.append(b"bar" * 64)
210 samples.append(b'foobar' * 64)
213 samples.append(b"foobar" * 64)
211
214
212 d = zstd.train_dictionary(1024, samples)
215 d = zstd.train_dictionary(1024, samples)
213
216
214 cctx = zstd.ZstdCompressor(dict_data=d, threads=2)
217 cctx = zstd.ZstdCompressor(dict_data=d, threads=2)
215
218
216 result = cctx.compress(b'foo')
219 result = cctx.compress(b"foo")
217 params = zstd.get_frame_parameters(result);
220 params = zstd.get_frame_parameters(result)
218 self.assertEqual(params.content_size, 3);
221 self.assertEqual(params.content_size, 3)
219 self.assertEqual(params.dict_id, d.dict_id())
222 self.assertEqual(params.dict_id, d.dict_id())
220
223
221 self.assertEqual(result,
224 self.assertEqual(
222 b'\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00'
225 result,
223 b'\x66\x6f\x6f')
226 b"\x28\xb5\x2f\xfd\x23\x8f\x55\x0f\x70\x03\x19\x00\x00" b"\x66\x6f\x6f",
227 )
224
228
225 def test_multithreaded_compression_params(self):
229 def test_multithreaded_compression_params(self):
226 params = zstd.ZstdCompressionParameters.from_level(0, threads=2)
230 params = zstd.ZstdCompressionParameters.from_level(0, threads=2)
227 cctx = zstd.ZstdCompressor(compression_params=params)
231 cctx = zstd.ZstdCompressor(compression_params=params)
228
232
229 result = cctx.compress(b'foo')
233 result = cctx.compress(b"foo")
230 params = zstd.get_frame_parameters(result);
234 params = zstd.get_frame_parameters(result)
231 self.assertEqual(params.content_size, 3);
235 self.assertEqual(params.content_size, 3)
232
236
233 self.assertEqual(result,
237 self.assertEqual(result, b"\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f")
234 b'\x28\xb5\x2f\xfd\x20\x03\x19\x00\x00\x66\x6f\x6f')
235
238
236
239
237 @make_cffi
240 @make_cffi
238 class TestCompressor_compressobj(unittest.TestCase):
241 class TestCompressor_compressobj(TestCase):
239 def test_compressobj_empty(self):
242 def test_compressobj_empty(self):
240 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
243 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
241 cobj = cctx.compressobj()
244 cobj = cctx.compressobj()
242 self.assertEqual(cobj.compress(b''), b'')
245 self.assertEqual(cobj.compress(b""), b"")
243 self.assertEqual(cobj.flush(),
246 self.assertEqual(cobj.flush(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
244 b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
245
247
246 def test_input_types(self):
248 def test_input_types(self):
247 expected = b'\x28\xb5\x2f\xfd\x00\x48\x19\x00\x00\x66\x6f\x6f'
249 expected = b"\x28\xb5\x2f\xfd\x00\x48\x19\x00\x00\x66\x6f\x6f"
248 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
250 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
249
251
250 mutable_array = bytearray(3)
252 mutable_array = bytearray(3)
251 mutable_array[:] = b'foo'
253 mutable_array[:] = b"foo"
252
254
253 sources = [
255 sources = [
254 memoryview(b'foo'),
256 memoryview(b"foo"),
255 bytearray(b'foo'),
257 bytearray(b"foo"),
256 mutable_array,
258 mutable_array,
257 ]
259 ]
258
260
259 for source in sources:
261 for source in sources:
260 cobj = cctx.compressobj()
262 cobj = cctx.compressobj()
261 self.assertEqual(cobj.compress(source), b'')
263 self.assertEqual(cobj.compress(source), b"")
262 self.assertEqual(cobj.flush(), expected)
264 self.assertEqual(cobj.flush(), expected)
263
265
264 def test_compressobj_large(self):
266 def test_compressobj_large(self):
265 chunks = []
267 chunks = []
266 for i in range(255):
268 for i in range(255):
267 chunks.append(struct.Struct('>B').pack(i) * 16384)
269 chunks.append(struct.Struct(">B").pack(i) * 16384)
268
270
269 cctx = zstd.ZstdCompressor(level=3)
271 cctx = zstd.ZstdCompressor(level=3)
270 cobj = cctx.compressobj()
272 cobj = cctx.compressobj()
271
273
272 result = cobj.compress(b''.join(chunks)) + cobj.flush()
274 result = cobj.compress(b"".join(chunks)) + cobj.flush()
273 self.assertEqual(len(result), 999)
275 self.assertEqual(len(result), 999)
274 self.assertEqual(result[0:4], b'\x28\xb5\x2f\xfd')
276 self.assertEqual(result[0:4], b"\x28\xb5\x2f\xfd")
275
277
276 params = zstd.get_frame_parameters(result)
278 params = zstd.get_frame_parameters(result)
277 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
279 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
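compressobj() above mirrors the zlib incremental API: compress() may buffer and return b"" until a flush. A hedged sketch of a full round trip; declaring size up front lets the frame carry its content size so a plain decompress() works afterwards:

    import zstandard as zstd

    data = b"foobar" * 1024

    cctx = zstd.ZstdCompressor(level=3)
    cobj = cctx.compressobj(size=len(data))

    chunks = [cobj.compress(data)]                            # may be b""
    chunks.append(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK))   # force a block out
    chunks.append(cobj.flush())                               # end the frame

    frame = b"".join(chunks)
    assert zstd.ZstdDecompressor().decompress(frame) == data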
@@ -282,10 +284,10 b' class TestCompressor_compressobj(unittes'
282 def test_write_checksum(self):
284 def test_write_checksum(self):
283 cctx = zstd.ZstdCompressor(level=1)
285 cctx = zstd.ZstdCompressor(level=1)
284 cobj = cctx.compressobj()
286 cobj = cctx.compressobj()
285 no_checksum = cobj.compress(b'foobar') + cobj.flush()
287 no_checksum = cobj.compress(b"foobar") + cobj.flush()
286 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
288 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
287 cobj = cctx.compressobj()
289 cobj = cctx.compressobj()
288 with_checksum = cobj.compress(b'foobar') + cobj.flush()
290 with_checksum = cobj.compress(b"foobar") + cobj.flush()
289
291
290 no_params = zstd.get_frame_parameters(no_checksum)
292 no_params = zstd.get_frame_parameters(no_checksum)
291 with_params = zstd.get_frame_parameters(with_checksum)
293 with_params = zstd.get_frame_parameters(with_checksum)
@@ -300,11 +302,11 b' class TestCompressor_compressobj(unittes'
300
302
301 def test_write_content_size(self):
303 def test_write_content_size(self):
302 cctx = zstd.ZstdCompressor(level=1)
304 cctx = zstd.ZstdCompressor(level=1)
303 cobj = cctx.compressobj(size=len(b'foobar' * 256))
305 cobj = cctx.compressobj(size=len(b"foobar" * 256))
304 with_size = cobj.compress(b'foobar' * 256) + cobj.flush()
306 with_size = cobj.compress(b"foobar" * 256) + cobj.flush()
305 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
307 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
306 cobj = cctx.compressobj(size=len(b'foobar' * 256))
308 cobj = cctx.compressobj(size=len(b"foobar" * 256))
307 no_size = cobj.compress(b'foobar' * 256) + cobj.flush()
309 no_size = cobj.compress(b"foobar" * 256) + cobj.flush()
308
310
309 no_params = zstd.get_frame_parameters(no_size)
311 no_params = zstd.get_frame_parameters(no_size)
310 with_params = zstd.get_frame_parameters(with_size)
312 with_params = zstd.get_frame_parameters(with_size)
@@ -321,48 +323,53 b' class TestCompressor_compressobj(unittes'
321 cctx = zstd.ZstdCompressor()
323 cctx = zstd.ZstdCompressor()
322 cobj = cctx.compressobj()
324 cobj = cctx.compressobj()
323
325
324 cobj.compress(b'foo')
326 cobj.compress(b"foo")
325 cobj.flush()
327 cobj.flush()
326
328
327 with self.assertRaisesRegexp(zstd.ZstdError, r'cannot call compress\(\) after compressor'):
329 with self.assertRaisesRegex(
328 cobj.compress(b'foo')
330 zstd.ZstdError, r"cannot call compress\(\) after compressor"
331 ):
332 cobj.compress(b"foo")
329
333
330 with self.assertRaisesRegexp(zstd.ZstdError, 'compressor object already finished'):
334 with self.assertRaisesRegex(
335 zstd.ZstdError, "compressor object already finished"
336 ):
331 cobj.flush()
337 cobj.flush()
332
338
333 def test_flush_block_repeated(self):
339 def test_flush_block_repeated(self):
334 cctx = zstd.ZstdCompressor(level=1)
340 cctx = zstd.ZstdCompressor(level=1)
335 cobj = cctx.compressobj()
341 cobj = cctx.compressobj()
336
342
337 self.assertEqual(cobj.compress(b'foo'), b'')
343 self.assertEqual(cobj.compress(b"foo"), b"")
338 self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK),
344 self.assertEqual(
339 b'\x28\xb5\x2f\xfd\x00\x48\x18\x00\x00foo')
345 cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK),
340 self.assertEqual(cobj.compress(b'bar'), b'')
346 b"\x28\xb5\x2f\xfd\x00\x48\x18\x00\x00foo",
347 )
348 self.assertEqual(cobj.compress(b"bar"), b"")
341 # 3 byte header plus content.
349 # 3 byte header plus content.
342 self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK),
350 self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"\x18\x00\x00bar")
343 b'\x18\x00\x00bar')
351 self.assertEqual(cobj.flush(), b"\x01\x00\x00")
344 self.assertEqual(cobj.flush(), b'\x01\x00\x00')
345
352
346 def test_flush_empty_block(self):
353 def test_flush_empty_block(self):
347 cctx = zstd.ZstdCompressor(write_checksum=True)
354 cctx = zstd.ZstdCompressor(write_checksum=True)
348 cobj = cctx.compressobj()
355 cobj = cctx.compressobj()
349
356
350 cobj.compress(b'foobar')
357 cobj.compress(b"foobar")
351 cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)
358 cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)
352 # No-op if no block is active (this is internal to zstd).
359 # No-op if no block is active (this is internal to zstd).
353 self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b'')
360 self.assertEqual(cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK), b"")
354
361
355 trailing = cobj.flush()
362 trailing = cobj.flush()
356 # 3 bytes block header + 4 bytes frame checksum
363 # 3 bytes block header + 4 bytes frame checksum
357 self.assertEqual(len(trailing), 7)
364 self.assertEqual(len(trailing), 7)
358 header = trailing[0:3]
365 header = trailing[0:3]
359 self.assertEqual(header, b'\x01\x00\x00')
366 self.assertEqual(header, b"\x01\x00\x00")
360
367
361 def test_multithreaded(self):
368 def test_multithreaded(self):
362 source = io.BytesIO()
369 source = io.BytesIO()
363 source.write(b'a' * 1048576)
370 source.write(b"a" * 1048576)
364 source.write(b'b' * 1048576)
371 source.write(b"b" * 1048576)
365 source.write(b'c' * 1048576)
372 source.write(b"c" * 1048576)
366 source.seek(0)
373 source.seek(0)
367
374
368 cctx = zstd.ZstdCompressor(level=1, threads=2)
375 cctx = zstd.ZstdCompressor(level=1, threads=2)
@@ -378,9 +385,9 b' class TestCompressor_compressobj(unittes'
378
385
379 chunks.append(cobj.flush())
386 chunks.append(cobj.flush())
380
387
381 compressed = b''.join(chunks)
388 compressed = b"".join(chunks)
382
389
383 self.assertEqual(len(compressed), 295)
390 self.assertEqual(len(compressed), 119)
384
391
385 def test_frame_progression(self):
392 def test_frame_progression(self):
386 cctx = zstd.ZstdCompressor()
393 cctx = zstd.ZstdCompressor()
@@ -389,7 +396,7 b' class TestCompressor_compressobj(unittes'
389
396
390 cobj = cctx.compressobj()
397 cobj = cctx.compressobj()
391
398
392 cobj.compress(b'foobar')
399 cobj.compress(b"foobar")
393 self.assertEqual(cctx.frame_progression(), (6, 0, 0))
400 self.assertEqual(cctx.frame_progression(), (6, 0, 0))
394
401
395 cobj.flush()
402 cobj.flush()
@@ -399,20 +406,20 b' class TestCompressor_compressobj(unittes'
399 cctx = zstd.ZstdCompressor()
406 cctx = zstd.ZstdCompressor()
400
407
401 cobj = cctx.compressobj(size=2)
408 cobj = cctx.compressobj(size=2)
402 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
409 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
403 cobj.compress(b'foo')
410 cobj.compress(b"foo")
404
411
405 # Try another operation on this instance.
412 # Try another operation on this instance.
406 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
413 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
407 cobj.compress(b'aa')
414 cobj.compress(b"aa")
408
415
409 # Try another operation on the compressor.
416 # Try another operation on the compressor.
410 cctx.compressobj(size=4)
417 cctx.compressobj(size=4)
411 cctx.compress(b'foobar')
418 cctx.compress(b"foobar")
412
419
413
420
414 @make_cffi
421 @make_cffi
415 class TestCompressor_copy_stream(unittest.TestCase):
422 class TestCompressor_copy_stream(TestCase):
416 def test_no_read(self):
423 def test_no_read(self):
417 source = object()
424 source = object()
418 dest = io.BytesIO()
425 dest = io.BytesIO()
@@ -438,13 +445,12 b' class TestCompressor_copy_stream(unittes'
438 self.assertEqual(int(r), 0)
445 self.assertEqual(int(r), 0)
439 self.assertEqual(w, 9)
446 self.assertEqual(w, 9)
440
447
441 self.assertEqual(dest.getvalue(),
448 self.assertEqual(dest.getvalue(), b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
442 b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
443
449
444 def test_large_data(self):
450 def test_large_data(self):
445 source = io.BytesIO()
451 source = io.BytesIO()
446 for i in range(255):
452 for i in range(255):
447 source.write(struct.Struct('>B').pack(i) * 16384)
453 source.write(struct.Struct(">B").pack(i) * 16384)
448 source.seek(0)
454 source.seek(0)
449
455
450 dest = io.BytesIO()
456 dest = io.BytesIO()
@@ -461,7 +467,7 b' class TestCompressor_copy_stream(unittes'
461 self.assertFalse(params.has_checksum)
467 self.assertFalse(params.has_checksum)
462
468
463 def test_write_checksum(self):
469 def test_write_checksum(self):
464 source = io.BytesIO(b'foobar')
470 source = io.BytesIO(b"foobar")
465 no_checksum = io.BytesIO()
471 no_checksum = io.BytesIO()
466
472
467 cctx = zstd.ZstdCompressor(level=1)
473 cctx = zstd.ZstdCompressor(level=1)
@@ -472,8 +478,7 b' class TestCompressor_copy_stream(unittes'
472 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
478 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
473 cctx.copy_stream(source, with_checksum)
479 cctx.copy_stream(source, with_checksum)
474
480
475 self.assertEqual(len(with_checksum.getvalue()),
481 self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4)
476 len(no_checksum.getvalue()) + 4)
477
482
478 no_params = zstd.get_frame_parameters(no_checksum.getvalue())
483 no_params = zstd.get_frame_parameters(no_checksum.getvalue())
479 with_params = zstd.get_frame_parameters(with_checksum.getvalue())
484 with_params = zstd.get_frame_parameters(with_checksum.getvalue())
@@ -485,7 +490,7 b' class TestCompressor_copy_stream(unittes'
485 self.assertTrue(with_params.has_checksum)
490 self.assertTrue(with_params.has_checksum)
486
491
487 def test_write_content_size(self):
492 def test_write_content_size(self):
488 source = io.BytesIO(b'foobar' * 256)
493 source = io.BytesIO(b"foobar" * 256)
489 no_size = io.BytesIO()
494 no_size = io.BytesIO()
490
495
491 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
496 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
@@ -497,16 +502,14 b' class TestCompressor_copy_stream(unittes'
497 cctx.copy_stream(source, with_size)
502 cctx.copy_stream(source, with_size)
498
503
499 # Source content size is unknown, so no content size written.
504 # Source content size is unknown, so no content size written.
500 self.assertEqual(len(with_size.getvalue()),
505 self.assertEqual(len(with_size.getvalue()), len(no_size.getvalue()))
501 len(no_size.getvalue()))
502
506
503 source.seek(0)
507 source.seek(0)
504 with_size = io.BytesIO()
508 with_size = io.BytesIO()
505 cctx.copy_stream(source, with_size, size=len(source.getvalue()))
509 cctx.copy_stream(source, with_size, size=len(source.getvalue()))
506
510
507 # We specified source size, so content size header is present.
511 # We specified source size, so content size header is present.
508 self.assertEqual(len(with_size.getvalue()),
512 self.assertEqual(len(with_size.getvalue()), len(no_size.getvalue()) + 1)
509 len(no_size.getvalue()) + 1)
510
513
511 no_params = zstd.get_frame_parameters(no_size.getvalue())
514 no_params = zstd.get_frame_parameters(no_size.getvalue())
512 with_params = zstd.get_frame_parameters(with_size.getvalue())
515 with_params = zstd.get_frame_parameters(with_size.getvalue())
@@ -518,7 +521,7 b' class TestCompressor_copy_stream(unittes'
518 self.assertFalse(with_params.has_checksum)
521 self.assertFalse(with_params.has_checksum)
519
522
520 def test_read_write_size(self):
523 def test_read_write_size(self):
521 source = OpCountingBytesIO(b'foobarfoobar')
524 source = OpCountingBytesIO(b"foobarfoobar")
522 dest = OpCountingBytesIO()
525 dest = OpCountingBytesIO()
523 cctx = zstd.ZstdCompressor()
526 cctx = zstd.ZstdCompressor()
524 r, w = cctx.copy_stream(source, dest, read_size=1, write_size=1)
527 r, w = cctx.copy_stream(source, dest, read_size=1, write_size=1)
@@ -530,16 +533,16 b' class TestCompressor_copy_stream(unittes'
530
533
531 def test_multithreaded(self):
534 def test_multithreaded(self):
532 source = io.BytesIO()
535 source = io.BytesIO()
533 source.write(b'a' * 1048576)
536 source.write(b"a" * 1048576)
534 source.write(b'b' * 1048576)
537 source.write(b"b" * 1048576)
535 source.write(b'c' * 1048576)
538 source.write(b"c" * 1048576)
536 source.seek(0)
539 source.seek(0)
537
540
538 dest = io.BytesIO()
541 dest = io.BytesIO()
539 cctx = zstd.ZstdCompressor(threads=2, write_content_size=False)
542 cctx = zstd.ZstdCompressor(threads=2, write_content_size=False)
540 r, w = cctx.copy_stream(source, dest)
543 r, w = cctx.copy_stream(source, dest)
541 self.assertEqual(r, 3145728)
544 self.assertEqual(r, 3145728)
542 self.assertEqual(w, 295)
545 self.assertEqual(w, 111)
543
546
544 params = zstd.get_frame_parameters(dest.getvalue())
547 params = zstd.get_frame_parameters(dest.getvalue())
545 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
548 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
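copy_stream() above pumps bytes from a readable source into a writable destination, returning a (bytes_read, bytes_written) pair. A hedged sketch:

    import io
    import zstandard as zstd

    source = io.BytesIO(b"data to compress" * 1024)
    dest = io.BytesIO()

    cctx = zstd.ZstdCompressor(level=3)
    read_count, write_count = cctx.copy_stream(source, dest)

    assert read_count == len(source.getvalue())              # uncompressed bytes consumed
    assert dest.getvalue().startswith(b"\x28\xb5\x2f\xfd")   # a complete zstd frame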
@@ -559,15 +562,15 b' class TestCompressor_copy_stream(unittes'
559
562
560 def test_bad_size(self):
563 def test_bad_size(self):
561 source = io.BytesIO()
564 source = io.BytesIO()
562 source.write(b'a' * 32768)
565 source.write(b"a" * 32768)
563 source.write(b'b' * 32768)
566 source.write(b"b" * 32768)
564 source.seek(0)
567 source.seek(0)
565
568
566 dest = io.BytesIO()
569 dest = io.BytesIO()
567
570
568 cctx = zstd.ZstdCompressor()
571 cctx = zstd.ZstdCompressor()
569
572
570 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
573 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
571 cctx.copy_stream(source, dest, size=42)
574 cctx.copy_stream(source, dest, size=42)
572
575
573 # Try another operation on this compressor.
576 # Try another operation on this compressor.
@@ -577,31 +580,31 b' class TestCompressor_copy_stream(unittes'
577
580
578
581
579 @make_cffi
582 @make_cffi
580 class TestCompressor_stream_reader(unittest.TestCase):
583 class TestCompressor_stream_reader(TestCase):
581 def test_context_manager(self):
584 def test_context_manager(self):
582 cctx = zstd.ZstdCompressor()
585 cctx = zstd.ZstdCompressor()
583
586
584 with cctx.stream_reader(b'foo') as reader:
587 with cctx.stream_reader(b"foo") as reader:
585 with self.assertRaisesRegexp(ValueError, 'cannot __enter__ multiple times'):
588 with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"):
586 with reader as reader2:
589 with reader as reader2:
587 pass
590 pass
588
591
589 def test_no_context_manager(self):
592 def test_no_context_manager(self):
590 cctx = zstd.ZstdCompressor()
593 cctx = zstd.ZstdCompressor()
591
594
592 reader = cctx.stream_reader(b'foo')
595 reader = cctx.stream_reader(b"foo")
593 reader.read(4)
596 reader.read(4)
594 self.assertFalse(reader.closed)
597 self.assertFalse(reader.closed)
595
598
596 reader.close()
599 reader.close()
597 self.assertTrue(reader.closed)
600 self.assertTrue(reader.closed)
598 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
601 with self.assertRaisesRegex(ValueError, "stream is closed"):
599 reader.read(1)
602 reader.read(1)
600
603
601 def test_not_implemented(self):
604 def test_not_implemented(self):
602 cctx = zstd.ZstdCompressor()
605 cctx = zstd.ZstdCompressor()
603
606
604 with cctx.stream_reader(b'foo' * 60) as reader:
607 with cctx.stream_reader(b"foo" * 60) as reader:
605 with self.assertRaises(io.UnsupportedOperation):
608 with self.assertRaises(io.UnsupportedOperation):
606 reader.readline()
609 reader.readline()
607
610
@@ -618,12 +621,12 b' class TestCompressor_stream_reader(unitt'
618 reader.writelines([])
621 reader.writelines([])
619
622
620 with self.assertRaises(OSError):
623 with self.assertRaises(OSError):
621 reader.write(b'foo')
624 reader.write(b"foo")
622
625
623 def test_constant_methods(self):
626 def test_constant_methods(self):
624 cctx = zstd.ZstdCompressor()
627 cctx = zstd.ZstdCompressor()
625
628
626 with cctx.stream_reader(b'boo') as reader:
629 with cctx.stream_reader(b"boo") as reader:
627 self.assertTrue(reader.readable())
630 self.assertTrue(reader.readable())
628 self.assertFalse(reader.writable())
631 self.assertFalse(reader.writable())
629 self.assertFalse(reader.seekable())
632 self.assertFalse(reader.seekable())
@@ -637,27 +640,29 b' class TestCompressor_stream_reader(unitt'
637 def test_read_closed(self):
640 def test_read_closed(self):
638 cctx = zstd.ZstdCompressor()
641 cctx = zstd.ZstdCompressor()
639
642
640 with cctx.stream_reader(b'foo' * 60) as reader:
643 with cctx.stream_reader(b"foo" * 60) as reader:
641 reader.close()
644 reader.close()
642 self.assertTrue(reader.closed)
645 self.assertTrue(reader.closed)
643 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
646 with self.assertRaisesRegex(ValueError, "stream is closed"):
644 reader.read(10)
647 reader.read(10)
645
648
646 def test_read_sizes(self):
649 def test_read_sizes(self):
647 cctx = zstd.ZstdCompressor()
650 cctx = zstd.ZstdCompressor()
648 foo = cctx.compress(b'foo')
651 foo = cctx.compress(b"foo")
649
652
650 with cctx.stream_reader(b'foo') as reader:
653 with cctx.stream_reader(b"foo") as reader:
651 with self.assertRaisesRegexp(ValueError, 'cannot read negative amounts less than -1'):
654 with self.assertRaisesRegex(
655 ValueError, "cannot read negative amounts less than -1"
656 ):
652 reader.read(-2)
657 reader.read(-2)
653
658
654 self.assertEqual(reader.read(0), b'')
659 self.assertEqual(reader.read(0), b"")
655 self.assertEqual(reader.read(), foo)
660 self.assertEqual(reader.read(), foo)
656
661
657 def test_read_buffer(self):
662 def test_read_buffer(self):
658 cctx = zstd.ZstdCompressor()
663 cctx = zstd.ZstdCompressor()
659
664
660 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
665 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
661 frame = cctx.compress(source)
666 frame = cctx.compress(source)
662
667
663 with cctx.stream_reader(source) as reader:
668 with cctx.stream_reader(source) as reader:
@@ -667,13 +672,13 b' class TestCompressor_stream_reader(unitt'
667 result = reader.read(8192)
672 result = reader.read(8192)
668 self.assertEqual(result, frame)
673 self.assertEqual(result, frame)
669 self.assertEqual(reader.tell(), len(result))
674 self.assertEqual(reader.tell(), len(result))
670 self.assertEqual(reader.read(), b'')
675 self.assertEqual(reader.read(), b"")
671 self.assertEqual(reader.tell(), len(result))
676 self.assertEqual(reader.tell(), len(result))
672
677
673 def test_read_buffer_small_chunks(self):
678 def test_read_buffer_small_chunks(self):
674 cctx = zstd.ZstdCompressor()
679 cctx = zstd.ZstdCompressor()
675
680
676 source = b'foo' * 60
681 source = b"foo" * 60
677 chunks = []
682 chunks = []
678
683
679 with cctx.stream_reader(source) as reader:
684 with cctx.stream_reader(source) as reader:
@@ -687,12 +692,12 b' class TestCompressor_stream_reader(unitt'
687 chunks.append(chunk)
692 chunks.append(chunk)
688 self.assertEqual(reader.tell(), sum(map(len, chunks)))
693 self.assertEqual(reader.tell(), sum(map(len, chunks)))
689
694
690 self.assertEqual(b''.join(chunks), cctx.compress(source))
695 self.assertEqual(b"".join(chunks), cctx.compress(source))
691
696
692 def test_read_stream(self):
697 def test_read_stream(self):
693 cctx = zstd.ZstdCompressor()
698 cctx = zstd.ZstdCompressor()
694
699
695 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
700 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
696 frame = cctx.compress(source)
701 frame = cctx.compress(source)
697
702
698 with cctx.stream_reader(io.BytesIO(source), size=len(source)) as reader:
703 with cctx.stream_reader(io.BytesIO(source), size=len(source)) as reader:
@@ -701,13 +706,13 b' class TestCompressor_stream_reader(unitt'
701 chunk = reader.read(8192)
706 chunk = reader.read(8192)
702 self.assertEqual(chunk, frame)
707 self.assertEqual(chunk, frame)
703 self.assertEqual(reader.tell(), len(chunk))
708 self.assertEqual(reader.tell(), len(chunk))
704 self.assertEqual(reader.read(), b'')
709 self.assertEqual(reader.read(), b"")
705 self.assertEqual(reader.tell(), len(chunk))
710 self.assertEqual(reader.tell(), len(chunk))
706
711
707 def test_read_stream_small_chunks(self):
712 def test_read_stream_small_chunks(self):
708 cctx = zstd.ZstdCompressor()
713 cctx = zstd.ZstdCompressor()
709
714
710 source = b'foo' * 60
715 source = b"foo" * 60
711 chunks = []
716 chunks = []
712
717
713 with cctx.stream_reader(io.BytesIO(source), size=len(source)) as reader:
718 with cctx.stream_reader(io.BytesIO(source), size=len(source)) as reader:
@@ -721,25 +726,25 b' class TestCompressor_stream_reader(unitt'
721 chunks.append(chunk)
726 chunks.append(chunk)
722 self.assertEqual(reader.tell(), sum(map(len, chunks)))
727 self.assertEqual(reader.tell(), sum(map(len, chunks)))
723
728
724 self.assertEqual(b''.join(chunks), cctx.compress(source))
729 self.assertEqual(b"".join(chunks), cctx.compress(source))
725
730
726 def test_read_after_exit(self):
731 def test_read_after_exit(self):
727 cctx = zstd.ZstdCompressor()
732 cctx = zstd.ZstdCompressor()
728
733
729 with cctx.stream_reader(b'foo' * 60) as reader:
734 with cctx.stream_reader(b"foo" * 60) as reader:
730 while reader.read(8192):
735 while reader.read(8192):
731 pass
736 pass
732
737
733 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
738 with self.assertRaisesRegex(ValueError, "stream is closed"):
734 reader.read(10)
739 reader.read(10)
735
740
736 def test_bad_size(self):
741 def test_bad_size(self):
737 cctx = zstd.ZstdCompressor()
742 cctx = zstd.ZstdCompressor()
738
743
739 source = io.BytesIO(b'foobar')
744 source = io.BytesIO(b"foobar")
740
745
741 with cctx.stream_reader(source, size=2) as reader:
746 with cctx.stream_reader(source, size=2) as reader:
742 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
747 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
743 reader.read(10)
748 reader.read(10)
744
749
745 # Try another compression operation.
750 # Try another compression operation.
@@ -748,36 +753,36 b' class TestCompressor_stream_reader(unitt'
748
753
749 def test_readall(self):
754 def test_readall(self):
750 cctx = zstd.ZstdCompressor()
755 cctx = zstd.ZstdCompressor()
751 frame = cctx.compress(b'foo' * 1024)
756 frame = cctx.compress(b"foo" * 1024)
752
757
753 reader = cctx.stream_reader(b'foo' * 1024)
758 reader = cctx.stream_reader(b"foo" * 1024)
754 self.assertEqual(reader.readall(), frame)
759 self.assertEqual(reader.readall(), frame)
755
760
756 def test_readinto(self):
761 def test_readinto(self):
757 cctx = zstd.ZstdCompressor()
762 cctx = zstd.ZstdCompressor()
758 foo = cctx.compress(b'foo')
763 foo = cctx.compress(b"foo")
759
764
760 reader = cctx.stream_reader(b'foo')
765 reader = cctx.stream_reader(b"foo")
761 with self.assertRaises(Exception):
766 with self.assertRaises(Exception):
762 reader.readinto(b'foobar')
767 reader.readinto(b"foobar")
763
768
764 # readinto() with sufficiently large destination.
769 # readinto() with sufficiently large destination.
765 b = bytearray(1024)
770 b = bytearray(1024)
766 reader = cctx.stream_reader(b'foo')
771 reader = cctx.stream_reader(b"foo")
767 self.assertEqual(reader.readinto(b), len(foo))
772 self.assertEqual(reader.readinto(b), len(foo))
768 self.assertEqual(b[0:len(foo)], foo)
773 self.assertEqual(b[0 : len(foo)], foo)
769 self.assertEqual(reader.readinto(b), 0)
774 self.assertEqual(reader.readinto(b), 0)
770 self.assertEqual(b[0:len(foo)], foo)
775 self.assertEqual(b[0 : len(foo)], foo)
771
776
772 # readinto() with small reads.
777 # readinto() with small reads.
773 b = bytearray(1024)
778 b = bytearray(1024)
774 reader = cctx.stream_reader(b'foo', read_size=1)
779 reader = cctx.stream_reader(b"foo", read_size=1)
775 self.assertEqual(reader.readinto(b), len(foo))
780 self.assertEqual(reader.readinto(b), len(foo))
776 self.assertEqual(b[0:len(foo)], foo)
781 self.assertEqual(b[0 : len(foo)], foo)
777
782
778 # Too small destination buffer.
783 # Too small destination buffer.
779 b = bytearray(2)
784 b = bytearray(2)
780 reader = cctx.stream_reader(b'foo')
785 reader = cctx.stream_reader(b"foo")
781 self.assertEqual(reader.readinto(b), 2)
786 self.assertEqual(reader.readinto(b), 2)
782 self.assertEqual(b[:], foo[0:2])
787 self.assertEqual(b[:], foo[0:2])
783 self.assertEqual(reader.readinto(b), 2)
788 self.assertEqual(reader.readinto(b), 2)
@@ -787,41 +792,41 b' class TestCompressor_stream_reader(unitt'
787
792
788 def test_readinto1(self):
793 def test_readinto1(self):
789 cctx = zstd.ZstdCompressor()
794 cctx = zstd.ZstdCompressor()
790 foo = b''.join(cctx.read_to_iter(io.BytesIO(b'foo')))
795 foo = b"".join(cctx.read_to_iter(io.BytesIO(b"foo")))
791
796
792 reader = cctx.stream_reader(b'foo')
797 reader = cctx.stream_reader(b"foo")
793 with self.assertRaises(Exception):
798 with self.assertRaises(Exception):
794 reader.readinto1(b'foobar')
799 reader.readinto1(b"foobar")
795
800
796 b = bytearray(1024)
801 b = bytearray(1024)
797 source = OpCountingBytesIO(b'foo')
802 source = OpCountingBytesIO(b"foo")
798 reader = cctx.stream_reader(source)
803 reader = cctx.stream_reader(source)
799 self.assertEqual(reader.readinto1(b), len(foo))
804 self.assertEqual(reader.readinto1(b), len(foo))
800 self.assertEqual(b[0:len(foo)], foo)
805 self.assertEqual(b[0 : len(foo)], foo)
801 self.assertEqual(source._read_count, 2)
806 self.assertEqual(source._read_count, 2)
802
807
803 # readinto1() with small reads.
808 # readinto1() with small reads.
804 b = bytearray(1024)
809 b = bytearray(1024)
805 source = OpCountingBytesIO(b'foo')
810 source = OpCountingBytesIO(b"foo")
806 reader = cctx.stream_reader(source, read_size=1)
811 reader = cctx.stream_reader(source, read_size=1)
807 self.assertEqual(reader.readinto1(b), len(foo))
812 self.assertEqual(reader.readinto1(b), len(foo))
808 self.assertEqual(b[0:len(foo)], foo)
813 self.assertEqual(b[0 : len(foo)], foo)
809 self.assertEqual(source._read_count, 4)
814 self.assertEqual(source._read_count, 4)
810
815
811 def test_read1(self):
816 def test_read1(self):
812 cctx = zstd.ZstdCompressor()
817 cctx = zstd.ZstdCompressor()
813 foo = b''.join(cctx.read_to_iter(io.BytesIO(b'foo')))
818 foo = b"".join(cctx.read_to_iter(io.BytesIO(b"foo")))
814
819
815 b = OpCountingBytesIO(b'foo')
820 b = OpCountingBytesIO(b"foo")
816 reader = cctx.stream_reader(b)
821 reader = cctx.stream_reader(b)
817
822
818 self.assertEqual(reader.read1(), foo)
823 self.assertEqual(reader.read1(), foo)
819 self.assertEqual(b._read_count, 2)
824 self.assertEqual(b._read_count, 2)
820
825
821 b = OpCountingBytesIO(b'foo')
826 b = OpCountingBytesIO(b"foo")
822 reader = cctx.stream_reader(b)
827 reader = cctx.stream_reader(b)
823
828
824 self.assertEqual(reader.read1(0), b'')
829 self.assertEqual(reader.read1(0), b"")
825 self.assertEqual(reader.read1(2), foo[0:2])
830 self.assertEqual(reader.read1(2), foo[0:2])
826 self.assertEqual(b._read_count, 2)
831 self.assertEqual(b._read_count, 2)
827 self.assertEqual(reader.read1(2), foo[2:4])
832 self.assertEqual(reader.read1(2), foo[2:4])
@@ -829,7 +834,7 b' class TestCompressor_stream_reader(unitt'
829
834
830
835
831 @make_cffi
836 @make_cffi
832 class TestCompressor_stream_writer(unittest.TestCase):
837 class TestCompressor_stream_writer(TestCase):
833 def test_io_api(self):
838 def test_io_api(self):
834 buffer = io.BytesIO()
839 buffer = io.BytesIO()
835 cctx = zstd.ZstdCompressor()
840 cctx = zstd.ZstdCompressor()
@@ -899,7 +904,7 b' class TestCompressor_stream_writer(unitt'
899 self.assertFalse(writer.closed)
904 self.assertFalse(writer.closed)
900
905
901 def test_fileno_file(self):
906 def test_fileno_file(self):
902 with tempfile.TemporaryFile('wb') as tf:
907 with tempfile.TemporaryFile("wb") as tf:
903 cctx = zstd.ZstdCompressor()
908 cctx = zstd.ZstdCompressor()
904 writer = cctx.stream_writer(tf)
909 writer = cctx.stream_writer(tf)
905
910
@@ -910,33 +915,35 b' class TestCompressor_stream_writer(unitt'
910 cctx = zstd.ZstdCompressor(level=1)
915 cctx = zstd.ZstdCompressor(level=1)
911 writer = cctx.stream_writer(buffer)
916 writer = cctx.stream_writer(buffer)
912
917
913 writer.write(b'foo' * 1024)
918 writer.write(b"foo" * 1024)
914 self.assertFalse(writer.closed)
919 self.assertFalse(writer.closed)
915 self.assertFalse(buffer.closed)
920 self.assertFalse(buffer.closed)
916 writer.close()
921 writer.close()
917 self.assertTrue(writer.closed)
922 self.assertTrue(writer.closed)
918 self.assertTrue(buffer.closed)
923 self.assertTrue(buffer.closed)
919
924
920 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
925 with self.assertRaisesRegex(ValueError, "stream is closed"):
921 writer.write(b'foo')
926 writer.write(b"foo")
922
927
923 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
928 with self.assertRaisesRegex(ValueError, "stream is closed"):
924 writer.flush()
929 writer.flush()
925
930
926 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
931 with self.assertRaisesRegex(ValueError, "stream is closed"):
927 with writer:
932 with writer:
928 pass
933 pass
929
934
930 self.assertEqual(buffer.getvalue(),
935 self.assertEqual(
931 b'\x28\xb5\x2f\xfd\x00\x48\x55\x00\x00\x18\x66\x6f'
936 buffer.getvalue(),
932 b'\x6f\x01\x00\xfa\xd3\x77\x43')
937 b"\x28\xb5\x2f\xfd\x00\x48\x55\x00\x00\x18\x66\x6f"
938 b"\x6f\x01\x00\xfa\xd3\x77\x43",
939 )
933
940
934 # Context manager exit should close stream.
941 # Context manager exit should close stream.
935 buffer = io.BytesIO()
942 buffer = io.BytesIO()
936 writer = cctx.stream_writer(buffer)
943 writer = cctx.stream_writer(buffer)
937
944
938 with writer:
945 with writer:
939 writer.write(b'foo')
946 writer.write(b"foo")
940
947
941 self.assertTrue(writer.closed)
948 self.assertTrue(writer.closed)
942
949
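stream_writer() above wraps a destination file object; leaving the with block (or calling close()) closes the wrapped stream as well, which is why the tests reach for NonClosingBytesIO. A hedged sketch that instead ends the frame with FLUSH_FRAME so the destination stays open:

    import io
    import zstandard as zstd

    dest = io.BytesIO()
    cctx = zstd.ZstdCompressor(level=1)

    writer = cctx.stream_writer(dest)
    writer.write(b"foo" * 1024)
    writer.flush(zstd.FLUSH_FRAME)     # finish the frame, keep dest usable

    frame = dest.getvalue()
    assert zstd.ZstdDecompressor().decompress(frame, max_output_size=4096) == b"foo" * 1024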
@@ -944,10 +951,10 b' class TestCompressor_stream_writer(unitt'
944 buffer = NonClosingBytesIO()
951 buffer = NonClosingBytesIO()
945 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
952 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
946 with cctx.stream_writer(buffer) as compressor:
953 with cctx.stream_writer(buffer) as compressor:
947 compressor.write(b'')
954 compressor.write(b"")
948
955
949 result = buffer.getvalue()
956 result = buffer.getvalue()
950 self.assertEqual(result, b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
957 self.assertEqual(result, b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
951
958
952 params = zstd.get_frame_parameters(result)
959 params = zstd.get_frame_parameters(result)
953 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
960 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
@@ -958,11 +965,11 b' class TestCompressor_stream_writer(unitt'
958 # Test without context manager.
965 # Test without context manager.
959 buffer = io.BytesIO()
966 buffer = io.BytesIO()
960 compressor = cctx.stream_writer(buffer)
967 compressor = cctx.stream_writer(buffer)
961 self.assertEqual(compressor.write(b''), 0)
968 self.assertEqual(compressor.write(b""), 0)
962 self.assertEqual(buffer.getvalue(), b'')
969 self.assertEqual(buffer.getvalue(), b"")
963 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 9)
970 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 9)
964 result = buffer.getvalue()
971 result = buffer.getvalue()
965 self.assertEqual(result, b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
972 self.assertEqual(result, b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
966
973
967 params = zstd.get_frame_parameters(result)
974 params = zstd.get_frame_parameters(result)
968 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
975 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
@@ -972,18 +979,18 b' class TestCompressor_stream_writer(unitt'
972
979
973 # Test write_return_read=True
980 # Test write_return_read=True
974 compressor = cctx.stream_writer(buffer, write_return_read=True)
981 compressor = cctx.stream_writer(buffer, write_return_read=True)
975 self.assertEqual(compressor.write(b''), 0)
982 self.assertEqual(compressor.write(b""), 0)
976
983
977 def test_input_types(self):
984 def test_input_types(self):
978 expected = b'\x28\xb5\x2f\xfd\x00\x48\x19\x00\x00\x66\x6f\x6f'
985 expected = b"\x28\xb5\x2f\xfd\x00\x48\x19\x00\x00\x66\x6f\x6f"
979 cctx = zstd.ZstdCompressor(level=1)
986 cctx = zstd.ZstdCompressor(level=1)
980
987
981 mutable_array = bytearray(3)
988 mutable_array = bytearray(3)
982 mutable_array[:] = b'foo'
989 mutable_array[:] = b"foo"
983
990
984 sources = [
991 sources = [
985 memoryview(b'foo'),
992 memoryview(b"foo"),
986 bytearray(b'foo'),
993 bytearray(b"foo"),
987 mutable_array,
994 mutable_array,
988 ]
995 ]
989
996
@@ -1001,51 +1008,55 b' class TestCompressor_stream_writer(unitt'
1001 buffer = NonClosingBytesIO()
1008 buffer = NonClosingBytesIO()
1002 cctx = zstd.ZstdCompressor(level=5)
1009 cctx = zstd.ZstdCompressor(level=5)
1003 with cctx.stream_writer(buffer) as compressor:
1010 with cctx.stream_writer(buffer) as compressor:
1004 self.assertEqual(compressor.write(b'foo'), 0)
1011 self.assertEqual(compressor.write(b"foo"), 0)
1005 self.assertEqual(compressor.write(b'bar'), 0)
1012 self.assertEqual(compressor.write(b"bar"), 0)
1006 self.assertEqual(compressor.write(b'x' * 8192), 0)
1013 self.assertEqual(compressor.write(b"x" * 8192), 0)
1007
1014
1008 result = buffer.getvalue()
1015 result = buffer.getvalue()
1009 self.assertEqual(result,
1016 self.assertEqual(
1010 b'\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f'
1017 result,
1011 b'\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23')
1018 b"\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f"
1019 b"\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23",
1020 )
1012
1021
1013 # Test without context manager.
1022 # Test without context manager.
1014 buffer = io.BytesIO()
1023 buffer = io.BytesIO()
1015 compressor = cctx.stream_writer(buffer)
1024 compressor = cctx.stream_writer(buffer)
1016 self.assertEqual(compressor.write(b'foo'), 0)
1025 self.assertEqual(compressor.write(b"foo"), 0)
1017 self.assertEqual(compressor.write(b'bar'), 0)
1026 self.assertEqual(compressor.write(b"bar"), 0)
1018 self.assertEqual(compressor.write(b'x' * 8192), 0)
1027 self.assertEqual(compressor.write(b"x" * 8192), 0)
1019 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 23)
1028 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 23)
1020 result = buffer.getvalue()
1029 result = buffer.getvalue()
1021 self.assertEqual(result,
1030 self.assertEqual(
1022 b'\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f'
1031 result,
1023 b'\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23')
1032 b"\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x38\x66\x6f"
1033 b"\x6f\x62\x61\x72\x78\x01\x00\xfc\xdf\x03\x23",
1034 )
1024
1035
1025 # Test with write_return_read=True.
1036 # Test with write_return_read=True.
1026 compressor = cctx.stream_writer(buffer, write_return_read=True)
1037 compressor = cctx.stream_writer(buffer, write_return_read=True)
1027 self.assertEqual(compressor.write(b'foo'), 3)
1038 self.assertEqual(compressor.write(b"foo"), 3)
1028 self.assertEqual(compressor.write(b'barbiz'), 6)
1039 self.assertEqual(compressor.write(b"barbiz"), 6)
1029 self.assertEqual(compressor.write(b'x' * 8192), 8192)
1040 self.assertEqual(compressor.write(b"x" * 8192), 8192)
1030
1041
1031 def test_dictionary(self):
1042 def test_dictionary(self):
1032 samples = []
1043 samples = []
1033 for i in range(128):
1044 for i in range(128):
1034 samples.append(b'foo' * 64)
1045 samples.append(b"foo" * 64)
1035 samples.append(b'bar' * 64)
1046 samples.append(b"bar" * 64)
1036 samples.append(b'foobar' * 64)
1047 samples.append(b"foobar" * 64)
1037
1048
1038 d = zstd.train_dictionary(8192, samples)
1049 d = zstd.train_dictionary(8192, samples)
1039
1050
1040 h = hashlib.sha1(d.as_bytes()).hexdigest()
1051 h = hashlib.sha1(d.as_bytes()).hexdigest()
1041 self.assertEqual(h, '7a2e59a876db958f74257141045af8f912e00d4e')
1052 self.assertEqual(h, "7a2e59a876db958f74257141045af8f912e00d4e")
1042
1053
1043 buffer = NonClosingBytesIO()
1054 buffer = NonClosingBytesIO()
1044 cctx = zstd.ZstdCompressor(level=9, dict_data=d)
1055 cctx = zstd.ZstdCompressor(level=9, dict_data=d)
1045 with cctx.stream_writer(buffer) as compressor:
1056 with cctx.stream_writer(buffer) as compressor:
1046 self.assertEqual(compressor.write(b'foo'), 0)
1057 self.assertEqual(compressor.write(b"foo"), 0)
1047 self.assertEqual(compressor.write(b'bar'), 0)
1058 self.assertEqual(compressor.write(b"bar"), 0)
1048 self.assertEqual(compressor.write(b'foo' * 16384), 0)
1059 self.assertEqual(compressor.write(b"foo" * 16384), 0)
1049
1060
1050 compressed = buffer.getvalue()
1061 compressed = buffer.getvalue()
1051
1062
@@ -1056,14 +1067,15 b' class TestCompressor_stream_writer(unitt'
1056 self.assertFalse(params.has_checksum)
1067 self.assertFalse(params.has_checksum)
1057
1068
1058 h = hashlib.sha1(compressed).hexdigest()
1069 h = hashlib.sha1(compressed).hexdigest()
1059 self.assertEqual(h, '0a7c05635061f58039727cdbe76388c6f4cfef06')
1070 self.assertEqual(h, "0a7c05635061f58039727cdbe76388c6f4cfef06")
1060
1071
1061 source = b'foo' + b'bar' + (b'foo' * 16384)
1072 source = b"foo" + b"bar" + (b"foo" * 16384)
1062
1073
1063 dctx = zstd.ZstdDecompressor(dict_data=d)
1074 dctx = zstd.ZstdDecompressor(dict_data=d)
1064
1075
1065 self.assertEqual(dctx.decompress(compressed, max_output_size=len(source)),
1076 self.assertEqual(
1066 source)
1077 dctx.decompress(compressed, max_output_size=len(source)), source
1078 )
1067
1079
1068 def test_compression_params(self):
1080 def test_compression_params(self):
1069 params = zstd.ZstdCompressionParameters(
1081 params = zstd.ZstdCompressionParameters(
@@ -1073,14 +1085,15 b' class TestCompressor_stream_writer(unitt'
1073 min_match=5,
1085 min_match=5,
1074 search_log=4,
1086 search_log=4,
1075 target_length=10,
1087 target_length=10,
1076 strategy=zstd.STRATEGY_FAST)
1088 strategy=zstd.STRATEGY_FAST,
1089 )
1077
1090
1078 buffer = NonClosingBytesIO()
1091 buffer = NonClosingBytesIO()
1079 cctx = zstd.ZstdCompressor(compression_params=params)
1092 cctx = zstd.ZstdCompressor(compression_params=params)
1080 with cctx.stream_writer(buffer) as compressor:
1093 with cctx.stream_writer(buffer) as compressor:
1081 self.assertEqual(compressor.write(b'foo'), 0)
1094 self.assertEqual(compressor.write(b"foo"), 0)
1082 self.assertEqual(compressor.write(b'bar'), 0)
1095 self.assertEqual(compressor.write(b"bar"), 0)
1083 self.assertEqual(compressor.write(b'foobar' * 16384), 0)
1096 self.assertEqual(compressor.write(b"foobar" * 16384), 0)
1084
1097
1085 compressed = buffer.getvalue()
1098 compressed = buffer.getvalue()
1086
1099
@@ -1091,18 +1104,18 b' class TestCompressor_stream_writer(unitt'
1091 self.assertFalse(params.has_checksum)
1104 self.assertFalse(params.has_checksum)
1092
1105
1093 h = hashlib.sha1(compressed).hexdigest()
1106 h = hashlib.sha1(compressed).hexdigest()
1094 self.assertEqual(h, 'dd4bb7d37c1a0235b38a2f6b462814376843ef0b')
1107 self.assertEqual(h, "dd4bb7d37c1a0235b38a2f6b462814376843ef0b")
1095
1108
1096 def test_write_checksum(self):
1109 def test_write_checksum(self):
1097 no_checksum = NonClosingBytesIO()
1110 no_checksum = NonClosingBytesIO()
1098 cctx = zstd.ZstdCompressor(level=1)
1111 cctx = zstd.ZstdCompressor(level=1)
1099 with cctx.stream_writer(no_checksum) as compressor:
1112 with cctx.stream_writer(no_checksum) as compressor:
1100 self.assertEqual(compressor.write(b'foobar'), 0)
1113 self.assertEqual(compressor.write(b"foobar"), 0)
1101
1114
1102 with_checksum = NonClosingBytesIO()
1115 with_checksum = NonClosingBytesIO()
1103 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
1116 cctx = zstd.ZstdCompressor(level=1, write_checksum=True)
1104 with cctx.stream_writer(with_checksum) as compressor:
1117 with cctx.stream_writer(with_checksum) as compressor:
1105 self.assertEqual(compressor.write(b'foobar'), 0)
1118 self.assertEqual(compressor.write(b"foobar"), 0)
1106
1119
1107 no_params = zstd.get_frame_parameters(no_checksum.getvalue())
1120 no_params = zstd.get_frame_parameters(no_checksum.getvalue())
1108 with_params = zstd.get_frame_parameters(with_checksum.getvalue())
1121 with_params = zstd.get_frame_parameters(with_checksum.getvalue())
@@ -1113,29 +1126,27 b' class TestCompressor_stream_writer(unitt'
1113 self.assertFalse(no_params.has_checksum)
1126 self.assertFalse(no_params.has_checksum)
1114 self.assertTrue(with_params.has_checksum)
1127 self.assertTrue(with_params.has_checksum)
1115
1128
1116 self.assertEqual(len(with_checksum.getvalue()),
1129 self.assertEqual(len(with_checksum.getvalue()), len(no_checksum.getvalue()) + 4)
1117 len(no_checksum.getvalue()) + 4)
1118
1130
1119 def test_write_content_size(self):
1131 def test_write_content_size(self):
1120 no_size = NonClosingBytesIO()
1132 no_size = NonClosingBytesIO()
1121 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1133 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1122 with cctx.stream_writer(no_size) as compressor:
1134 with cctx.stream_writer(no_size) as compressor:
1123 self.assertEqual(compressor.write(b'foobar' * 256), 0)
1135 self.assertEqual(compressor.write(b"foobar" * 256), 0)
1124
1136
1125 with_size = NonClosingBytesIO()
1137 with_size = NonClosingBytesIO()
1126 cctx = zstd.ZstdCompressor(level=1)
1138 cctx = zstd.ZstdCompressor(level=1)
1127 with cctx.stream_writer(with_size) as compressor:
1139 with cctx.stream_writer(with_size) as compressor:
1128 self.assertEqual(compressor.write(b'foobar' * 256), 0)
1140 self.assertEqual(compressor.write(b"foobar" * 256), 0)
1129
1141
1130 # Source size is not known in streaming mode, so header not
1142 # Source size is not known in streaming mode, so header not
1131 # written.
1143 # written.
1132 self.assertEqual(len(with_size.getvalue()),
1144 self.assertEqual(len(with_size.getvalue()), len(no_size.getvalue()))
1133 len(no_size.getvalue()))
1134
1145
1135 # Declaring size will write the header.
1146 # Declaring size will write the header.
1136 with_size = NonClosingBytesIO()
1147 with_size = NonClosingBytesIO()
1137 with cctx.stream_writer(with_size, size=len(b'foobar' * 256)) as compressor:
1148 with cctx.stream_writer(with_size, size=len(b"foobar" * 256)) as compressor:
1138 self.assertEqual(compressor.write(b'foobar' * 256), 0)
1149 self.assertEqual(compressor.write(b"foobar" * 256), 0)
1139
1150
1140 no_params = zstd.get_frame_parameters(no_size.getvalue())
1151 no_params = zstd.get_frame_parameters(no_size.getvalue())
1141 with_params = zstd.get_frame_parameters(with_size.getvalue())
1152 with_params = zstd.get_frame_parameters(with_size.getvalue())
@@ -1146,31 +1157,30 b' class TestCompressor_stream_writer(unitt'
1146 self.assertFalse(no_params.has_checksum)
1157 self.assertFalse(no_params.has_checksum)
1147 self.assertFalse(with_params.has_checksum)
1158 self.assertFalse(with_params.has_checksum)
1148
1159
1149 self.assertEqual(len(with_size.getvalue()),
1160 self.assertEqual(len(with_size.getvalue()), len(no_size.getvalue()) + 1)
1150 len(no_size.getvalue()) + 1)
1151
1161
1152 def test_no_dict_id(self):
1162 def test_no_dict_id(self):
1153 samples = []
1163 samples = []
1154 for i in range(128):
1164 for i in range(128):
1155 samples.append(b'foo' * 64)
1165 samples.append(b"foo" * 64)
1156 samples.append(b'bar' * 64)
1166 samples.append(b"bar" * 64)
1157 samples.append(b'foobar' * 64)
1167 samples.append(b"foobar" * 64)
1158
1168
1159 d = zstd.train_dictionary(1024, samples)
1169 d = zstd.train_dictionary(1024, samples)
1160
1170
1161 with_dict_id = NonClosingBytesIO()
1171 with_dict_id = NonClosingBytesIO()
1162 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
1172 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
1163 with cctx.stream_writer(with_dict_id) as compressor:
1173 with cctx.stream_writer(with_dict_id) as compressor:
1164 self.assertEqual(compressor.write(b'foobarfoobar'), 0)
1174 self.assertEqual(compressor.write(b"foobarfoobar"), 0)
1165
1175
1166 self.assertEqual(with_dict_id.getvalue()[4:5], b'\x03')
1176 self.assertEqual(with_dict_id.getvalue()[4:5], b"\x03")
1167
1177
1168 cctx = zstd.ZstdCompressor(level=1, dict_data=d, write_dict_id=False)
1178 cctx = zstd.ZstdCompressor(level=1, dict_data=d, write_dict_id=False)
1169 no_dict_id = NonClosingBytesIO()
1179 no_dict_id = NonClosingBytesIO()
1170 with cctx.stream_writer(no_dict_id) as compressor:
1180 with cctx.stream_writer(no_dict_id) as compressor:
1171 self.assertEqual(compressor.write(b'foobarfoobar'), 0)
1181 self.assertEqual(compressor.write(b"foobarfoobar"), 0)
1172
1182
1173 self.assertEqual(no_dict_id.getvalue()[4:5], b'\x00')
1183 self.assertEqual(no_dict_id.getvalue()[4:5], b"\x00")
1174
1184
1175 no_params = zstd.get_frame_parameters(no_dict_id.getvalue())
1185 no_params = zstd.get_frame_parameters(no_dict_id.getvalue())
1176 with_params = zstd.get_frame_parameters(with_dict_id.getvalue())
1186 with_params = zstd.get_frame_parameters(with_dict_id.getvalue())
@@ -1181,14 +1191,13 b' class TestCompressor_stream_writer(unitt'
1181 self.assertFalse(no_params.has_checksum)
1191 self.assertFalse(no_params.has_checksum)
1182 self.assertFalse(with_params.has_checksum)
1192 self.assertFalse(with_params.has_checksum)
1183
1193
1184 self.assertEqual(len(with_dict_id.getvalue()),
1194 self.assertEqual(len(with_dict_id.getvalue()), len(no_dict_id.getvalue()) + 4)
1185 len(no_dict_id.getvalue()) + 4)
1186
1195
1187 def test_memory_size(self):
1196 def test_memory_size(self):
1188 cctx = zstd.ZstdCompressor(level=3)
1197 cctx = zstd.ZstdCompressor(level=3)
1189 buffer = io.BytesIO()
1198 buffer = io.BytesIO()
1190 with cctx.stream_writer(buffer) as compressor:
1199 with cctx.stream_writer(buffer) as compressor:
1191 compressor.write(b'foo')
1200 compressor.write(b"foo")
1192 size = compressor.memory_size()
1201 size = compressor.memory_size()
1193
1202
1194 self.assertGreater(size, 100000)
1203 self.assertGreater(size, 100000)
@@ -1197,9 +1206,9 b' class TestCompressor_stream_writer(unitt'
1197 cctx = zstd.ZstdCompressor(level=3)
1206 cctx = zstd.ZstdCompressor(level=3)
1198 dest = OpCountingBytesIO()
1207 dest = OpCountingBytesIO()
1199 with cctx.stream_writer(dest, write_size=1) as compressor:
1208 with cctx.stream_writer(dest, write_size=1) as compressor:
1200 self.assertEqual(compressor.write(b'foo'), 0)
1209 self.assertEqual(compressor.write(b"foo"), 0)
1201 self.assertEqual(compressor.write(b'bar'), 0)
1210 self.assertEqual(compressor.write(b"bar"), 0)
1202 self.assertEqual(compressor.write(b'foobar'), 0)
1211 self.assertEqual(compressor.write(b"foobar"), 0)
1203
1212
1204 self.assertEqual(len(dest.getvalue()), dest._write_count)
1213 self.assertEqual(len(dest.getvalue()), dest._write_count)
1205
1214
@@ -1207,15 +1216,15 b' class TestCompressor_stream_writer(unitt'
1207 cctx = zstd.ZstdCompressor(level=3)
1216 cctx = zstd.ZstdCompressor(level=3)
1208 dest = OpCountingBytesIO()
1217 dest = OpCountingBytesIO()
1209 with cctx.stream_writer(dest) as compressor:
1218 with cctx.stream_writer(dest) as compressor:
1210 self.assertEqual(compressor.write(b'foo'), 0)
1219 self.assertEqual(compressor.write(b"foo"), 0)
1211 self.assertEqual(dest._write_count, 0)
1220 self.assertEqual(dest._write_count, 0)
1212 self.assertEqual(compressor.flush(), 12)
1221 self.assertEqual(compressor.flush(), 12)
1213 self.assertEqual(dest._write_count, 1)
1222 self.assertEqual(dest._write_count, 1)
1214 self.assertEqual(compressor.write(b'bar'), 0)
1223 self.assertEqual(compressor.write(b"bar"), 0)
1215 self.assertEqual(dest._write_count, 1)
1224 self.assertEqual(dest._write_count, 1)
1216 self.assertEqual(compressor.flush(), 6)
1225 self.assertEqual(compressor.flush(), 6)
1217 self.assertEqual(dest._write_count, 2)
1226 self.assertEqual(dest._write_count, 2)
1218 self.assertEqual(compressor.write(b'baz'), 0)
1227 self.assertEqual(compressor.write(b"baz"), 0)
1219
1228
1220 self.assertEqual(dest._write_count, 3)
1229 self.assertEqual(dest._write_count, 3)
1221
1230
@@ -1223,7 +1232,7 b' class TestCompressor_stream_writer(unitt'
1223 cctx = zstd.ZstdCompressor(level=3, write_checksum=True)
1232 cctx = zstd.ZstdCompressor(level=3, write_checksum=True)
1224 dest = OpCountingBytesIO()
1233 dest = OpCountingBytesIO()
1225 with cctx.stream_writer(dest) as compressor:
1234 with cctx.stream_writer(dest) as compressor:
1226 self.assertEqual(compressor.write(b'foobar' * 8192), 0)
1235 self.assertEqual(compressor.write(b"foobar" * 8192), 0)
1227 count = dest._write_count
1236 count = dest._write_count
1228 offset = dest.tell()
1237 offset = dest.tell()
1229 self.assertEqual(compressor.flush(), 23)
1238 self.assertEqual(compressor.flush(), 23)
@@ -1238,41 +1247,43 b' class TestCompressor_stream_writer(unitt'
1238 self.assertEqual(len(trailing), 7)
1247 self.assertEqual(len(trailing), 7)
1239
1248
1240 header = trailing[0:3]
1249 header = trailing[0:3]
1241 self.assertEqual(header, b'\x01\x00\x00')
1250 self.assertEqual(header, b"\x01\x00\x00")
1242
1251
1243 def test_flush_frame(self):
1252 def test_flush_frame(self):
1244 cctx = zstd.ZstdCompressor(level=3)
1253 cctx = zstd.ZstdCompressor(level=3)
1245 dest = OpCountingBytesIO()
1254 dest = OpCountingBytesIO()
1246
1255
1247 with cctx.stream_writer(dest) as compressor:
1256 with cctx.stream_writer(dest) as compressor:
1248 self.assertEqual(compressor.write(b'foobar' * 8192), 0)
1257 self.assertEqual(compressor.write(b"foobar" * 8192), 0)
1249 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 23)
1258 self.assertEqual(compressor.flush(zstd.FLUSH_FRAME), 23)
1250 compressor.write(b'biz' * 16384)
1259 compressor.write(b"biz" * 16384)
1251
1260
1252 self.assertEqual(dest.getvalue(),
1261 self.assertEqual(
1253 # Frame 1.
1262 dest.getvalue(),
1254 b'\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x30\x66\x6f\x6f'
1263 # Frame 1.
1255 b'\x62\x61\x72\x01\x00\xf7\xbf\xe8\xa5\x08'
1264 b"\x28\xb5\x2f\xfd\x00\x58\x75\x00\x00\x30\x66\x6f\x6f"
1256 # Frame 2.
1265 b"\x62\x61\x72\x01\x00\xf7\xbf\xe8\xa5\x08"
1257 b'\x28\xb5\x2f\xfd\x00\x58\x5d\x00\x00\x18\x62\x69\x7a'
1266 # Frame 2.
1258 b'\x01\x00\xfa\x3f\x75\x37\x04')
1267 b"\x28\xb5\x2f\xfd\x00\x58\x5d\x00\x00\x18\x62\x69\x7a"
1268 b"\x01\x00\xfa\x3f\x75\x37\x04",
1269 )
1259
1270
1260 def test_bad_flush_mode(self):
1271 def test_bad_flush_mode(self):
1261 cctx = zstd.ZstdCompressor()
1272 cctx = zstd.ZstdCompressor()
1262 dest = io.BytesIO()
1273 dest = io.BytesIO()
1263 with cctx.stream_writer(dest) as compressor:
1274 with cctx.stream_writer(dest) as compressor:
1264 with self.assertRaisesRegexp(ValueError, 'unknown flush_mode: 42'):
1275 with self.assertRaisesRegex(ValueError, "unknown flush_mode: 42"):
1265 compressor.flush(flush_mode=42)
1276 compressor.flush(flush_mode=42)
1266
1277
1267 def test_multithreaded(self):
1278 def test_multithreaded(self):
1268 dest = NonClosingBytesIO()
1279 dest = NonClosingBytesIO()
1269 cctx = zstd.ZstdCompressor(threads=2)
1280 cctx = zstd.ZstdCompressor(threads=2)
1270 with cctx.stream_writer(dest) as compressor:
1281 with cctx.stream_writer(dest) as compressor:
1271 compressor.write(b'a' * 1048576)
1282 compressor.write(b"a" * 1048576)
1272 compressor.write(b'b' * 1048576)
1283 compressor.write(b"b" * 1048576)
1273 compressor.write(b'c' * 1048576)
1284 compressor.write(b"c" * 1048576)
1274
1285
1275 self.assertEqual(len(dest.getvalue()), 295)
1286 self.assertEqual(len(dest.getvalue()), 111)
1276
1287
1277 def test_tell(self):
1288 def test_tell(self):
1278 dest = io.BytesIO()
1289 dest = io.BytesIO()
@@ -1281,7 +1292,7 b' class TestCompressor_stream_writer(unitt'
1281 self.assertEqual(compressor.tell(), 0)
1292 self.assertEqual(compressor.tell(), 0)
1282
1293
1283 for i in range(256):
1294 for i in range(256):
1284 compressor.write(b'foo' * (i + 1))
1295 compressor.write(b"foo" * (i + 1))
1285 self.assertEqual(compressor.tell(), dest.tell())
1296 self.assertEqual(compressor.tell(), dest.tell())
1286
1297
1287 def test_bad_size(self):
1298 def test_bad_size(self):
@@ -1289,9 +1300,9 b' class TestCompressor_stream_writer(unitt'
1289
1300
1290 dest = io.BytesIO()
1301 dest = io.BytesIO()
1291
1302
1292 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
1303 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
1293 with cctx.stream_writer(dest, size=2) as compressor:
1304 with cctx.stream_writer(dest, size=2) as compressor:
1294 compressor.write(b'foo')
1305 compressor.write(b"foo")
1295
1306
1296 # Test another operation.
1307 # Test another operation.
1297 with cctx.stream_writer(dest, size=42):
1308 with cctx.stream_writer(dest, size=42):
@@ -1301,20 +1312,20 b' class TestCompressor_stream_writer(unitt'
1301 dest = NonClosingBytesIO()
1312 dest = NonClosingBytesIO()
1302 cctx = zstd.ZstdCompressor()
1313 cctx = zstd.ZstdCompressor()
1303 with cctx.stream_writer(dest) as compressor:
1314 with cctx.stream_writer(dest) as compressor:
1304 with tarfile.open('tf', mode='w|', fileobj=compressor) as tf:
1315 with tarfile.open("tf", mode="w|", fileobj=compressor) as tf:
1305 tf.add(__file__, 'test_compressor.py')
1316 tf.add(__file__, "test_compressor.py")
1306
1317
1307 dest = io.BytesIO(dest.getvalue())
1318 dest = io.BytesIO(dest.getvalue())
1308
1319
1309 dctx = zstd.ZstdDecompressor()
1320 dctx = zstd.ZstdDecompressor()
1310 with dctx.stream_reader(dest) as reader:
1321 with dctx.stream_reader(dest) as reader:
1311 with tarfile.open(mode='r|', fileobj=reader) as tf:
1322 with tarfile.open(mode="r|", fileobj=reader) as tf:
1312 for member in tf:
1323 for member in tf:
1313 self.assertEqual(member.name, 'test_compressor.py')
1324 self.assertEqual(member.name, "test_compressor.py")
1314
1325
1315
1326
1316 @make_cffi
1327 @make_cffi
1317 class TestCompressor_read_to_iter(unittest.TestCase):
1328 class TestCompressor_read_to_iter(TestCase):
1318 def test_type_validation(self):
1329 def test_type_validation(self):
1319 cctx = zstd.ZstdCompressor()
1330 cctx = zstd.ZstdCompressor()
1320
1331
@@ -1323,10 +1334,10 b' class TestCompressor_read_to_iter(unitte'
1323 pass
1334 pass
1324
1335
1325 # Buffer protocol works.
1336 # Buffer protocol works.
1326 for chunk in cctx.read_to_iter(b'foobar'):
1337 for chunk in cctx.read_to_iter(b"foobar"):
1327 pass
1338 pass
1328
1339
1329 with self.assertRaisesRegexp(ValueError, 'must pass an object with a read'):
1340 with self.assertRaisesRegex(ValueError, "must pass an object with a read"):
1330 for chunk in cctx.read_to_iter(True):
1341 for chunk in cctx.read_to_iter(True):
1331 pass
1342 pass
1332
1343
@@ -1337,22 +1348,22 b' class TestCompressor_read_to_iter(unitte'
1337 it = cctx.read_to_iter(source)
1348 it = cctx.read_to_iter(source)
1338 chunks = list(it)
1349 chunks = list(it)
1339 self.assertEqual(len(chunks), 1)
1350 self.assertEqual(len(chunks), 1)
1340 compressed = b''.join(chunks)
1351 compressed = b"".join(chunks)
1341 self.assertEqual(compressed, b'\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00')
1352 self.assertEqual(compressed, b"\x28\xb5\x2f\xfd\x00\x48\x01\x00\x00")
1342
1353
1343 # And again with the buffer protocol.
1354 # And again with the buffer protocol.
1344 it = cctx.read_to_iter(b'')
1355 it = cctx.read_to_iter(b"")
1345 chunks = list(it)
1356 chunks = list(it)
1346 self.assertEqual(len(chunks), 1)
1357 self.assertEqual(len(chunks), 1)
1347 compressed2 = b''.join(chunks)
1358 compressed2 = b"".join(chunks)
1348 self.assertEqual(compressed2, compressed)
1359 self.assertEqual(compressed2, compressed)
1349
1360
1350 def test_read_large(self):
1361 def test_read_large(self):
1351 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1362 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1352
1363
1353 source = io.BytesIO()
1364 source = io.BytesIO()
1354 source.write(b'f' * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE)
1365 source.write(b"f" * zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE)
1355 source.write(b'o')
1366 source.write(b"o")
1356 source.seek(0)
1367 source.seek(0)
1357
1368
1358 # Creating an iterator should not perform any compression until
1369 # Creating an iterator should not perform any compression until
@@ -1380,9 +1391,9 b' class TestCompressor_read_to_iter(unitte'
1380 next(it)
1391 next(it)
1381
1392
1382 # We should get the same output as the one-shot compression mechanism.
1393 # We should get the same output as the one-shot compression mechanism.
1383 self.assertEqual(b''.join(chunks), cctx.compress(source.getvalue()))
1394 self.assertEqual(b"".join(chunks), cctx.compress(source.getvalue()))
1384
1395
1385 params = zstd.get_frame_parameters(b''.join(chunks))
1396 params = zstd.get_frame_parameters(b"".join(chunks))
1386 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
1397 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
1387 self.assertEqual(params.window_size, 262144)
1398 self.assertEqual(params.window_size, 262144)
1388 self.assertEqual(params.dict_id, 0)
1399 self.assertEqual(params.dict_id, 0)
@@ -1393,16 +1404,16 b' class TestCompressor_read_to_iter(unitte'
1393 chunks = list(it)
1404 chunks = list(it)
1394 self.assertEqual(len(chunks), 2)
1405 self.assertEqual(len(chunks), 2)
1395
1406
1396 params = zstd.get_frame_parameters(b''.join(chunks))
1407 params = zstd.get_frame_parameters(b"".join(chunks))
1397 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
1408 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
1398 #self.assertEqual(params.window_size, 262144)
1409 # self.assertEqual(params.window_size, 262144)
1399 self.assertEqual(params.dict_id, 0)
1410 self.assertEqual(params.dict_id, 0)
1400 self.assertFalse(params.has_checksum)
1411 self.assertFalse(params.has_checksum)
1401
1412
1402 self.assertEqual(b''.join(chunks), cctx.compress(source.getvalue()))
1413 self.assertEqual(b"".join(chunks), cctx.compress(source.getvalue()))
1403
1414
1404 def test_read_write_size(self):
1415 def test_read_write_size(self):
1405 source = OpCountingBytesIO(b'foobarfoobar')
1416 source = OpCountingBytesIO(b"foobarfoobar")
1406 cctx = zstd.ZstdCompressor(level=3)
1417 cctx = zstd.ZstdCompressor(level=3)
1407 for chunk in cctx.read_to_iter(source, read_size=1, write_size=1):
1418 for chunk in cctx.read_to_iter(source, read_size=1, write_size=1):
1408 self.assertEqual(len(chunk), 1)
1419 self.assertEqual(len(chunk), 1)
@@ -1411,42 +1422,42 b' class TestCompressor_read_to_iter(unitte'
1411
1422
1412 def test_multithreaded(self):
1423 def test_multithreaded(self):
1413 source = io.BytesIO()
1424 source = io.BytesIO()
1414 source.write(b'a' * 1048576)
1425 source.write(b"a" * 1048576)
1415 source.write(b'b' * 1048576)
1426 source.write(b"b" * 1048576)
1416 source.write(b'c' * 1048576)
1427 source.write(b"c" * 1048576)
1417 source.seek(0)
1428 source.seek(0)
1418
1429
1419 cctx = zstd.ZstdCompressor(threads=2)
1430 cctx = zstd.ZstdCompressor(threads=2)
1420
1431
1421 compressed = b''.join(cctx.read_to_iter(source))
1432 compressed = b"".join(cctx.read_to_iter(source))
1422 self.assertEqual(len(compressed), 295)
1433 self.assertEqual(len(compressed), 111)
1423
1434
1424 def test_bad_size(self):
1435 def test_bad_size(self):
1425 cctx = zstd.ZstdCompressor()
1436 cctx = zstd.ZstdCompressor()
1426
1437
1427 source = io.BytesIO(b'a' * 42)
1438 source = io.BytesIO(b"a" * 42)
1428
1439
1429 with self.assertRaisesRegexp(zstd.ZstdError, 'Src size is incorrect'):
1440 with self.assertRaisesRegex(zstd.ZstdError, "Src size is incorrect"):
1430 b''.join(cctx.read_to_iter(source, size=2))
1441 b"".join(cctx.read_to_iter(source, size=2))
1431
1442
1432 # Test another operation on errored compressor.
1443 # Test another operation on errored compressor.
1433 b''.join(cctx.read_to_iter(source))
1444 b"".join(cctx.read_to_iter(source))
1434
1445
1435
1446
1436 @make_cffi
1447 @make_cffi
1437 class TestCompressor_chunker(unittest.TestCase):
1448 class TestCompressor_chunker(TestCase):
1438 def test_empty(self):
1449 def test_empty(self):
1439 cctx = zstd.ZstdCompressor(write_content_size=False)
1450 cctx = zstd.ZstdCompressor(write_content_size=False)
1440 chunker = cctx.chunker()
1451 chunker = cctx.chunker()
1441
1452
1442 it = chunker.compress(b'')
1453 it = chunker.compress(b"")
1443
1454
1444 with self.assertRaises(StopIteration):
1455 with self.assertRaises(StopIteration):
1445 next(it)
1456 next(it)
1446
1457
1447 it = chunker.finish()
1458 it = chunker.finish()
1448
1459
1449 self.assertEqual(next(it), b'\x28\xb5\x2f\xfd\x00\x58\x01\x00\x00')
1460 self.assertEqual(next(it), b"\x28\xb5\x2f\xfd\x00\x58\x01\x00\x00")
1450
1461
1451 with self.assertRaises(StopIteration):
1462 with self.assertRaises(StopIteration):
1452 next(it)
1463 next(it)
@@ -1455,21 +1466,23 b' class TestCompressor_chunker(unittest.Te'
1455 cctx = zstd.ZstdCompressor()
1466 cctx = zstd.ZstdCompressor()
1456 chunker = cctx.chunker()
1467 chunker = cctx.chunker()
1457
1468
1458 it = chunker.compress(b'foobar')
1469 it = chunker.compress(b"foobar")
1459
1470
1460 with self.assertRaises(StopIteration):
1471 with self.assertRaises(StopIteration):
1461 next(it)
1472 next(it)
1462
1473
1463 it = chunker.compress(b'baz' * 30)
1474 it = chunker.compress(b"baz" * 30)
1464
1475
1465 with self.assertRaises(StopIteration):
1476 with self.assertRaises(StopIteration):
1466 next(it)
1477 next(it)
1467
1478
1468 it = chunker.finish()
1479 it = chunker.finish()
1469
1480
1470 self.assertEqual(next(it),
1481 self.assertEqual(
1471 b'\x28\xb5\x2f\xfd\x00\x58\x7d\x00\x00\x48\x66\x6f'
1482 next(it),
1472 b'\x6f\x62\x61\x72\x62\x61\x7a\x01\x00\xe4\xe4\x8e')
1483 b"\x28\xb5\x2f\xfd\x00\x58\x7d\x00\x00\x48\x66\x6f"
1484 b"\x6f\x62\x61\x72\x62\x61\x7a\x01\x00\xe4\xe4\x8e",
1485 )
1473
1486
1474 with self.assertRaises(StopIteration):
1487 with self.assertRaises(StopIteration):
1475 next(it)
1488 next(it)
@@ -1478,57 +1491,60 b' class TestCompressor_chunker(unittest.Te'
1478 cctx = zstd.ZstdCompressor()
1491 cctx = zstd.ZstdCompressor()
1479 chunker = cctx.chunker(size=1024)
1492 chunker = cctx.chunker(size=1024)
1480
1493
1481 it = chunker.compress(b'x' * 1000)
1494 it = chunker.compress(b"x" * 1000)
1482
1495
1483 with self.assertRaises(StopIteration):
1496 with self.assertRaises(StopIteration):
1484 next(it)
1497 next(it)
1485
1498
1486 it = chunker.compress(b'y' * 24)
1499 it = chunker.compress(b"y" * 24)
1487
1500
1488 with self.assertRaises(StopIteration):
1501 with self.assertRaises(StopIteration):
1489 next(it)
1502 next(it)
1490
1503
1491 chunks = list(chunker.finish())
1504 chunks = list(chunker.finish())
1492
1505
1493 self.assertEqual(chunks, [
1506 self.assertEqual(
1494 b'\x28\xb5\x2f\xfd\x60\x00\x03\x65\x00\x00\x18\x78\x78\x79\x02\x00'
1507 chunks,
1495 b'\xa0\x16\xe3\x2b\x80\x05'
1508 [
1496 ])
1509 b"\x28\xb5\x2f\xfd\x60\x00\x03\x65\x00\x00\x18\x78\x78\x79\x02\x00"
1510 b"\xa0\x16\xe3\x2b\x80\x05"
1511 ],
1512 )
1497
1513
1498 dctx = zstd.ZstdDecompressor()
1514 dctx = zstd.ZstdDecompressor()
1499
1515
1500 self.assertEqual(dctx.decompress(b''.join(chunks)),
1516 self.assertEqual(dctx.decompress(b"".join(chunks)), (b"x" * 1000) + (b"y" * 24))
1501 (b'x' * 1000) + (b'y' * 24))
1502
1517
1503 def test_small_chunk_size(self):
1518 def test_small_chunk_size(self):
1504 cctx = zstd.ZstdCompressor()
1519 cctx = zstd.ZstdCompressor()
1505 chunker = cctx.chunker(chunk_size=1)
1520 chunker = cctx.chunker(chunk_size=1)
1506
1521
1507 chunks = list(chunker.compress(b'foo' * 1024))
1522 chunks = list(chunker.compress(b"foo" * 1024))
1508 self.assertEqual(chunks, [])
1523 self.assertEqual(chunks, [])
1509
1524
1510 chunks = list(chunker.finish())
1525 chunks = list(chunker.finish())
1511 self.assertTrue(all(len(chunk) == 1 for chunk in chunks))
1526 self.assertTrue(all(len(chunk) == 1 for chunk in chunks))
1512
1527
1513 self.assertEqual(
1528 self.assertEqual(
1514 b''.join(chunks),
1529 b"".join(chunks),
1515 b'\x28\xb5\x2f\xfd\x00\x58\x55\x00\x00\x18\x66\x6f\x6f\x01\x00'
1530 b"\x28\xb5\x2f\xfd\x00\x58\x55\x00\x00\x18\x66\x6f\x6f\x01\x00"
1516 b'\xfa\xd3\x77\x43')
1531 b"\xfa\xd3\x77\x43",
1532 )
1517
1533
1518 dctx = zstd.ZstdDecompressor()
1534 dctx = zstd.ZstdDecompressor()
1519 self.assertEqual(dctx.decompress(b''.join(chunks),
1535 self.assertEqual(
1520 max_output_size=10000),
1536 dctx.decompress(b"".join(chunks), max_output_size=10000), b"foo" * 1024
1521 b'foo' * 1024)
1537 )
1522
1538
1523 def test_input_types(self):
1539 def test_input_types(self):
1524 cctx = zstd.ZstdCompressor()
1540 cctx = zstd.ZstdCompressor()
1525
1541
1526 mutable_array = bytearray(3)
1542 mutable_array = bytearray(3)
1527 mutable_array[:] = b'foo'
1543 mutable_array[:] = b"foo"
1528
1544
1529 sources = [
1545 sources = [
1530 memoryview(b'foo'),
1546 memoryview(b"foo"),
1531 bytearray(b'foo'),
1547 bytearray(b"foo"),
1532 mutable_array,
1548 mutable_array,
1533 ]
1549 ]
1534
1550
@@ -1536,28 +1552,32 b' class TestCompressor_chunker(unittest.Te'
1536 chunker = cctx.chunker()
1552 chunker = cctx.chunker()
1537
1553
1538 self.assertEqual(list(chunker.compress(source)), [])
1554 self.assertEqual(list(chunker.compress(source)), [])
1539 self.assertEqual(list(chunker.finish()), [
1555 self.assertEqual(
1540 b'\x28\xb5\x2f\xfd\x00\x58\x19\x00\x00\x66\x6f\x6f'
1556 list(chunker.finish()),
1541 ])
1557 [b"\x28\xb5\x2f\xfd\x00\x58\x19\x00\x00\x66\x6f\x6f"],
1558 )
1542
1559
1543 def test_flush(self):
1560 def test_flush(self):
1544 cctx = zstd.ZstdCompressor()
1561 cctx = zstd.ZstdCompressor()
1545 chunker = cctx.chunker()
1562 chunker = cctx.chunker()
1546
1563
1547 self.assertEqual(list(chunker.compress(b'foo' * 1024)), [])
1564 self.assertEqual(list(chunker.compress(b"foo" * 1024)), [])
1548 self.assertEqual(list(chunker.compress(b'bar' * 1024)), [])
1565 self.assertEqual(list(chunker.compress(b"bar" * 1024)), [])
1549
1566
1550 chunks1 = list(chunker.flush())
1567 chunks1 = list(chunker.flush())
1551
1568
1552 self.assertEqual(chunks1, [
1569 self.assertEqual(
1553 b'\x28\xb5\x2f\xfd\x00\x58\x8c\x00\x00\x30\x66\x6f\x6f\x62\x61\x72'
1570 chunks1,
1554 b'\x02\x00\xfa\x03\xfe\xd0\x9f\xbe\x1b\x02'
1571 [
1555 ])
1572 b"\x28\xb5\x2f\xfd\x00\x58\x8c\x00\x00\x30\x66\x6f\x6f\x62\x61\x72"
1573 b"\x02\x00\xfa\x03\xfe\xd0\x9f\xbe\x1b\x02"
1574 ],
1575 )
1556
1576
1557 self.assertEqual(list(chunker.flush()), [])
1577 self.assertEqual(list(chunker.flush()), [])
1558 self.assertEqual(list(chunker.flush()), [])
1578 self.assertEqual(list(chunker.flush()), [])
1559
1579
1560 self.assertEqual(list(chunker.compress(b'baz' * 1024)), [])
1580 self.assertEqual(list(chunker.compress(b"baz" * 1024)), [])
1561
1581
1562 chunks2 = list(chunker.flush())
1582 chunks2 = list(chunker.flush())
1563 self.assertEqual(len(chunks2), 1)
1583 self.assertEqual(len(chunks2), 1)
@@ -1567,53 +1587,56 b' class TestCompressor_chunker(unittest.Te'
1567
1587
1568 dctx = zstd.ZstdDecompressor()
1588 dctx = zstd.ZstdDecompressor()
1569
1589
1570 self.assertEqual(dctx.decompress(b''.join(chunks1 + chunks2 + chunks3),
1590 self.assertEqual(
1571 max_output_size=10000),
1591 dctx.decompress(
1572 (b'foo' * 1024) + (b'bar' * 1024) + (b'baz' * 1024))
1592 b"".join(chunks1 + chunks2 + chunks3), max_output_size=10000
1593 ),
1594 (b"foo" * 1024) + (b"bar" * 1024) + (b"baz" * 1024),
1595 )
1573
1596
1574 def test_compress_after_finish(self):
1597 def test_compress_after_finish(self):
1575 cctx = zstd.ZstdCompressor()
1598 cctx = zstd.ZstdCompressor()
1576 chunker = cctx.chunker()
1599 chunker = cctx.chunker()
1577
1600
1578 list(chunker.compress(b'foo'))
1601 list(chunker.compress(b"foo"))
1579 list(chunker.finish())
1602 list(chunker.finish())
1580
1603
1581 with self.assertRaisesRegexp(
1604 with self.assertRaisesRegex(
1582 zstd.ZstdError,
1605 zstd.ZstdError, r"cannot call compress\(\) after compression finished"
1583 r'cannot call compress\(\) after compression finished'):
1606 ):
1584 list(chunker.compress(b'foo'))
1607 list(chunker.compress(b"foo"))
1585
1608
1586 def test_flush_after_finish(self):
1609 def test_flush_after_finish(self):
1587 cctx = zstd.ZstdCompressor()
1610 cctx = zstd.ZstdCompressor()
1588 chunker = cctx.chunker()
1611 chunker = cctx.chunker()
1589
1612
1590 list(chunker.compress(b'foo'))
1613 list(chunker.compress(b"foo"))
1591 list(chunker.finish())
1614 list(chunker.finish())
1592
1615
1593 with self.assertRaisesRegexp(
1616 with self.assertRaisesRegex(
1594 zstd.ZstdError,
1617 zstd.ZstdError, r"cannot call flush\(\) after compression finished"
1595 r'cannot call flush\(\) after compression finished'):
1618 ):
1596 list(chunker.flush())
1619 list(chunker.flush())
1597
1620
1598 def test_finish_after_finish(self):
1621 def test_finish_after_finish(self):
1599 cctx = zstd.ZstdCompressor()
1622 cctx = zstd.ZstdCompressor()
1600 chunker = cctx.chunker()
1623 chunker = cctx.chunker()
1601
1624
1602 list(chunker.compress(b'foo'))
1625 list(chunker.compress(b"foo"))
1603 list(chunker.finish())
1626 list(chunker.finish())
1604
1627
1605 with self.assertRaisesRegexp(
1628 with self.assertRaisesRegex(
1606 zstd.ZstdError,
1629 zstd.ZstdError, r"cannot call finish\(\) after compression finished"
1607 r'cannot call finish\(\) after compression finished'):
1630 ):
1608 list(chunker.finish())
1631 list(chunker.finish())
1609
1632
1610
1633
1611 class TestCompressor_multi_compress_to_buffer(unittest.TestCase):
1634 class TestCompressor_multi_compress_to_buffer(TestCase):
1612 def test_invalid_inputs(self):
1635 def test_invalid_inputs(self):
1613 cctx = zstd.ZstdCompressor()
1636 cctx = zstd.ZstdCompressor()
1614
1637
1615 if not hasattr(cctx, 'multi_compress_to_buffer'):
1638 if not hasattr(cctx, "multi_compress_to_buffer"):
1616 self.skipTest('multi_compress_to_buffer not available')
1639 self.skipTest("multi_compress_to_buffer not available")
1617
1640
1618 with self.assertRaises(TypeError):
1641 with self.assertRaises(TypeError):
1619 cctx.multi_compress_to_buffer(True)
1642 cctx.multi_compress_to_buffer(True)
@@ -1621,28 +1644,28 b' class TestCompressor_multi_compress_to_b'
1621 with self.assertRaises(TypeError):
1644 with self.assertRaises(TypeError):
1622 cctx.multi_compress_to_buffer((1, 2))
1645 cctx.multi_compress_to_buffer((1, 2))
1623
1646
1624 with self.assertRaisesRegexp(TypeError, 'item 0 not a bytes like object'):
1647 with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"):
1625 cctx.multi_compress_to_buffer([u'foo'])
1648 cctx.multi_compress_to_buffer([u"foo"])
1626
1649
1627 def test_empty_input(self):
1650 def test_empty_input(self):
1628 cctx = zstd.ZstdCompressor()
1651 cctx = zstd.ZstdCompressor()
1629
1652
1630 if not hasattr(cctx, 'multi_compress_to_buffer'):
1653 if not hasattr(cctx, "multi_compress_to_buffer"):
1631 self.skipTest('multi_compress_to_buffer not available')
1654 self.skipTest("multi_compress_to_buffer not available")
1632
1655
1633 with self.assertRaisesRegexp(ValueError, 'no source elements found'):
1656 with self.assertRaisesRegex(ValueError, "no source elements found"):
1634 cctx.multi_compress_to_buffer([])
1657 cctx.multi_compress_to_buffer([])
1635
1658
1636 with self.assertRaisesRegexp(ValueError, 'source elements are empty'):
1659 with self.assertRaisesRegex(ValueError, "source elements are empty"):
1637 cctx.multi_compress_to_buffer([b'', b'', b''])
1660 cctx.multi_compress_to_buffer([b"", b"", b""])
1638
1661
1639 def test_list_input(self):
1662 def test_list_input(self):
1640 cctx = zstd.ZstdCompressor(write_checksum=True)
1663 cctx = zstd.ZstdCompressor(write_checksum=True)
1641
1664
1642 if not hasattr(cctx, 'multi_compress_to_buffer'):
1665 if not hasattr(cctx, "multi_compress_to_buffer"):
1643 self.skipTest('multi_compress_to_buffer not available')
1666 self.skipTest("multi_compress_to_buffer not available")
1644
1667
1645 original = [b'foo' * 12, b'bar' * 6]
1668 original = [b"foo" * 12, b"bar" * 6]
1646 frames = [cctx.compress(c) for c in original]
1669 frames = [cctx.compress(c) for c in original]
1647 b = cctx.multi_compress_to_buffer(original)
1670 b = cctx.multi_compress_to_buffer(original)
1648
1671
@@ -1657,15 +1680,16 b' class TestCompressor_multi_compress_to_b'
1657 def test_buffer_with_segments_input(self):
1680 def test_buffer_with_segments_input(self):
1658 cctx = zstd.ZstdCompressor(write_checksum=True)
1681 cctx = zstd.ZstdCompressor(write_checksum=True)
1659
1682
1660 if not hasattr(cctx, 'multi_compress_to_buffer'):
1683 if not hasattr(cctx, "multi_compress_to_buffer"):
1661 self.skipTest('multi_compress_to_buffer not available')
1684 self.skipTest("multi_compress_to_buffer not available")
1662
1685
1663 original = [b'foo' * 4, b'bar' * 6]
1686 original = [b"foo" * 4, b"bar" * 6]
1664 frames = [cctx.compress(c) for c in original]
1687 frames = [cctx.compress(c) for c in original]
1665
1688
1666 offsets = struct.pack('=QQQQ', 0, len(original[0]),
1689 offsets = struct.pack(
1667 len(original[0]), len(original[1]))
1690 "=QQQQ", 0, len(original[0]), len(original[0]), len(original[1])
1668 segments = zstd.BufferWithSegments(b''.join(original), offsets)
1691 )
1692 segments = zstd.BufferWithSegments(b"".join(original), offsets)
1669
1693
1670 result = cctx.multi_compress_to_buffer(segments)
1694 result = cctx.multi_compress_to_buffer(segments)
1671
1695
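As a reference for the hunk above: the reflowed struct.pack() call builds the segment table for zstd.BufferWithSegments, which is a flat sequence of unsigned 64-bit (offset, length) pairs, one pair per segment. A minimal illustrative sketch of that layout (not part of this change; multi_compress_to_buffer exists only on the C backend, which is why the tests guard it with hasattr()):

import struct

import zstandard as zstd

cctx = zstd.ZstdCompressor(write_checksum=True)

# Mirror the tests: only exercise the API on backends that provide it.
if hasattr(cctx, "multi_compress_to_buffer"):
    original = [b"foo" * 4, b"bar" * 6]
    data = b"".join(original)

    # Two segments -> four unsigned 64-bit values: (offset, length) per segment.
    offsets = struct.pack(
        "=QQQQ",
        0, len(original[0]),                 # segment 0 starts at offset 0
        len(original[0]), len(original[1]),  # segment 1 starts where segment 0 ends
    )

    segments = zstd.BufferWithSegments(data, offsets)
    result = cctx.multi_compress_to_buffer(segments)  # one compressed frame per segment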
@@ -1678,28 +1702,39 b' class TestCompressor_multi_compress_to_b'
1678 def test_buffer_with_segments_collection_input(self):
1702 def test_buffer_with_segments_collection_input(self):
1679 cctx = zstd.ZstdCompressor(write_checksum=True)
1703 cctx = zstd.ZstdCompressor(write_checksum=True)
1680
1704
1681 if not hasattr(cctx, 'multi_compress_to_buffer'):
1705 if not hasattr(cctx, "multi_compress_to_buffer"):
1682 self.skipTest('multi_compress_to_buffer not available')
1706 self.skipTest("multi_compress_to_buffer not available")
1683
1707
1684 original = [
1708 original = [
1685 b'foo1',
1709 b"foo1",
1686 b'foo2' * 2,
1710 b"foo2" * 2,
1687 b'foo3' * 3,
1711 b"foo3" * 3,
1688 b'foo4' * 4,
1712 b"foo4" * 4,
1689 b'foo5' * 5,
1713 b"foo5" * 5,
1690 ]
1714 ]
1691
1715
1692 frames = [cctx.compress(c) for c in original]
1716 frames = [cctx.compress(c) for c in original]
1693
1717
1694 b = b''.join([original[0], original[1]])
1718 b = b"".join([original[0], original[1]])
1695 b1 = zstd.BufferWithSegments(b, struct.pack('=QQQQ',
1719 b1 = zstd.BufferWithSegments(
1696 0, len(original[0]),
1720 b,
1697 len(original[0]), len(original[1])))
1721 struct.pack(
1698 b = b''.join([original[2], original[3], original[4]])
1722 "=QQQQ", 0, len(original[0]), len(original[0]), len(original[1])
1699 b2 = zstd.BufferWithSegments(b, struct.pack('=QQQQQQ',
1723 ),
1700 0, len(original[2]),
1724 )
1701 len(original[2]), len(original[3]),
1725 b = b"".join([original[2], original[3], original[4]])
1702 len(original[2]) + len(original[3]), len(original[4])))
1726 b2 = zstd.BufferWithSegments(
1727 b,
1728 struct.pack(
1729 "=QQQQQQ",
1730 0,
1731 len(original[2]),
1732 len(original[2]),
1733 len(original[3]),
1734 len(original[2]) + len(original[3]),
1735 len(original[4]),
1736 ),
1737 )
1703
1738
1704 c = zstd.BufferWithSegmentsCollection(b1, b2)
1739 c = zstd.BufferWithSegmentsCollection(b1, b2)
1705
1740
@@ -1714,16 +1749,16 b' class TestCompressor_multi_compress_to_b'
1714 # threads argument will cause multi-threaded ZSTD APIs to be used, which will
1749 # threads argument will cause multi-threaded ZSTD APIs to be used, which will
1715 # make output different.
1750 # make output different.
1716 refcctx = zstd.ZstdCompressor(write_checksum=True)
1751 refcctx = zstd.ZstdCompressor(write_checksum=True)
1717 reference = [refcctx.compress(b'x' * 64), refcctx.compress(b'y' * 64)]
1752 reference = [refcctx.compress(b"x" * 64), refcctx.compress(b"y" * 64)]
1718
1753
1719 cctx = zstd.ZstdCompressor(write_checksum=True)
1754 cctx = zstd.ZstdCompressor(write_checksum=True)
1720
1755
1721 if not hasattr(cctx, 'multi_compress_to_buffer'):
1756 if not hasattr(cctx, "multi_compress_to_buffer"):
1722 self.skipTest('multi_compress_to_buffer not available')
1757 self.skipTest("multi_compress_to_buffer not available")
1723
1758
1724 frames = []
1759 frames = []
1725 frames.extend(b'x' * 64 for i in range(256))
1760 frames.extend(b"x" * 64 for i in range(256))
1726 frames.extend(b'y' * 64 for i in range(256))
1761 frames.extend(b"y" * 64 for i in range(256))
1727
1762
1728 result = cctx.multi_compress_to_buffer(frames, threads=-1)
1763 result = cctx.multi_compress_to_buffer(frames, threads=-1)
1729
1764
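The stream_writer hunks above all reformat the same usage pattern. For reference, a minimal round-trip sketch of that pattern (illustrative only, not part of this change). Streaming frames do not record their content size unless size= is declared, so decompression needs max_output_size, exactly as in the tests above; the context-manager form closes the writer, hence the NonClosingBytesIO wrapper used in the tests.

import io

import zstandard as zstd

cctx = zstd.ZstdCompressor(level=3)
buffer = io.BytesIO()

# Without the context manager the destination buffer stays open.
compressor = cctx.stream_writer(buffer)
compressor.write(b"foo")
compressor.write(b"bar" * 1024)
compressor.flush(zstd.FLUSH_FRAME)  # end the zstd frame

frame = buffer.getvalue()

dctx = zstd.ZstdDecompressor()
assert dctx.decompress(frame, max_output_size=3 + 3 * 1024) == b"foo" + b"bar" * 1024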
This diff has been collapsed as it changes many lines (631 lines changed).
@@ -6,28 +6,31 b' try:'
6 import hypothesis
6 import hypothesis
7 import hypothesis.strategies as strategies
7 import hypothesis.strategies as strategies
8 except ImportError:
8 except ImportError:
9 raise unittest.SkipTest('hypothesis not available')
9 raise unittest.SkipTest("hypothesis not available")
10
10
11 import zstandard as zstd
11 import zstandard as zstd
12
12
13 from . common import (
13 from .common import (
14 make_cffi,
14 make_cffi,
15 NonClosingBytesIO,
15 NonClosingBytesIO,
16 random_input_data,
16 random_input_data,
17 TestCase,
17 )
18 )
18
19
19
20
20 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
21 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
21 @make_cffi
22 @make_cffi
22 class TestCompressor_stream_reader_fuzzing(unittest.TestCase):
23 class TestCompressor_stream_reader_fuzzing(TestCase):
23 @hypothesis.settings(
24 @hypothesis.settings(
24 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
25 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
25 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
26 )
26 level=strategies.integers(min_value=1, max_value=5),
27 @hypothesis.given(
27 source_read_size=strategies.integers(1, 16384),
28 original=strategies.sampled_from(random_input_data()),
28 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
29 level=strategies.integers(min_value=1, max_value=5),
29 def test_stream_source_read(self, original, level, source_read_size,
30 source_read_size=strategies.integers(1, 16384),
30 read_size):
31 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
32 )
33 def test_stream_source_read(self, original, level, source_read_size, read_size):
31 if read_size == 0:
34 if read_size == 0:
32 read_size = -1
35 read_size = -1
33
36
@@ -35,8 +38,9 b' class TestCompressor_stream_reader_fuzzi'
35 ref_frame = refctx.compress(original)
38 ref_frame = refctx.compress(original)
36
39
37 cctx = zstd.ZstdCompressor(level=level)
40 cctx = zstd.ZstdCompressor(level=level)
38 with cctx.stream_reader(io.BytesIO(original), size=len(original),
41 with cctx.stream_reader(
39 read_size=source_read_size) as reader:
42 io.BytesIO(original), size=len(original), read_size=source_read_size
43 ) as reader:
40 chunks = []
44 chunks = []
41 while True:
45 while True:
42 chunk = reader.read(read_size)
46 chunk = reader.read(read_size)
@@ -45,16 +49,18 b' class TestCompressor_stream_reader_fuzzi'
45
49
46 chunks.append(chunk)
50 chunks.append(chunk)
47
51
48 self.assertEqual(b''.join(chunks), ref_frame)
52 self.assertEqual(b"".join(chunks), ref_frame)
49
53
50 @hypothesis.settings(
54 @hypothesis.settings(
51 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
55 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
52 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
56 )
53 level=strategies.integers(min_value=1, max_value=5),
57 @hypothesis.given(
54 source_read_size=strategies.integers(1, 16384),
58 original=strategies.sampled_from(random_input_data()),
55 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
59 level=strategies.integers(min_value=1, max_value=5),
56 def test_buffer_source_read(self, original, level, source_read_size,
60 source_read_size=strategies.integers(1, 16384),
57 read_size):
61 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
62 )
63 def test_buffer_source_read(self, original, level, source_read_size, read_size):
58 if read_size == 0:
64 if read_size == 0:
59 read_size = -1
65 read_size = -1
60
66
@@ -62,8 +68,9 b' class TestCompressor_stream_reader_fuzzi'
62 ref_frame = refctx.compress(original)
68 ref_frame = refctx.compress(original)
63
69
64 cctx = zstd.ZstdCompressor(level=level)
70 cctx = zstd.ZstdCompressor(level=level)
65 with cctx.stream_reader(original, size=len(original),
71 with cctx.stream_reader(
66 read_size=source_read_size) as reader:
72 original, size=len(original), read_size=source_read_size
73 ) as reader:
67 chunks = []
74 chunks = []
68 while True:
75 while True:
69 chunk = reader.read(read_size)
76 chunk = reader.read(read_size)
@@ -72,22 +79,30 b' class TestCompressor_stream_reader_fuzzi'
72
79
73 chunks.append(chunk)
80 chunks.append(chunk)
74
81
75 self.assertEqual(b''.join(chunks), ref_frame)
82 self.assertEqual(b"".join(chunks), ref_frame)
76
83
77 @hypothesis.settings(
84 @hypothesis.settings(
78 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
85 suppress_health_check=[
79 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
86 hypothesis.HealthCheck.large_base_example,
80 level=strategies.integers(min_value=1, max_value=5),
87 hypothesis.HealthCheck.too_slow,
81 source_read_size=strategies.integers(1, 16384),
88 ]
82 read_sizes=strategies.data())
89 )
83 def test_stream_source_read_variance(self, original, level, source_read_size,
90 @hypothesis.given(
84 read_sizes):
91 original=strategies.sampled_from(random_input_data()),
92 level=strategies.integers(min_value=1, max_value=5),
93 source_read_size=strategies.integers(1, 16384),
94 read_sizes=strategies.data(),
95 )
96 def test_stream_source_read_variance(
97 self, original, level, source_read_size, read_sizes
98 ):
85 refctx = zstd.ZstdCompressor(level=level)
99 refctx = zstd.ZstdCompressor(level=level)
86 ref_frame = refctx.compress(original)
100 ref_frame = refctx.compress(original)
87
101
88 cctx = zstd.ZstdCompressor(level=level)
102 cctx = zstd.ZstdCompressor(level=level)
89 with cctx.stream_reader(io.BytesIO(original), size=len(original),
103 with cctx.stream_reader(
90 read_size=source_read_size) as reader:
104 io.BytesIO(original), size=len(original), read_size=source_read_size
105 ) as reader:
91 chunks = []
106 chunks = []
92 while True:
107 while True:
93 read_size = read_sizes.draw(strategies.integers(-1, 16384))
108 read_size = read_sizes.draw(strategies.integers(-1, 16384))
@@ -97,23 +112,31 b' class TestCompressor_stream_reader_fuzzi'
97
112
98 chunks.append(chunk)
113 chunks.append(chunk)
99
114
100 self.assertEqual(b''.join(chunks), ref_frame)
115 self.assertEqual(b"".join(chunks), ref_frame)
101
116
102 @hypothesis.settings(
117 @hypothesis.settings(
103 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
118 suppress_health_check=[
104 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
119 hypothesis.HealthCheck.large_base_example,
105 level=strategies.integers(min_value=1, max_value=5),
120 hypothesis.HealthCheck.too_slow,
106 source_read_size=strategies.integers(1, 16384),
121 ]
107 read_sizes=strategies.data())
122 )
108 def test_buffer_source_read_variance(self, original, level, source_read_size,
123 @hypothesis.given(
109 read_sizes):
124 original=strategies.sampled_from(random_input_data()),
125 level=strategies.integers(min_value=1, max_value=5),
126 source_read_size=strategies.integers(1, 16384),
127 read_sizes=strategies.data(),
128 )
129 def test_buffer_source_read_variance(
130 self, original, level, source_read_size, read_sizes
131 ):
110
132
111 refctx = zstd.ZstdCompressor(level=level)
133 refctx = zstd.ZstdCompressor(level=level)
112 ref_frame = refctx.compress(original)
134 ref_frame = refctx.compress(original)
113
135
114 cctx = zstd.ZstdCompressor(level=level)
136 cctx = zstd.ZstdCompressor(level=level)
115 with cctx.stream_reader(original, size=len(original),
137 with cctx.stream_reader(
116 read_size=source_read_size) as reader:
138 original, size=len(original), read_size=source_read_size
139 ) as reader:
117 chunks = []
140 chunks = []
118 while True:
141 while True:
119 read_size = read_sizes.draw(strategies.integers(-1, 16384))
142 read_size = read_sizes.draw(strategies.integers(-1, 16384))
@@ -123,22 +146,25 b' class TestCompressor_stream_reader_fuzzi'
123
146
124 chunks.append(chunk)
147 chunks.append(chunk)
125
148
126 self.assertEqual(b''.join(chunks), ref_frame)
149 self.assertEqual(b"".join(chunks), ref_frame)
127
150
128 @hypothesis.settings(
151 @hypothesis.settings(
129 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
152 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
130 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
153 )
131 level=strategies.integers(min_value=1, max_value=5),
154 @hypothesis.given(
132 source_read_size=strategies.integers(1, 16384),
155 original=strategies.sampled_from(random_input_data()),
133 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
156 level=strategies.integers(min_value=1, max_value=5),
134 def test_stream_source_readinto(self, original, level,
157 source_read_size=strategies.integers(1, 16384),
135 source_read_size, read_size):
158 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
159 )
160 def test_stream_source_readinto(self, original, level, source_read_size, read_size):
136 refctx = zstd.ZstdCompressor(level=level)
161 refctx = zstd.ZstdCompressor(level=level)
137 ref_frame = refctx.compress(original)
162 ref_frame = refctx.compress(original)
138
163
139 cctx = zstd.ZstdCompressor(level=level)
164 cctx = zstd.ZstdCompressor(level=level)
140 with cctx.stream_reader(io.BytesIO(original), size=len(original),
165 with cctx.stream_reader(
141 read_size=source_read_size) as reader:
166 io.BytesIO(original), size=len(original), read_size=source_read_size
167 ) as reader:
142 chunks = []
168 chunks = []
143 while True:
169 while True:
144 b = bytearray(read_size)
170 b = bytearray(read_size)
@@ -149,23 +175,26 b' class TestCompressor_stream_reader_fuzzi'
149
175
150 chunks.append(bytes(b[0:count]))
176 chunks.append(bytes(b[0:count]))
151
177
152 self.assertEqual(b''.join(chunks), ref_frame)
178 self.assertEqual(b"".join(chunks), ref_frame)
153
179
154 @hypothesis.settings(
180 @hypothesis.settings(
155 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
181 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
156 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
182 )
157 level=strategies.integers(min_value=1, max_value=5),
183 @hypothesis.given(
158 source_read_size=strategies.integers(1, 16384),
184 original=strategies.sampled_from(random_input_data()),
159 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
185 level=strategies.integers(min_value=1, max_value=5),
160 def test_buffer_source_readinto(self, original, level,
186 source_read_size=strategies.integers(1, 16384),
161 source_read_size, read_size):
187 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
188 )
189 def test_buffer_source_readinto(self, original, level, source_read_size, read_size):
162
190
163 refctx = zstd.ZstdCompressor(level=level)
191 refctx = zstd.ZstdCompressor(level=level)
164 ref_frame = refctx.compress(original)
192 ref_frame = refctx.compress(original)
165
193
166 cctx = zstd.ZstdCompressor(level=level)
194 cctx = zstd.ZstdCompressor(level=level)
167 with cctx.stream_reader(original, size=len(original),
195 with cctx.stream_reader(
168 read_size=source_read_size) as reader:
196 original, size=len(original), read_size=source_read_size
197 ) as reader:
169 chunks = []
198 chunks = []
170 while True:
199 while True:
171 b = bytearray(read_size)
200 b = bytearray(read_size)
@@ -176,22 +205,30 b' class TestCompressor_stream_reader_fuzzi'
176
205
177 chunks.append(bytes(b[0:count]))
206 chunks.append(bytes(b[0:count]))
178
207
179 self.assertEqual(b''.join(chunks), ref_frame)
208 self.assertEqual(b"".join(chunks), ref_frame)
180
209
181 @hypothesis.settings(
210 @hypothesis.settings(
182 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
211 suppress_health_check=[
183 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
212 hypothesis.HealthCheck.large_base_example,
184 level=strategies.integers(min_value=1, max_value=5),
213 hypothesis.HealthCheck.too_slow,
185 source_read_size=strategies.integers(1, 16384),
214 ]
186 read_sizes=strategies.data())
215 )
187 def test_stream_source_readinto_variance(self, original, level,
216 @hypothesis.given(
188 source_read_size, read_sizes):
217 original=strategies.sampled_from(random_input_data()),
218 level=strategies.integers(min_value=1, max_value=5),
219 source_read_size=strategies.integers(1, 16384),
220 read_sizes=strategies.data(),
221 )
222 def test_stream_source_readinto_variance(
223 self, original, level, source_read_size, read_sizes
224 ):
189 refctx = zstd.ZstdCompressor(level=level)
225 refctx = zstd.ZstdCompressor(level=level)
190 ref_frame = refctx.compress(original)
226 ref_frame = refctx.compress(original)
191
227
192 cctx = zstd.ZstdCompressor(level=level)
228 cctx = zstd.ZstdCompressor(level=level)
193 with cctx.stream_reader(io.BytesIO(original), size=len(original),
229 with cctx.stream_reader(
194 read_size=source_read_size) as reader:
230 io.BytesIO(original), size=len(original), read_size=source_read_size
231 ) as reader:
195 chunks = []
232 chunks = []
196 while True:
233 while True:
197 read_size = read_sizes.draw(strategies.integers(1, 16384))
234 read_size = read_sizes.draw(strategies.integers(1, 16384))
@@ -203,23 +240,31 b' class TestCompressor_stream_reader_fuzzi'
203
240
204 chunks.append(bytes(b[0:count]))
241 chunks.append(bytes(b[0:count]))
205
242
206 self.assertEqual(b''.join(chunks), ref_frame)
243 self.assertEqual(b"".join(chunks), ref_frame)
207
244
208 @hypothesis.settings(
245 @hypothesis.settings(
209 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
246 suppress_health_check=[
210 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
247 hypothesis.HealthCheck.large_base_example,
211 level=strategies.integers(min_value=1, max_value=5),
248 hypothesis.HealthCheck.too_slow,
212 source_read_size=strategies.integers(1, 16384),
249 ]
213 read_sizes=strategies.data())
250 )
214 def test_buffer_source_readinto_variance(self, original, level,
251 @hypothesis.given(
215 source_read_size, read_sizes):
252 original=strategies.sampled_from(random_input_data()),
253 level=strategies.integers(min_value=1, max_value=5),
254 source_read_size=strategies.integers(1, 16384),
255 read_sizes=strategies.data(),
256 )
257 def test_buffer_source_readinto_variance(
258 self, original, level, source_read_size, read_sizes
259 ):
216
260
217 refctx = zstd.ZstdCompressor(level=level)
261 refctx = zstd.ZstdCompressor(level=level)
218 ref_frame = refctx.compress(original)
262 ref_frame = refctx.compress(original)
219
263
220 cctx = zstd.ZstdCompressor(level=level)
264 cctx = zstd.ZstdCompressor(level=level)
221 with cctx.stream_reader(original, size=len(original),
265 with cctx.stream_reader(
222 read_size=source_read_size) as reader:
266 original, size=len(original), read_size=source_read_size
267 ) as reader:
223 chunks = []
268 chunks = []
224 while True:
269 while True:
225 read_size = read_sizes.draw(strategies.integers(1, 16384))
270 read_size = read_sizes.draw(strategies.integers(1, 16384))
@@ -231,16 +276,18 b' class TestCompressor_stream_reader_fuzzi'
231
276
232 chunks.append(bytes(b[0:count]))
277 chunks.append(bytes(b[0:count]))
233
278
234 self.assertEqual(b''.join(chunks), ref_frame)
279 self.assertEqual(b"".join(chunks), ref_frame)
235
280
236 @hypothesis.settings(
281 @hypothesis.settings(
237 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
282 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
238 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
283 )
239 level=strategies.integers(min_value=1, max_value=5),
284 @hypothesis.given(
240 source_read_size=strategies.integers(1, 16384),
285 original=strategies.sampled_from(random_input_data()),
241 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
286 level=strategies.integers(min_value=1, max_value=5),
242 def test_stream_source_read1(self, original, level, source_read_size,
287 source_read_size=strategies.integers(1, 16384),
243 read_size):
288 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
289 )
290 def test_stream_source_read1(self, original, level, source_read_size, read_size):
244 if read_size == 0:
291 if read_size == 0:
245 read_size = -1
292 read_size = -1
246
293
@@ -248,8 +295,9 b' class TestCompressor_stream_reader_fuzzi'
248 ref_frame = refctx.compress(original)
295 ref_frame = refctx.compress(original)
249
296
250 cctx = zstd.ZstdCompressor(level=level)
297 cctx = zstd.ZstdCompressor(level=level)
251 with cctx.stream_reader(io.BytesIO(original), size=len(original),
298 with cctx.stream_reader(
252 read_size=source_read_size) as reader:
299 io.BytesIO(original), size=len(original), read_size=source_read_size
300 ) as reader:
253 chunks = []
301 chunks = []
254 while True:
302 while True:
255 chunk = reader.read1(read_size)
303 chunk = reader.read1(read_size)
@@ -258,16 +306,18 b' class TestCompressor_stream_reader_fuzzi'
258
306
259 chunks.append(chunk)
307 chunks.append(chunk)
260
308
261 self.assertEqual(b''.join(chunks), ref_frame)
309 self.assertEqual(b"".join(chunks), ref_frame)
262
310
263 @hypothesis.settings(
311 @hypothesis.settings(
264 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
312 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
265 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
313 )
266 level=strategies.integers(min_value=1, max_value=5),
314 @hypothesis.given(
267 source_read_size=strategies.integers(1, 16384),
315 original=strategies.sampled_from(random_input_data()),
268 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
316 level=strategies.integers(min_value=1, max_value=5),
269 def test_buffer_source_read1(self, original, level, source_read_size,
317 source_read_size=strategies.integers(1, 16384),
270 read_size):
318 read_size=strategies.integers(-1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
319 )
320 def test_buffer_source_read1(self, original, level, source_read_size, read_size):
271 if read_size == 0:
321 if read_size == 0:
272 read_size = -1
322 read_size = -1
273
323
@@ -275,8 +325,9 b' class TestCompressor_stream_reader_fuzzi'
275 ref_frame = refctx.compress(original)
325 ref_frame = refctx.compress(original)
276
326
277 cctx = zstd.ZstdCompressor(level=level)
327 cctx = zstd.ZstdCompressor(level=level)
278 with cctx.stream_reader(original, size=len(original),
328 with cctx.stream_reader(
279 read_size=source_read_size) as reader:
329 original, size=len(original), read_size=source_read_size
330 ) as reader:
280 chunks = []
331 chunks = []
281 while True:
332 while True:
282 chunk = reader.read1(read_size)
333 chunk = reader.read1(read_size)
@@ -285,22 +336,30 b' class TestCompressor_stream_reader_fuzzi'
285
336
286 chunks.append(chunk)
337 chunks.append(chunk)
287
338
288 self.assertEqual(b''.join(chunks), ref_frame)
339 self.assertEqual(b"".join(chunks), ref_frame)
289
340
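The read1() variants differ from read() in that a single call may return less than requested; the tests deliberately draw sizes starting at -1, which asks for whatever one internal read/compress pass produces. A small sketch of that mode (illustrative, not from the suite):

import io
import zstandard as zstd

data = b"sample" * 4096
cctx = zstd.ZstdCompressor(level=1)

chunks = []
with cctx.stream_reader(io.BytesIO(data), size=len(data)) as reader:
    while True:
        chunk = reader.read1(-1)   # may be shorter than a full read() would return
        if not chunk:
            break
        chunks.append(chunk)

# Same level and declared size, so the streamed frame matches one-shot
# compression, which is exactly what the fuzz tests assert.
assert b"".join(chunks) == zstd.ZstdCompressor(level=1).compress(data)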
290 @hypothesis.settings(
341 @hypothesis.settings(
291 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
342 suppress_health_check=[
292 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
343 hypothesis.HealthCheck.large_base_example,
293 level=strategies.integers(min_value=1, max_value=5),
344 hypothesis.HealthCheck.too_slow,
294 source_read_size=strategies.integers(1, 16384),
345 ]
295 read_sizes=strategies.data())
346 )
296 def test_stream_source_read1_variance(self, original, level, source_read_size,
347 @hypothesis.given(
297 read_sizes):
348 original=strategies.sampled_from(random_input_data()),
349 level=strategies.integers(min_value=1, max_value=5),
350 source_read_size=strategies.integers(1, 16384),
351 read_sizes=strategies.data(),
352 )
353 def test_stream_source_read1_variance(
354 self, original, level, source_read_size, read_sizes
355 ):
298 refctx = zstd.ZstdCompressor(level=level)
356 refctx = zstd.ZstdCompressor(level=level)
299 ref_frame = refctx.compress(original)
357 ref_frame = refctx.compress(original)
300
358
301 cctx = zstd.ZstdCompressor(level=level)
359 cctx = zstd.ZstdCompressor(level=level)
302 with cctx.stream_reader(io.BytesIO(original), size=len(original),
360 with cctx.stream_reader(
303 read_size=source_read_size) as reader:
361 io.BytesIO(original), size=len(original), read_size=source_read_size
362 ) as reader:
304 chunks = []
363 chunks = []
305 while True:
364 while True:
306 read_size = read_sizes.draw(strategies.integers(-1, 16384))
365 read_size = read_sizes.draw(strategies.integers(-1, 16384))
@@ -310,23 +369,31 b' class TestCompressor_stream_reader_fuzzi'
310
369
311 chunks.append(chunk)
370 chunks.append(chunk)
312
371
313 self.assertEqual(b''.join(chunks), ref_frame)
372 self.assertEqual(b"".join(chunks), ref_frame)
314
373
315 @hypothesis.settings(
374 @hypothesis.settings(
316 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
375 suppress_health_check=[
317 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
376 hypothesis.HealthCheck.large_base_example,
318 level=strategies.integers(min_value=1, max_value=5),
377 hypothesis.HealthCheck.too_slow,
319 source_read_size=strategies.integers(1, 16384),
378 ]
320 read_sizes=strategies.data())
379 )
321 def test_buffer_source_read1_variance(self, original, level, source_read_size,
380 @hypothesis.given(
322 read_sizes):
381 original=strategies.sampled_from(random_input_data()),
382 level=strategies.integers(min_value=1, max_value=5),
383 source_read_size=strategies.integers(1, 16384),
384 read_sizes=strategies.data(),
385 )
386 def test_buffer_source_read1_variance(
387 self, original, level, source_read_size, read_sizes
388 ):
323
389
324 refctx = zstd.ZstdCompressor(level=level)
390 refctx = zstd.ZstdCompressor(level=level)
325 ref_frame = refctx.compress(original)
391 ref_frame = refctx.compress(original)
326
392
327 cctx = zstd.ZstdCompressor(level=level)
393 cctx = zstd.ZstdCompressor(level=level)
328 with cctx.stream_reader(original, size=len(original),
394 with cctx.stream_reader(
329 read_size=source_read_size) as reader:
395 original, size=len(original), read_size=source_read_size
396 ) as reader:
330 chunks = []
397 chunks = []
331 while True:
398 while True:
332 read_size = read_sizes.draw(strategies.integers(-1, 16384))
399 read_size = read_sizes.draw(strategies.integers(-1, 16384))
@@ -336,17 +403,20 b' class TestCompressor_stream_reader_fuzzi'
336
403
337 chunks.append(chunk)
404 chunks.append(chunk)
338
405
339 self.assertEqual(b''.join(chunks), ref_frame)
406 self.assertEqual(b"".join(chunks), ref_frame)
340
341
407
342 @hypothesis.settings(
408 @hypothesis.settings(
343 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
409 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
344 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
410 )
345 level=strategies.integers(min_value=1, max_value=5),
411 @hypothesis.given(
346 source_read_size=strategies.integers(1, 16384),
412 original=strategies.sampled_from(random_input_data()),
347 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
413 level=strategies.integers(min_value=1, max_value=5),
348 def test_stream_source_readinto1(self, original, level, source_read_size,
414 source_read_size=strategies.integers(1, 16384),
349 read_size):
415 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
416 )
417 def test_stream_source_readinto1(
418 self, original, level, source_read_size, read_size
419 ):
350 if read_size == 0:
420 if read_size == 0:
351 read_size = -1
421 read_size = -1
352
422
@@ -354,8 +424,9 b' class TestCompressor_stream_reader_fuzzi'
354 ref_frame = refctx.compress(original)
424 ref_frame = refctx.compress(original)
355
425
356 cctx = zstd.ZstdCompressor(level=level)
426 cctx = zstd.ZstdCompressor(level=level)
357 with cctx.stream_reader(io.BytesIO(original), size=len(original),
427 with cctx.stream_reader(
358 read_size=source_read_size) as reader:
428 io.BytesIO(original), size=len(original), read_size=source_read_size
429 ) as reader:
359 chunks = []
430 chunks = []
360 while True:
431 while True:
361 b = bytearray(read_size)
432 b = bytearray(read_size)
@@ -366,16 +437,20 b' class TestCompressor_stream_reader_fuzzi'
366
437
367 chunks.append(bytes(b[0:count]))
438 chunks.append(bytes(b[0:count]))
368
439
369 self.assertEqual(b''.join(chunks), ref_frame)
440 self.assertEqual(b"".join(chunks), ref_frame)
370
441
371 @hypothesis.settings(
442 @hypothesis.settings(
372 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
443 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
373 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
444 )
374 level=strategies.integers(min_value=1, max_value=5),
445 @hypothesis.given(
375 source_read_size=strategies.integers(1, 16384),
446 original=strategies.sampled_from(random_input_data()),
376 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE))
447 level=strategies.integers(min_value=1, max_value=5),
377 def test_buffer_source_readinto1(self, original, level, source_read_size,
448 source_read_size=strategies.integers(1, 16384),
378 read_size):
449 read_size=strategies.integers(1, zstd.COMPRESSION_RECOMMENDED_OUTPUT_SIZE),
450 )
451 def test_buffer_source_readinto1(
452 self, original, level, source_read_size, read_size
453 ):
379 if read_size == 0:
454 if read_size == 0:
380 read_size = -1
455 read_size = -1
381
456
@@ -383,8 +458,9 b' class TestCompressor_stream_reader_fuzzi'
383 ref_frame = refctx.compress(original)
458 ref_frame = refctx.compress(original)
384
459
385 cctx = zstd.ZstdCompressor(level=level)
460 cctx = zstd.ZstdCompressor(level=level)
386 with cctx.stream_reader(original, size=len(original),
461 with cctx.stream_reader(
387 read_size=source_read_size) as reader:
462 original, size=len(original), read_size=source_read_size
463 ) as reader:
388 chunks = []
464 chunks = []
389 while True:
465 while True:
390 b = bytearray(read_size)
466 b = bytearray(read_size)
@@ -395,22 +471,30 b' class TestCompressor_stream_reader_fuzzi'
395
471
396 chunks.append(bytes(b[0:count]))
472 chunks.append(bytes(b[0:count]))
397
473
398 self.assertEqual(b''.join(chunks), ref_frame)
474 self.assertEqual(b"".join(chunks), ref_frame)
399
475
400 @hypothesis.settings(
476 @hypothesis.settings(
401 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
477 suppress_health_check=[
402 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
478 hypothesis.HealthCheck.large_base_example,
403 level=strategies.integers(min_value=1, max_value=5),
479 hypothesis.HealthCheck.too_slow,
404 source_read_size=strategies.integers(1, 16384),
480 ]
405 read_sizes=strategies.data())
481 )
406 def test_stream_source_readinto1_variance(self, original, level, source_read_size,
482 @hypothesis.given(
407 read_sizes):
483 original=strategies.sampled_from(random_input_data()),
484 level=strategies.integers(min_value=1, max_value=5),
485 source_read_size=strategies.integers(1, 16384),
486 read_sizes=strategies.data(),
487 )
488 def test_stream_source_readinto1_variance(
489 self, original, level, source_read_size, read_sizes
490 ):
408 refctx = zstd.ZstdCompressor(level=level)
491 refctx = zstd.ZstdCompressor(level=level)
409 ref_frame = refctx.compress(original)
492 ref_frame = refctx.compress(original)
410
493
411 cctx = zstd.ZstdCompressor(level=level)
494 cctx = zstd.ZstdCompressor(level=level)
412 with cctx.stream_reader(io.BytesIO(original), size=len(original),
495 with cctx.stream_reader(
413 read_size=source_read_size) as reader:
496 io.BytesIO(original), size=len(original), read_size=source_read_size
497 ) as reader:
414 chunks = []
498 chunks = []
415 while True:
499 while True:
416 read_size = read_sizes.draw(strategies.integers(1, 16384))
500 read_size = read_sizes.draw(strategies.integers(1, 16384))
@@ -422,23 +506,31 b' class TestCompressor_stream_reader_fuzzi'
422
506
423 chunks.append(bytes(b[0:count]))
507 chunks.append(bytes(b[0:count]))
424
508
425 self.assertEqual(b''.join(chunks), ref_frame)
509 self.assertEqual(b"".join(chunks), ref_frame)
426
510
427 @hypothesis.settings(
511 @hypothesis.settings(
428 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
512 suppress_health_check=[
429 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
513 hypothesis.HealthCheck.large_base_example,
430 level=strategies.integers(min_value=1, max_value=5),
514 hypothesis.HealthCheck.too_slow,
431 source_read_size=strategies.integers(1, 16384),
515 ]
432 read_sizes=strategies.data())
516 )
433 def test_buffer_source_readinto1_variance(self, original, level, source_read_size,
517 @hypothesis.given(
434 read_sizes):
518 original=strategies.sampled_from(random_input_data()),
519 level=strategies.integers(min_value=1, max_value=5),
520 source_read_size=strategies.integers(1, 16384),
521 read_sizes=strategies.data(),
522 )
523 def test_buffer_source_readinto1_variance(
524 self, original, level, source_read_size, read_sizes
525 ):
435
526
436 refctx = zstd.ZstdCompressor(level=level)
527 refctx = zstd.ZstdCompressor(level=level)
437 ref_frame = refctx.compress(original)
528 ref_frame = refctx.compress(original)
438
529
439 cctx = zstd.ZstdCompressor(level=level)
530 cctx = zstd.ZstdCompressor(level=level)
440 with cctx.stream_reader(original, size=len(original),
531 with cctx.stream_reader(
441 read_size=source_read_size) as reader:
532 original, size=len(original), read_size=source_read_size
533 ) as reader:
442 chunks = []
534 chunks = []
443 while True:
535 while True:
444 read_size = read_sizes.draw(strategies.integers(1, 16384))
536 read_size = read_sizes.draw(strategies.integers(1, 16384))
@@ -450,35 +542,40 b' class TestCompressor_stream_reader_fuzzi'
450
542
451 chunks.append(bytes(b[0:count]))
543 chunks.append(bytes(b[0:count]))
452
544
453 self.assertEqual(b''.join(chunks), ref_frame)
545 self.assertEqual(b"".join(chunks), ref_frame)
454
455
546
456
547
457 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
548 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
458 @make_cffi
549 @make_cffi
459 class TestCompressor_stream_writer_fuzzing(unittest.TestCase):
550 class TestCompressor_stream_writer_fuzzing(TestCase):
460 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
551 @hypothesis.given(
461 level=strategies.integers(min_value=1, max_value=5),
552 original=strategies.sampled_from(random_input_data()),
462 write_size=strategies.integers(min_value=1, max_value=1048576))
553 level=strategies.integers(min_value=1, max_value=5),
554 write_size=strategies.integers(min_value=1, max_value=1048576),
555 )
463 def test_write_size_variance(self, original, level, write_size):
556 def test_write_size_variance(self, original, level, write_size):
464 refctx = zstd.ZstdCompressor(level=level)
557 refctx = zstd.ZstdCompressor(level=level)
465 ref_frame = refctx.compress(original)
558 ref_frame = refctx.compress(original)
466
559
467 cctx = zstd.ZstdCompressor(level=level)
560 cctx = zstd.ZstdCompressor(level=level)
468 b = NonClosingBytesIO()
561 b = NonClosingBytesIO()
469 with cctx.stream_writer(b, size=len(original), write_size=write_size) as compressor:
562 with cctx.stream_writer(
563 b, size=len(original), write_size=write_size
564 ) as compressor:
470 compressor.write(original)
565 compressor.write(original)
471
566
472 self.assertEqual(b.getvalue(), ref_frame)
567 self.assertEqual(b.getvalue(), ref_frame)
473
568
474
569
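test_write_size_variance drives the other direction: stream_writer() pushes compressed bytes into a file object. The test wraps the buffer in NonClosingBytesIO because leaving the with-block closes the target; a sketch that avoids the helper by finishing the frame explicitly (FLUSH_FRAME is assumed from the library's public flush modes, it is not shown in this diff):

import io
import zstandard as zstd

data = b"payload" * 2048
cctx = zstd.ZstdCompressor(level=3)

out = io.BytesIO()
writer = cctx.stream_writer(out, size=len(data), write_size=8192)
writer.write(data)
writer.flush(zstd.FLUSH_FRAME)   # end the frame without closing `out`

frame = out.getvalue()
assert zstd.ZstdDecompressor().decompress(frame, max_output_size=len(data)) == data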
475 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
570 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
476 @make_cffi
571 @make_cffi
477 class TestCompressor_copy_stream_fuzzing(unittest.TestCase):
572 class TestCompressor_copy_stream_fuzzing(TestCase):
478 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
573 @hypothesis.given(
479 level=strategies.integers(min_value=1, max_value=5),
574 original=strategies.sampled_from(random_input_data()),
480 read_size=strategies.integers(min_value=1, max_value=1048576),
575 level=strategies.integers(min_value=1, max_value=5),
481 write_size=strategies.integers(min_value=1, max_value=1048576))
576 read_size=strategies.integers(min_value=1, max_value=1048576),
577 write_size=strategies.integers(min_value=1, max_value=1048576),
578 )
482 def test_read_write_size_variance(self, original, level, read_size, write_size):
579 def test_read_write_size_variance(self, original, level, read_size, write_size):
483 refctx = zstd.ZstdCompressor(level=level)
580 refctx = zstd.ZstdCompressor(level=level)
484 ref_frame = refctx.compress(original)
581 ref_frame = refctx.compress(original)
@@ -487,20 +584,27 b' class TestCompressor_copy_stream_fuzzing'
487 source = io.BytesIO(original)
584 source = io.BytesIO(original)
488 dest = io.BytesIO()
585 dest = io.BytesIO()
489
586
490 cctx.copy_stream(source, dest, size=len(original), read_size=read_size,
587 cctx.copy_stream(
491 write_size=write_size)
588 source, dest, size=len(original), read_size=read_size, write_size=write_size
589 )
492
590
493 self.assertEqual(dest.getvalue(), ref_frame)
591 self.assertEqual(dest.getvalue(), ref_frame)
494
592
495
593
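copy_stream() combines both sides: it reads uncompressed bytes from one file object and writes a complete frame to another, honoring read_size/write_size just like the fuzzed arguments above. Minimal sketch (values illustrative):

import io
import zstandard as zstd

data = b"copy me" * 4096
cctx = zstd.ZstdCompressor(level=3)

source = io.BytesIO(data)
dest = io.BytesIO()
read_count, write_count = cctx.copy_stream(
    source, dest, size=len(data), read_size=16384, write_size=16384
)
# read_count / write_count report total bytes consumed and emitted.
assert zstd.ZstdDecompressor().decompress(dest.getvalue(), max_output_size=len(data)) == data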
496 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
594 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
497 @make_cffi
595 @make_cffi
498 class TestCompressor_compressobj_fuzzing(unittest.TestCase):
596 class TestCompressor_compressobj_fuzzing(TestCase):
499 @hypothesis.settings(
597 @hypothesis.settings(
500 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
598 suppress_health_check=[
501 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
599 hypothesis.HealthCheck.large_base_example,
502 level=strategies.integers(min_value=1, max_value=5),
600 hypothesis.HealthCheck.too_slow,
503 chunk_sizes=strategies.data())
601 ]
602 )
603 @hypothesis.given(
604 original=strategies.sampled_from(random_input_data()),
605 level=strategies.integers(min_value=1, max_value=5),
606 chunk_sizes=strategies.data(),
607 )
504 def test_random_input_sizes(self, original, level, chunk_sizes):
608 def test_random_input_sizes(self, original, level, chunk_sizes):
505 refctx = zstd.ZstdCompressor(level=level)
609 refctx = zstd.ZstdCompressor(level=level)
506 ref_frame = refctx.compress(original)
610 ref_frame = refctx.compress(original)
@@ -512,7 +616,7 b' class TestCompressor_compressobj_fuzzing'
512 i = 0
616 i = 0
513 while True:
617 while True:
514 chunk_size = chunk_sizes.draw(strategies.integers(1, 4096))
618 chunk_size = chunk_sizes.draw(strategies.integers(1, 4096))
515 source = original[i:i + chunk_size]
619 source = original[i : i + chunk_size]
516 if not source:
620 if not source:
517 break
621 break
518
622
@@ -521,14 +625,20 b' class TestCompressor_compressobj_fuzzing'
521
625
522 chunks.append(cobj.flush())
626 chunks.append(cobj.flush())
523
627
524 self.assertEqual(b''.join(chunks), ref_frame)
628 self.assertEqual(b"".join(chunks), ref_frame)
525
629
526 @hypothesis.settings(
630 @hypothesis.settings(
527 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
631 suppress_health_check=[
528 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
632 hypothesis.HealthCheck.large_base_example,
529 level=strategies.integers(min_value=1, max_value=5),
633 hypothesis.HealthCheck.too_slow,
530 chunk_sizes=strategies.data(),
634 ]
531 flushes=strategies.data())
635 )
636 @hypothesis.given(
637 original=strategies.sampled_from(random_input_data()),
638 level=strategies.integers(min_value=1, max_value=5),
639 chunk_sizes=strategies.data(),
640 flushes=strategies.data(),
641 )
532 def test_flush_block(self, original, level, chunk_sizes, flushes):
642 def test_flush_block(self, original, level, chunk_sizes, flushes):
533 cctx = zstd.ZstdCompressor(level=level)
643 cctx = zstd.ZstdCompressor(level=level)
534 cobj = cctx.compressobj()
644 cobj = cctx.compressobj()
@@ -541,7 +651,7 b' class TestCompressor_compressobj_fuzzing'
541 i = 0
651 i = 0
542 while True:
652 while True:
543 input_size = chunk_sizes.draw(strategies.integers(1, 4096))
653 input_size = chunk_sizes.draw(strategies.integers(1, 4096))
544 source = original[i:i + input_size]
654 source = original[i : i + input_size]
545 if not source:
655 if not source:
546 break
656 break
547
657
@@ -558,24 +668,28 b' class TestCompressor_compressobj_fuzzing'
558 compressed_chunks.append(chunk)
668 compressed_chunks.append(chunk)
559 decompressed_chunks.append(dobj.decompress(chunk))
669 decompressed_chunks.append(dobj.decompress(chunk))
560
670
561 self.assertEqual(b''.join(decompressed_chunks), original[0:i])
671 self.assertEqual(b"".join(decompressed_chunks), original[0:i])
562
672
563 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_FINISH)
673 chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_FINISH)
564 compressed_chunks.append(chunk)
674 compressed_chunks.append(chunk)
565 decompressed_chunks.append(dobj.decompress(chunk))
675 decompressed_chunks.append(dobj.decompress(chunk))
566
676
567 self.assertEqual(dctx.decompress(b''.join(compressed_chunks),
677 self.assertEqual(
568 max_output_size=len(original)),
678 dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
569 original)
679 original,
570 self.assertEqual(b''.join(decompressed_chunks), original)
680 )
681 self.assertEqual(b"".join(decompressed_chunks), original)
682
571
683
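test_flush_block checks the key property of COMPRESSOBJ_FLUSH_BLOCK: after a block flush, everything fed to the compressor so far is decodable even though the frame is still open. A condensed sketch of that round trip (piece contents are illustrative):

import zstandard as zstd

cctx = zstd.ZstdCompressor(level=3)
cobj = cctx.compressobj()
dobj = zstd.ZstdDecompressor().decompressobj()

fed = b""
seen = b""
for piece in (b"first ", b"second ", b"third"):
    fed += piece
    chunk = cobj.compress(piece)
    if chunk:
        seen += dobj.decompress(chunk)
    chunk = cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)   # end the current block
    if chunk:
        seen += dobj.decompress(chunk)
    assert seen == fed                                 # data so far is decodable

seen += dobj.decompress(cobj.flush(zstd.COMPRESSOBJ_FLUSH_FINISH))
assert seen == b"first second third"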
572 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
684 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
573 @make_cffi
685 @make_cffi
574 class TestCompressor_read_to_iter_fuzzing(unittest.TestCase):
686 class TestCompressor_read_to_iter_fuzzing(TestCase):
575 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
687 @hypothesis.given(
576 level=strategies.integers(min_value=1, max_value=5),
688 original=strategies.sampled_from(random_input_data()),
577 read_size=strategies.integers(min_value=1, max_value=4096),
689 level=strategies.integers(min_value=1, max_value=5),
578 write_size=strategies.integers(min_value=1, max_value=4096))
690 read_size=strategies.integers(min_value=1, max_value=4096),
691 write_size=strategies.integers(min_value=1, max_value=4096),
692 )
579 def test_read_write_size_variance(self, original, level, read_size, write_size):
693 def test_read_write_size_variance(self, original, level, read_size, write_size):
580 refcctx = zstd.ZstdCompressor(level=level)
694 refcctx = zstd.ZstdCompressor(level=level)
581 ref_frame = refcctx.compress(original)
695 ref_frame = refcctx.compress(original)
@@ -583,32 +697,35 b' class TestCompressor_read_to_iter_fuzzin'
583 source = io.BytesIO(original)
697 source = io.BytesIO(original)
584
698
585 cctx = zstd.ZstdCompressor(level=level)
699 cctx = zstd.ZstdCompressor(level=level)
586 chunks = list(cctx.read_to_iter(source, size=len(original),
700 chunks = list(
587 read_size=read_size,
701 cctx.read_to_iter(
588 write_size=write_size))
702 source, size=len(original), read_size=read_size, write_size=write_size
703 )
704 )
589
705
590 self.assertEqual(b''.join(chunks), ref_frame)
706 self.assertEqual(b"".join(chunks), ref_frame)
591
707
592
708
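read_to_iter() exposes the compressor as a generator of compressed chunks, which is what this test joins and compares against a one-shot frame. Sketch (sizes illustrative):

import io
import zstandard as zstd

data = b"streamed input" * 1024
cctx = zstd.ZstdCompressor(level=3)

frame = b"".join(
    cctx.read_to_iter(io.BytesIO(data), size=len(data), read_size=4096, write_size=4096)
)
assert zstd.ZstdDecompressor().decompress(frame, max_output_size=len(data)) == data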
593 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
709 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
594 class TestCompressor_multi_compress_to_buffer_fuzzing(unittest.TestCase):
710 class TestCompressor_multi_compress_to_buffer_fuzzing(TestCase):
595 @hypothesis.given(original=strategies.lists(strategies.sampled_from(random_input_data()),
711 @hypothesis.given(
596 min_size=1, max_size=1024),
712 original=strategies.lists(
597 threads=strategies.integers(min_value=1, max_value=8),
713 strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
598 use_dict=strategies.booleans())
714 ),
715 threads=strategies.integers(min_value=1, max_value=8),
716 use_dict=strategies.booleans(),
717 )
599 def test_data_equivalence(self, original, threads, use_dict):
718 def test_data_equivalence(self, original, threads, use_dict):
600 kwargs = {}
719 kwargs = {}
601
720
602 # Use a content dictionary because it is cheap to create.
721 # Use a content dictionary because it is cheap to create.
603 if use_dict:
722 if use_dict:
604 kwargs['dict_data'] = zstd.ZstdCompressionDict(original[0])
723 kwargs["dict_data"] = zstd.ZstdCompressionDict(original[0])
605
724
606 cctx = zstd.ZstdCompressor(level=1,
725 cctx = zstd.ZstdCompressor(level=1, write_checksum=True, **kwargs)
607 write_checksum=True,
608 **kwargs)
609
726
610 if not hasattr(cctx, 'multi_compress_to_buffer'):
727 if not hasattr(cctx, "multi_compress_to_buffer"):
611 self.skipTest('multi_compress_to_buffer not available')
728 self.skipTest("multi_compress_to_buffer not available")
612
729
613 result = cctx.multi_compress_to_buffer(original, threads=-1)
730 result = cctx.multi_compress_to_buffer(original, threads=-1)
614
731
@@ -624,17 +741,21 b' class TestCompressor_multi_compress_to_b'
624 self.assertEqual(dctx.decompress(frame), original[i])
741 self.assertEqual(dctx.decompress(frame), original[i])
625
742
626
743
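multi_compress_to_buffer() compresses a list of inputs in one call, optionally across threads, and the test guards on hasattr() because not every backend (notably CFFI) implements it. Sketch of the same pattern (inputs illustrative):

import zstandard as zstd

inputs = [b"alpha" * 100, b"beta" * 100, b"gamma" * 100]
cctx = zstd.ZstdCompressor(level=1, write_checksum=True)

if hasattr(cctx, "multi_compress_to_buffer"):
    result = cctx.multi_compress_to_buffer(inputs, threads=-1)
    dctx = zstd.ZstdDecompressor()
    for i, frame in enumerate(result):
        assert dctx.decompress(frame) == inputs[i]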
627 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
744 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
628 @make_cffi
745 @make_cffi
629 class TestCompressor_chunker_fuzzing(unittest.TestCase):
746 class TestCompressor_chunker_fuzzing(TestCase):
630 @hypothesis.settings(
747 @hypothesis.settings(
631 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
748 suppress_health_check=[
632 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
749 hypothesis.HealthCheck.large_base_example,
633 level=strategies.integers(min_value=1, max_value=5),
750 hypothesis.HealthCheck.too_slow,
634 chunk_size=strategies.integers(
751 ]
635 min_value=1,
752 )
636 max_value=32 * 1048576),
753 @hypothesis.given(
637 input_sizes=strategies.data())
754 original=strategies.sampled_from(random_input_data()),
755 level=strategies.integers(min_value=1, max_value=5),
756 chunk_size=strategies.integers(min_value=1, max_value=32 * 1048576),
757 input_sizes=strategies.data(),
758 )
638 def test_random_input_sizes(self, original, level, chunk_size, input_sizes):
759 def test_random_input_sizes(self, original, level, chunk_size, input_sizes):
639 cctx = zstd.ZstdCompressor(level=level)
760 cctx = zstd.ZstdCompressor(level=level)
640 chunker = cctx.chunker(chunk_size=chunk_size)
761 chunker = cctx.chunker(chunk_size=chunk_size)
@@ -643,7 +764,7 b' class TestCompressor_chunker_fuzzing(uni'
643 i = 0
764 i = 0
644 while True:
765 while True:
645 input_size = input_sizes.draw(strategies.integers(1, 4096))
766 input_size = input_sizes.draw(strategies.integers(1, 4096))
646 source = original[i:i + input_size]
767 source = original[i : i + input_size]
647 if not source:
768 if not source:
648 break
769 break
649
770
@@ -654,23 +775,26 b' class TestCompressor_chunker_fuzzing(uni'
654
775
655 dctx = zstd.ZstdDecompressor()
776 dctx = zstd.ZstdDecompressor()
656
777
657 self.assertEqual(dctx.decompress(b''.join(chunks),
778 self.assertEqual(
658 max_output_size=len(original)),
779 dctx.decompress(b"".join(chunks), max_output_size=len(original)), original
659 original)
780 )
660
781
661 self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1]))
782 self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1]))
662
783
663 @hypothesis.settings(
784 @hypothesis.settings(
664 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
785 suppress_health_check=[
665 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
786 hypothesis.HealthCheck.large_base_example,
666 level=strategies.integers(min_value=1, max_value=5),
787 hypothesis.HealthCheck.too_slow,
667 chunk_size=strategies.integers(
788 ]
668 min_value=1,
789 )
669 max_value=32 * 1048576),
790 @hypothesis.given(
670 input_sizes=strategies.data(),
791 original=strategies.sampled_from(random_input_data()),
671 flushes=strategies.data())
792 level=strategies.integers(min_value=1, max_value=5),
672 def test_flush_block(self, original, level, chunk_size, input_sizes,
793 chunk_size=strategies.integers(min_value=1, max_value=32 * 1048576),
673 flushes):
794 input_sizes=strategies.data(),
795 flushes=strategies.data(),
796 )
797 def test_flush_block(self, original, level, chunk_size, input_sizes, flushes):
674 cctx = zstd.ZstdCompressor(level=level)
798 cctx = zstd.ZstdCompressor(level=level)
675 chunker = cctx.chunker(chunk_size=chunk_size)
799 chunker = cctx.chunker(chunk_size=chunk_size)
676
800
@@ -682,7 +806,7 b' class TestCompressor_chunker_fuzzing(uni'
682 i = 0
806 i = 0
683 while True:
807 while True:
684 input_size = input_sizes.draw(strategies.integers(1, 4096))
808 input_size = input_sizes.draw(strategies.integers(1, 4096))
685 source = original[i:i + input_size]
809 source = original[i : i + input_size]
686 if not source:
810 if not source:
687 break
811 break
688
812
@@ -690,22 +814,23 b' class TestCompressor_chunker_fuzzing(uni'
690
814
691 chunks = list(chunker.compress(source))
815 chunks = list(chunker.compress(source))
692 compressed_chunks.extend(chunks)
816 compressed_chunks.extend(chunks)
693 decompressed_chunks.append(dobj.decompress(b''.join(chunks)))
817 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
694
818
695 if not flushes.draw(strategies.booleans()):
819 if not flushes.draw(strategies.booleans()):
696 continue
820 continue
697
821
698 chunks = list(chunker.flush())
822 chunks = list(chunker.flush())
699 compressed_chunks.extend(chunks)
823 compressed_chunks.extend(chunks)
700 decompressed_chunks.append(dobj.decompress(b''.join(chunks)))
824 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
701
825
702 self.assertEqual(b''.join(decompressed_chunks), original[0:i])
826 self.assertEqual(b"".join(decompressed_chunks), original[0:i])
703
827
704 chunks = list(chunker.finish())
828 chunks = list(chunker.finish())
705 compressed_chunks.extend(chunks)
829 compressed_chunks.extend(chunks)
706 decompressed_chunks.append(dobj.decompress(b''.join(chunks)))
830 decompressed_chunks.append(dobj.decompress(b"".join(chunks)))
707
831
708 self.assertEqual(dctx.decompress(b''.join(compressed_chunks),
832 self.assertEqual(
709 max_output_size=len(original)),
833 dctx.decompress(b"".join(compressed_chunks), max_output_size=len(original)),
710 original)
834 original,
711 self.assertEqual(b''.join(decompressed_chunks), original)
\ No newline at end of file
835 )
836 self.assertEqual(b"".join(decompressed_chunks), original)
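The chunker API buffers compressed output and re-slices it so that every emitted chunk (except possibly the last) has exactly chunk_size bytes; flush() can force out buffered data mid-stream and finish() ends the frame, which is what the two tests above exercise. Sketch without the fuzzing (sizes illustrative):

import zstandard as zstd

data = b"chunked data" * 4096
cctx = zstd.ZstdCompressor(level=3)
chunker = cctx.chunker(chunk_size=16384)

chunks = []
for offset in range(0, len(data), 4096):
    chunks.extend(chunker.compress(data[offset:offset + 4096]))
chunks.extend(chunker.finish())   # ends the frame

frame = b"".join(chunks)
assert zstd.ZstdDecompressor().decompress(frame, max_output_size=len(data)) == data
assert all(len(c) == 16384 for c in chunks[:-1])   # uniform chunk sizes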
@@ -3,29 +3,34 b' import unittest'
 
 import zstandard as zstd
 
-from . common import (
+from .common import (
     make_cffi,
+    TestCase,
 )
 
 
 @make_cffi
-class TestCompressionParameters(unittest.TestCase):
+class TestCompressionParameters(TestCase):
13 def test_bounds(self):
14 def test_bounds(self):
14 zstd.ZstdCompressionParameters(window_log=zstd.WINDOWLOG_MIN,
15 zstd.ZstdCompressionParameters(
15 chain_log=zstd.CHAINLOG_MIN,
16 window_log=zstd.WINDOWLOG_MIN,
16 hash_log=zstd.HASHLOG_MIN,
17 chain_log=zstd.CHAINLOG_MIN,
17 search_log=zstd.SEARCHLOG_MIN,
18 hash_log=zstd.HASHLOG_MIN,
18 min_match=zstd.MINMATCH_MIN + 1,
19 search_log=zstd.SEARCHLOG_MIN,
19 target_length=zstd.TARGETLENGTH_MIN,
20 min_match=zstd.MINMATCH_MIN + 1,
20 strategy=zstd.STRATEGY_FAST)
21 target_length=zstd.TARGETLENGTH_MIN,
22 strategy=zstd.STRATEGY_FAST,
23 )
21
24
22 zstd.ZstdCompressionParameters(window_log=zstd.WINDOWLOG_MAX,
25 zstd.ZstdCompressionParameters(
23 chain_log=zstd.CHAINLOG_MAX,
26 window_log=zstd.WINDOWLOG_MAX,
24 hash_log=zstd.HASHLOG_MAX,
27 chain_log=zstd.CHAINLOG_MAX,
25 search_log=zstd.SEARCHLOG_MAX,
28 hash_log=zstd.HASHLOG_MAX,
26 min_match=zstd.MINMATCH_MAX - 1,
29 search_log=zstd.SEARCHLOG_MAX,
27 target_length=zstd.TARGETLENGTH_MAX,
30 min_match=zstd.MINMATCH_MAX - 1,
28 strategy=zstd.STRATEGY_BTULTRA2)
31 target_length=zstd.TARGETLENGTH_MAX,
32 strategy=zstd.STRATEGY_BTULTRA2,
33 )
29
34
30 def test_from_level(self):
35 def test_from_level(self):
31 p = zstd.ZstdCompressionParameters.from_level(1)
36 p = zstd.ZstdCompressionParameters.from_level(1)
@@ -37,13 +42,15 b' class TestCompressionParameters(unittest'
37 self.assertEqual(p.window_log, 19)
42 self.assertEqual(p.window_log, 19)
38
43
39 def test_members(self):
44 def test_members(self):
40 p = zstd.ZstdCompressionParameters(window_log=10,
45 p = zstd.ZstdCompressionParameters(
41 chain_log=6,
46 window_log=10,
42 hash_log=7,
47 chain_log=6,
43 search_log=4,
48 hash_log=7,
44 min_match=5,
49 search_log=4,
45 target_length=8,
50 min_match=5,
46 strategy=1)
51 target_length=8,
52 strategy=1,
53 )
47 self.assertEqual(p.window_log, 10)
54 self.assertEqual(p.window_log, 10)
48 self.assertEqual(p.chain_log, 6)
55 self.assertEqual(p.chain_log, 6)
49 self.assertEqual(p.hash_log, 7)
56 self.assertEqual(p.hash_log, 7)
@@ -58,8 +65,7 b' class TestCompressionParameters(unittest'
58 p = zstd.ZstdCompressionParameters(threads=4)
65 p = zstd.ZstdCompressionParameters(threads=4)
59 self.assertEqual(p.threads, 4)
66 self.assertEqual(p.threads, 4)
60
67
61 p = zstd.ZstdCompressionParameters(threads=2, job_size=1048576,
68 p = zstd.ZstdCompressionParameters(threads=2, job_size=1048576, overlap_log=6)
62 overlap_log=6)
63 self.assertEqual(p.threads, 2)
69 self.assertEqual(p.threads, 2)
64 self.assertEqual(p.job_size, 1048576)
70 self.assertEqual(p.job_size, 1048576)
65 self.assertEqual(p.overlap_log, 6)
71 self.assertEqual(p.overlap_log, 6)
@@ -91,20 +97,25 b' class TestCompressionParameters(unittest'
91 self.assertEqual(p.ldm_hash_rate_log, 8)
97 self.assertEqual(p.ldm_hash_rate_log, 8)
92
98
93 def test_estimated_compression_context_size(self):
99 def test_estimated_compression_context_size(self):
94 p = zstd.ZstdCompressionParameters(window_log=20,
100 p = zstd.ZstdCompressionParameters(
95 chain_log=16,
101 window_log=20,
96 hash_log=17,
102 chain_log=16,
97 search_log=1,
103 hash_log=17,
98 min_match=5,
104 search_log=1,
99 target_length=16,
105 min_match=5,
100 strategy=zstd.STRATEGY_DFAST)
106 target_length=16,
107 strategy=zstd.STRATEGY_DFAST,
108 )
101
109
102 # 32-bit has slightly different values from 64-bit.
110 # 32-bit has slightly different values from 64-bit.
103 self.assertAlmostEqual(p.estimated_compression_context_size(), 1294144,
111 self.assertAlmostEqual(
104 delta=250)
112 p.estimated_compression_context_size(), 1294464, delta=400
113 )
105
114
106 def test_strategy(self):
115 def test_strategy(self):
107 with self.assertRaisesRegexp(ValueError, 'cannot specify both compression_strategy'):
116 with self.assertRaisesRegex(
117 ValueError, "cannot specify both compression_strategy"
118 ):
108 zstd.ZstdCompressionParameters(strategy=0, compression_strategy=0)
119 zstd.ZstdCompressionParameters(strategy=0, compression_strategy=0)
109
120
110 p = zstd.ZstdCompressionParameters(strategy=2)
121 p = zstd.ZstdCompressionParameters(strategy=2)
@@ -114,7 +125,9 b' class TestCompressionParameters(unittest'
114 self.assertEqual(p.compression_strategy, 3)
125 self.assertEqual(p.compression_strategy, 3)
115
126
116 def test_ldm_hash_rate_log(self):
127 def test_ldm_hash_rate_log(self):
117 with self.assertRaisesRegexp(ValueError, 'cannot specify both ldm_hash_rate_log'):
128 with self.assertRaisesRegex(
129 ValueError, "cannot specify both ldm_hash_rate_log"
130 ):
118 zstd.ZstdCompressionParameters(ldm_hash_rate_log=8, ldm_hash_every_log=4)
131 zstd.ZstdCompressionParameters(ldm_hash_rate_log=8, ldm_hash_every_log=4)
119
132
120 p = zstd.ZstdCompressionParameters(ldm_hash_rate_log=8)
133 p = zstd.ZstdCompressionParameters(ldm_hash_rate_log=8)
@@ -124,7 +137,7 b' class TestCompressionParameters(unittest'
124 self.assertEqual(p.ldm_hash_every_log, 16)
137 self.assertEqual(p.ldm_hash_every_log, 16)
125
138
126 def test_overlap_log(self):
139 def test_overlap_log(self):
127 with self.assertRaisesRegexp(ValueError, 'cannot specify both overlap_log'):
140 with self.assertRaisesRegex(ValueError, "cannot specify both overlap_log"):
128 zstd.ZstdCompressionParameters(overlap_log=1, overlap_size_log=9)
141 zstd.ZstdCompressionParameters(overlap_log=1, overlap_size_log=9)
129
142
130 p = zstd.ZstdCompressionParameters(overlap_log=2)
143 p = zstd.ZstdCompressionParameters(overlap_log=2)
@@ -137,7 +150,7 b' class TestCompressionParameters(unittest'
137
150
138
151
139 @make_cffi
152 @make_cffi
140 class TestFrameParameters(unittest.TestCase):
153 class TestFrameParameters(TestCase):
141 def test_invalid_type(self):
154 def test_invalid_type(self):
142 with self.assertRaises(TypeError):
155 with self.assertRaises(TypeError):
143 zstd.get_frame_parameters(None)
156 zstd.get_frame_parameters(None)
@@ -145,71 +158,71 b' class TestFrameParameters(unittest.TestC'
145 # Python 3 doesn't appear to convert unicode to Py_buffer.
158 # Python 3 doesn't appear to convert unicode to Py_buffer.
146 if sys.version_info[0] >= 3:
159 if sys.version_info[0] >= 3:
147 with self.assertRaises(TypeError):
160 with self.assertRaises(TypeError):
148 zstd.get_frame_parameters(u'foobarbaz')
161 zstd.get_frame_parameters(u"foobarbaz")
149 else:
162 else:
150 # CPython will convert unicode to Py_buffer. But CFFI won't.
163 # CPython will convert unicode to Py_buffer. But CFFI won't.
151 if zstd.backend == 'cffi':
164 if zstd.backend == "cffi":
152 with self.assertRaises(TypeError):
165 with self.assertRaises(TypeError):
153 zstd.get_frame_parameters(u'foobarbaz')
166 zstd.get_frame_parameters(u"foobarbaz")
154 else:
167 else:
155 with self.assertRaises(zstd.ZstdError):
168 with self.assertRaises(zstd.ZstdError):
156 zstd.get_frame_parameters(u'foobarbaz')
169 zstd.get_frame_parameters(u"foobarbaz")
157
170
158 def test_invalid_input_sizes(self):
171 def test_invalid_input_sizes(self):
159 with self.assertRaisesRegexp(zstd.ZstdError, 'not enough data for frame'):
172 with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"):
160 zstd.get_frame_parameters(b'')
173 zstd.get_frame_parameters(b"")
161
174
162 with self.assertRaisesRegexp(zstd.ZstdError, 'not enough data for frame'):
175 with self.assertRaisesRegex(zstd.ZstdError, "not enough data for frame"):
163 zstd.get_frame_parameters(zstd.FRAME_HEADER)
176 zstd.get_frame_parameters(zstd.FRAME_HEADER)
164
177
165 def test_invalid_frame(self):
178 def test_invalid_frame(self):
166 with self.assertRaisesRegexp(zstd.ZstdError, 'Unknown frame descriptor'):
179 with self.assertRaisesRegex(zstd.ZstdError, "Unknown frame descriptor"):
167 zstd.get_frame_parameters(b'foobarbaz')
180 zstd.get_frame_parameters(b"foobarbaz")
168
181
169 def test_attributes(self):
182 def test_attributes(self):
170 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x00\x00')
183 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x00\x00")
171 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
184 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
172 self.assertEqual(params.window_size, 1024)
185 self.assertEqual(params.window_size, 1024)
173 self.assertEqual(params.dict_id, 0)
186 self.assertEqual(params.dict_id, 0)
174 self.assertFalse(params.has_checksum)
187 self.assertFalse(params.has_checksum)
175
188
176 # Lowest 2 bits indicate a dictionary and length. Here, the dict id is 1 byte.
189 # Lowest 2 bits indicate a dictionary and length. Here, the dict id is 1 byte.
177 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x01\x00\xff')
190 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x01\x00\xff")
178 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
191 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
179 self.assertEqual(params.window_size, 1024)
192 self.assertEqual(params.window_size, 1024)
180 self.assertEqual(params.dict_id, 255)
193 self.assertEqual(params.dict_id, 255)
181 self.assertFalse(params.has_checksum)
194 self.assertFalse(params.has_checksum)
182
195
183 # Lowest 3rd bit indicates if checksum is present.
196 # Lowest 3rd bit indicates if checksum is present.
184 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x04\x00')
197 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x04\x00")
185 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
198 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
186 self.assertEqual(params.window_size, 1024)
199 self.assertEqual(params.window_size, 1024)
187 self.assertEqual(params.dict_id, 0)
200 self.assertEqual(params.dict_id, 0)
188 self.assertTrue(params.has_checksum)
201 self.assertTrue(params.has_checksum)
189
202
190 # Upper 2 bits indicate content size.
203 # Upper 2 bits indicate content size.
191 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x40\x00\xff\x00')
204 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x40\x00\xff\x00")
192 self.assertEqual(params.content_size, 511)
205 self.assertEqual(params.content_size, 511)
193 self.assertEqual(params.window_size, 1024)
206 self.assertEqual(params.window_size, 1024)
194 self.assertEqual(params.dict_id, 0)
207 self.assertEqual(params.dict_id, 0)
195 self.assertFalse(params.has_checksum)
208 self.assertFalse(params.has_checksum)
196
209
197 # Window descriptor is 2nd byte after frame header.
210 # Window descriptor is 2nd byte after frame header.
198 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x00\x40')
211 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x00\x40")
199 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
212 self.assertEqual(params.content_size, zstd.CONTENTSIZE_UNKNOWN)
200 self.assertEqual(params.window_size, 262144)
213 self.assertEqual(params.window_size, 262144)
201 self.assertEqual(params.dict_id, 0)
214 self.assertEqual(params.dict_id, 0)
202 self.assertFalse(params.has_checksum)
215 self.assertFalse(params.has_checksum)
203
216
204 # Set multiple things.
217 # Set multiple things.
205 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b'\x45\x40\x0f\x10\x00')
218 params = zstd.get_frame_parameters(zstd.FRAME_HEADER + b"\x45\x40\x0f\x10\x00")
206 self.assertEqual(params.content_size, 272)
219 self.assertEqual(params.content_size, 272)
207 self.assertEqual(params.window_size, 262144)
220 self.assertEqual(params.window_size, 262144)
208 self.assertEqual(params.dict_id, 15)
221 self.assertEqual(params.dict_id, 15)
209 self.assertTrue(params.has_checksum)
222 self.assertTrue(params.has_checksum)
210
223
211 def test_input_types(self):
224 def test_input_types(self):
212 v = zstd.FRAME_HEADER + b'\x00\x00'
225 v = zstd.FRAME_HEADER + b"\x00\x00"
213
226
214 mutable_array = bytearray(len(v))
227 mutable_array = bytearray(len(v))
215 mutable_array[:] = v
228 mutable_array[:] = v
@@ -7,70 +7,99 b' try:'
7 import hypothesis
7 import hypothesis
8 import hypothesis.strategies as strategies
8 import hypothesis.strategies as strategies
9 except ImportError:
9 except ImportError:
10 raise unittest.SkipTest('hypothesis not available')
10 raise unittest.SkipTest("hypothesis not available")
11
11
12 import zstandard as zstd
12 import zstandard as zstd
13
13
14 from .common import (
14 from .common import (
15 make_cffi,
15 make_cffi,
16 TestCase,
17 )
18
19
20 s_windowlog = strategies.integers(
21 min_value=zstd.WINDOWLOG_MIN, max_value=zstd.WINDOWLOG_MAX
22 )
23 s_chainlog = strategies.integers(
24 min_value=zstd.CHAINLOG_MIN, max_value=zstd.CHAINLOG_MAX
25 )
26 s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN, max_value=zstd.HASHLOG_MAX)
27 s_searchlog = strategies.integers(
28 min_value=zstd.SEARCHLOG_MIN, max_value=zstd.SEARCHLOG_MAX
29 )
30 s_minmatch = strategies.integers(
31 min_value=zstd.MINMATCH_MIN, max_value=zstd.MINMATCH_MAX
32 )
33 s_targetlength = strategies.integers(
34 min_value=zstd.TARGETLENGTH_MIN, max_value=zstd.TARGETLENGTH_MAX
35 )
36 s_strategy = strategies.sampled_from(
37 (
38 zstd.STRATEGY_FAST,
39 zstd.STRATEGY_DFAST,
40 zstd.STRATEGY_GREEDY,
41 zstd.STRATEGY_LAZY,
42 zstd.STRATEGY_LAZY2,
43 zstd.STRATEGY_BTLAZY2,
44 zstd.STRATEGY_BTOPT,
45 zstd.STRATEGY_BTULTRA,
46 zstd.STRATEGY_BTULTRA2,
47 )
16 )
48 )
17
49
18
50
19 s_windowlog = strategies.integers(min_value=zstd.WINDOWLOG_MIN,
51 @make_cffi
20 max_value=zstd.WINDOWLOG_MAX)
52 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
21 s_chainlog = strategies.integers(min_value=zstd.CHAINLOG_MIN,
53 class TestCompressionParametersHypothesis(TestCase):
22 max_value=zstd.CHAINLOG_MAX)
54 @hypothesis.given(
23 s_hashlog = strategies.integers(min_value=zstd.HASHLOG_MIN,
55 s_windowlog,
24 max_value=zstd.HASHLOG_MAX)
56 s_chainlog,
25 s_searchlog = strategies.integers(min_value=zstd.SEARCHLOG_MIN,
57 s_hashlog,
26 max_value=zstd.SEARCHLOG_MAX)
58 s_searchlog,
27 s_minmatch = strategies.integers(min_value=zstd.MINMATCH_MIN,
59 s_minmatch,
28 max_value=zstd.MINMATCH_MAX)
60 s_targetlength,
29 s_targetlength = strategies.integers(min_value=zstd.TARGETLENGTH_MIN,
61 s_strategy,
30 max_value=zstd.TARGETLENGTH_MAX)
62 )
31 s_strategy = strategies.sampled_from((zstd.STRATEGY_FAST,
63 def test_valid_init(
32 zstd.STRATEGY_DFAST,
64 self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy
33 zstd.STRATEGY_GREEDY,
65 ):
34 zstd.STRATEGY_LAZY,
66 zstd.ZstdCompressionParameters(
35 zstd.STRATEGY_LAZY2,
67 window_log=windowlog,
36 zstd.STRATEGY_BTLAZY2,
68 chain_log=chainlog,
37 zstd.STRATEGY_BTOPT,
69 hash_log=hashlog,
38 zstd.STRATEGY_BTULTRA,
70 search_log=searchlog,
39 zstd.STRATEGY_BTULTRA2))
71 min_match=minmatch,
40
72 target_length=targetlength,
73 strategy=strategy,
74 )
41
75
42 @make_cffi
76 @hypothesis.given(
43 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
77 s_windowlog,
44 class TestCompressionParametersHypothesis(unittest.TestCase):
78 s_chainlog,
45 @hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
79 s_hashlog,
46 s_minmatch, s_targetlength, s_strategy)
80 s_searchlog,
47 def test_valid_init(self, windowlog, chainlog, hashlog, searchlog,
81 s_minmatch,
48 minmatch, targetlength, strategy):
82 s_targetlength,
49 zstd.ZstdCompressionParameters(window_log=windowlog,
83 s_strategy,
50 chain_log=chainlog,
84 )
51 hash_log=hashlog,
85 def test_estimated_compression_context_size(
52 search_log=searchlog,
86 self, windowlog, chainlog, hashlog, searchlog, minmatch, targetlength, strategy
53 min_match=minmatch,
87 ):
54 target_length=targetlength,
88 if minmatch == zstd.MINMATCH_MIN and strategy in (
55 strategy=strategy)
89 zstd.STRATEGY_FAST,
56
90 zstd.STRATEGY_GREEDY,
57 @hypothesis.given(s_windowlog, s_chainlog, s_hashlog, s_searchlog,
91 ):
58 s_minmatch, s_targetlength, s_strategy)
59 def test_estimated_compression_context_size(self, windowlog, chainlog,
60 hashlog, searchlog,
61 minmatch, targetlength,
62 strategy):
63 if minmatch == zstd.MINMATCH_MIN and strategy in (zstd.STRATEGY_FAST, zstd.STRATEGY_GREEDY):
64 minmatch += 1
92 minmatch += 1
65 elif minmatch == zstd.MINMATCH_MAX and strategy != zstd.STRATEGY_FAST:
93 elif minmatch == zstd.MINMATCH_MAX and strategy != zstd.STRATEGY_FAST:
66 minmatch -= 1
94 minmatch -= 1
67
95
68 p = zstd.ZstdCompressionParameters(window_log=windowlog,
96 p = zstd.ZstdCompressionParameters(
69 chain_log=chainlog,
97 window_log=windowlog,
70 hash_log=hashlog,
98 chain_log=chainlog,
71 search_log=searchlog,
99 hash_log=hashlog,
72 min_match=minmatch,
100 search_log=searchlog,
73 target_length=targetlength,
101 min_match=minmatch,
74 strategy=strategy)
102 target_length=targetlength,
103 strategy=strategy,
104 )
75 size = p.estimated_compression_context_size()
105 size = p.estimated_compression_context_size()
76
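The strategies above bound every tunable by its *_MIN/*_MAX constant so Hypothesis only proposes parameter sets zstd accepts, and the estimate test nudges min_match off the extremes because the limit values are only legal for some strategies. A condensed sketch of the same fuzzing idea, assuming the context-size estimate is always positive:

import hypothesis
import hypothesis.strategies as strategies
import zstandard as zstd

s_windowlog = strategies.integers(zstd.WINDOWLOG_MIN, zstd.WINDOWLOG_MAX)
s_chainlog = strategies.integers(zstd.CHAINLOG_MIN, zstd.CHAINLOG_MAX)

@hypothesis.given(window_log=s_windowlog, chain_log=s_chainlog)
def test_bounded_params_are_accepted(window_log, chain_log):
    params = zstd.ZstdCompressionParameters(window_log=window_log, chain_log=chain_log)
    assert params.estimated_compression_context_size() > 0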
This diff has been collapsed as it changes many lines (729 lines changed).
@@ -13,6 +13,7 b' from .common import ('
     make_cffi,
     NonClosingBytesIO,
     OpCountingBytesIO,
+    TestCase,
 )
 
 
@@ -23,62 +24,67 b' else:'
23
24
24
25
25 @make_cffi
26 @make_cffi
26 class TestFrameHeaderSize(unittest.TestCase):
27 class TestFrameHeaderSize(TestCase):
27 def test_empty(self):
28 def test_empty(self):
28 with self.assertRaisesRegexp(
29 with self.assertRaisesRegex(
29 zstd.ZstdError, 'could not determine frame header size: Src size '
30 zstd.ZstdError,
30 'is incorrect'):
31 "could not determine frame header size: Src size " "is incorrect",
31 zstd.frame_header_size(b'')
32 ):
33 zstd.frame_header_size(b"")
32
34
33 def test_too_small(self):
35 def test_too_small(self):
34 with self.assertRaisesRegexp(
36 with self.assertRaisesRegex(
35 zstd.ZstdError, 'could not determine frame header size: Src size '
37 zstd.ZstdError,
36 'is incorrect'):
38 "could not determine frame header size: Src size " "is incorrect",
37 zstd.frame_header_size(b'foob')
39 ):
40 zstd.frame_header_size(b"foob")
38
41
39 def test_basic(self):
42 def test_basic(self):
40 # It doesn't matter that it isn't a valid frame.
43 # It doesn't matter that it isn't a valid frame.
41 self.assertEqual(zstd.frame_header_size(b'long enough but no magic'), 6)
44 self.assertEqual(zstd.frame_header_size(b"long enough but no magic"), 6)
42
45
43
46
44 @make_cffi
47 @make_cffi
45 class TestFrameContentSize(unittest.TestCase):
48 class TestFrameContentSize(TestCase):
46 def test_empty(self):
49 def test_empty(self):
47 with self.assertRaisesRegexp(zstd.ZstdError,
50 with self.assertRaisesRegex(
48 'error when determining content size'):
51 zstd.ZstdError, "error when determining content size"
49 zstd.frame_content_size(b'')
52 ):
53 zstd.frame_content_size(b"")
50
54
51 def test_too_small(self):
55 def test_too_small(self):
52 with self.assertRaisesRegexp(zstd.ZstdError,
56 with self.assertRaisesRegex(
53 'error when determining content size'):
57 zstd.ZstdError, "error when determining content size"
54 zstd.frame_content_size(b'foob')
58 ):
59 zstd.frame_content_size(b"foob")
55
60
56 def test_bad_frame(self):
61 def test_bad_frame(self):
57 with self.assertRaisesRegexp(zstd.ZstdError,
62 with self.assertRaisesRegex(
58 'error when determining content size'):
63 zstd.ZstdError, "error when determining content size"
59 zstd.frame_content_size(b'invalid frame header')
64 ):
65 zstd.frame_content_size(b"invalid frame header")
60
66
61 def test_unknown(self):
67 def test_unknown(self):
62 cctx = zstd.ZstdCompressor(write_content_size=False)
68 cctx = zstd.ZstdCompressor(write_content_size=False)
63 frame = cctx.compress(b'foobar')
69 frame = cctx.compress(b"foobar")
64
70
65 self.assertEqual(zstd.frame_content_size(frame), -1)
71 self.assertEqual(zstd.frame_content_size(frame), -1)
66
72
67 def test_empty(self):
73 def test_empty(self):
68 cctx = zstd.ZstdCompressor()
74 cctx = zstd.ZstdCompressor()
69 frame = cctx.compress(b'')
75 frame = cctx.compress(b"")
70
76
71 self.assertEqual(zstd.frame_content_size(frame), 0)
77 self.assertEqual(zstd.frame_content_size(frame), 0)
72
78
73 def test_basic(self):
79 def test_basic(self):
74 cctx = zstd.ZstdCompressor()
80 cctx = zstd.ZstdCompressor()
75 frame = cctx.compress(b'foobar')
81 frame = cctx.compress(b"foobar")
76
82
77 self.assertEqual(zstd.frame_content_size(frame), 6)
83 self.assertEqual(zstd.frame_content_size(frame), 6)
78
84
79
85
80 @make_cffi
86 @make_cffi
81 class TestDecompressor(unittest.TestCase):
87 class TestDecompressor(TestCase):
82 def test_memory_size(self):
88 def test_memory_size(self):
83 dctx = zstd.ZstdDecompressor()
89 dctx = zstd.ZstdDecompressor()
84
90
@@ -86,22 +92,26 b' class TestDecompressor(unittest.TestCase'
86
92
87
93
88 @make_cffi
94 @make_cffi
89 class TestDecompressor_decompress(unittest.TestCase):
95 class TestDecompressor_decompress(TestCase):
90 def test_empty_input(self):
96 def test_empty_input(self):
91 dctx = zstd.ZstdDecompressor()
97 dctx = zstd.ZstdDecompressor()
92
98
93 with self.assertRaisesRegexp(zstd.ZstdError, 'error determining content size from frame header'):
99 with self.assertRaisesRegex(
94 dctx.decompress(b'')
100 zstd.ZstdError, "error determining content size from frame header"
101 ):
102 dctx.decompress(b"")
95
103
96 def test_invalid_input(self):
104 def test_invalid_input(self):
97 dctx = zstd.ZstdDecompressor()
105 dctx = zstd.ZstdDecompressor()
98
106
99 with self.assertRaisesRegexp(zstd.ZstdError, 'error determining content size from frame header'):
107 with self.assertRaisesRegex(
100 dctx.decompress(b'foobar')
108 zstd.ZstdError, "error determining content size from frame header"
109 ):
110 dctx.decompress(b"foobar")
101
111
102 def test_input_types(self):
112 def test_input_types(self):
103 cctx = zstd.ZstdCompressor(level=1)
113 cctx = zstd.ZstdCompressor(level=1)
104 compressed = cctx.compress(b'foo')
114 compressed = cctx.compress(b"foo")
105
115
106 mutable_array = bytearray(len(compressed))
116 mutable_array = bytearray(len(compressed))
107 mutable_array[:] = compressed
117 mutable_array[:] = compressed
@@ -114,36 +124,38 b' class TestDecompressor_decompress(unitte'
114
124
115 dctx = zstd.ZstdDecompressor()
125 dctx = zstd.ZstdDecompressor()
116 for source in sources:
126 for source in sources:
117 self.assertEqual(dctx.decompress(source), b'foo')
127 self.assertEqual(dctx.decompress(source), b"foo")
118
128
119 def test_no_content_size_in_frame(self):
129 def test_no_content_size_in_frame(self):
120 cctx = zstd.ZstdCompressor(write_content_size=False)
130 cctx = zstd.ZstdCompressor(write_content_size=False)
121 compressed = cctx.compress(b'foobar')
131 compressed = cctx.compress(b"foobar")
122
132
123 dctx = zstd.ZstdDecompressor()
133 dctx = zstd.ZstdDecompressor()
124 with self.assertRaisesRegexp(zstd.ZstdError, 'could not determine content size in frame header'):
134 with self.assertRaisesRegex(
135 zstd.ZstdError, "could not determine content size in frame header"
136 ):
125 dctx.decompress(compressed)
137 dctx.decompress(compressed)
126
138
127 def test_content_size_present(self):
139 def test_content_size_present(self):
128 cctx = zstd.ZstdCompressor()
140 cctx = zstd.ZstdCompressor()
129 compressed = cctx.compress(b'foobar')
141 compressed = cctx.compress(b"foobar")
130
142
131 dctx = zstd.ZstdDecompressor()
143 dctx = zstd.ZstdDecompressor()
132 decompressed = dctx.decompress(compressed)
144 decompressed = dctx.decompress(compressed)
133 self.assertEqual(decompressed, b'foobar')
145 self.assertEqual(decompressed, b"foobar")
134
146
135 def test_empty_roundtrip(self):
147 def test_empty_roundtrip(self):
136 cctx = zstd.ZstdCompressor()
148 cctx = zstd.ZstdCompressor()
137 compressed = cctx.compress(b'')
149 compressed = cctx.compress(b"")
138
150
139 dctx = zstd.ZstdDecompressor()
151 dctx = zstd.ZstdDecompressor()
140 decompressed = dctx.decompress(compressed)
152 decompressed = dctx.decompress(compressed)
141
153
142 self.assertEqual(decompressed, b'')
154 self.assertEqual(decompressed, b"")
143
155
144 def test_max_output_size(self):
156 def test_max_output_size(self):
145 cctx = zstd.ZstdCompressor(write_content_size=False)
157 cctx = zstd.ZstdCompressor(write_content_size=False)
146 source = b'foobar' * 256
158 source = b"foobar" * 256
147 compressed = cctx.compress(source)
159 compressed = cctx.compress(source)
148
160
149 dctx = zstd.ZstdDecompressor()
161 dctx = zstd.ZstdDecompressor()
@@ -152,8 +164,9 b' class TestDecompressor_decompress(unitte'
152 self.assertEqual(decompressed, source)
164 self.assertEqual(decompressed, source)
153
165
154 # Input size - 1 fails
166 # Input size - 1 fails
155 with self.assertRaisesRegexp(zstd.ZstdError,
167 with self.assertRaisesRegex(
156 'decompression error: did not decompress full frame'):
168 zstd.ZstdError, "decompression error: did not decompress full frame"
169 ):
157 dctx.decompress(compressed, max_output_size=len(source) - 1)
170 dctx.decompress(compressed, max_output_size=len(source) - 1)
158
171
159 # Input size + 1 works
172 # Input size + 1 works
@@ -166,24 +179,24 b' class TestDecompressor_decompress(unitte'
166
179
167 def test_stupidly_large_output_buffer(self):
180 def test_stupidly_large_output_buffer(self):
168 cctx = zstd.ZstdCompressor(write_content_size=False)
181 cctx = zstd.ZstdCompressor(write_content_size=False)
169 compressed = cctx.compress(b'foobar' * 256)
182 compressed = cctx.compress(b"foobar" * 256)
170 dctx = zstd.ZstdDecompressor()
183 dctx = zstd.ZstdDecompressor()
171
184
172 # Will get OverflowError on some Python distributions that can't
185 # Will get OverflowError on some Python distributions that can't
173 # handle really large integers.
186 # handle really large integers.
174 with self.assertRaises((MemoryError, OverflowError)):
187 with self.assertRaises((MemoryError, OverflowError)):
175 dctx.decompress(compressed, max_output_size=2**62)
188 dctx.decompress(compressed, max_output_size=2 ** 62)
176
189
177 def test_dictionary(self):
190 def test_dictionary(self):
178 samples = []
191 samples = []
179 for i in range(128):
192 for i in range(128):
180 samples.append(b'foo' * 64)
193 samples.append(b"foo" * 64)
181 samples.append(b'bar' * 64)
194 samples.append(b"bar" * 64)
182 samples.append(b'foobar' * 64)
195 samples.append(b"foobar" * 64)
183
196
184 d = zstd.train_dictionary(8192, samples)
197 d = zstd.train_dictionary(8192, samples)
185
198
186 orig = b'foobar' * 16384
199 orig = b"foobar" * 16384
187 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
200 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
188 compressed = cctx.compress(orig)
201 compressed = cctx.compress(orig)
189
202
@@ -195,13 +208,13 b' class TestDecompressor_decompress(unitte'
195 def test_dictionary_multiple(self):
208 def test_dictionary_multiple(self):
196 samples = []
209 samples = []
197 for i in range(128):
210 for i in range(128):
198 samples.append(b'foo' * 64)
211 samples.append(b"foo" * 64)
199 samples.append(b'bar' * 64)
212 samples.append(b"bar" * 64)
200 samples.append(b'foobar' * 64)
213 samples.append(b"foobar" * 64)
201
214
202 d = zstd.train_dictionary(8192, samples)
215 d = zstd.train_dictionary(8192, samples)
203
216
204 sources = (b'foobar' * 8192, b'foo' * 8192, b'bar' * 8192)
217 sources = (b"foobar" * 8192, b"foo" * 8192, b"bar" * 8192)
205 compressed = []
218 compressed = []
206 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
219 cctx = zstd.ZstdCompressor(level=1, dict_data=d)
207 for source in sources:
220 for source in sources:
@@ -213,7 +226,7 b' class TestDecompressor_decompress(unitte'
213 self.assertEqual(decompressed, sources[i])
226 self.assertEqual(decompressed, sources[i])
214
227
215 def test_max_window_size(self):
228 def test_max_window_size(self):
216 with open(__file__, 'rb') as fh:
229 with open(__file__, "rb") as fh:
217 source = fh.read()
230 source = fh.read()
218
231
219 # If we write a content size, the decompressor engages single pass
232 # If we write a content size, the decompressor engages single pass
@@ -221,15 +234,16 b' class TestDecompressor_decompress(unitte'
221 cctx = zstd.ZstdCompressor(write_content_size=False)
234 cctx = zstd.ZstdCompressor(write_content_size=False)
222 frame = cctx.compress(source)
235 frame = cctx.compress(source)
223
236
224 dctx = zstd.ZstdDecompressor(max_window_size=2**zstd.WINDOWLOG_MIN)
237 dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN)
225
238
226 with self.assertRaisesRegexp(
239 with self.assertRaisesRegex(
227 zstd.ZstdError, 'decompression error: Frame requires too much memory'):
240 zstd.ZstdError, "decompression error: Frame requires too much memory"
241 ):
228 dctx.decompress(frame, max_output_size=len(source))
242 dctx.decompress(frame, max_output_size=len(source))
229
243
230
244
231 @make_cffi
245 @make_cffi
232 class TestDecompressor_copy_stream(unittest.TestCase):
246 class TestDecompressor_copy_stream(TestCase):
233 def test_no_read(self):
247 def test_no_read(self):
234 source = object()
248 source = object()
235 dest = io.BytesIO()
249 dest = io.BytesIO()
@@ -256,12 +270,12 b' class TestDecompressor_copy_stream(unitt'
256
270
257 self.assertEqual(r, 0)
271 self.assertEqual(r, 0)
258 self.assertEqual(w, 0)
272 self.assertEqual(w, 0)
259 self.assertEqual(dest.getvalue(), b'')
273 self.assertEqual(dest.getvalue(), b"")
260
274
261 def test_large_data(self):
275 def test_large_data(self):
262 source = io.BytesIO()
276 source = io.BytesIO()
263 for i in range(255):
277 for i in range(255):
264 source.write(struct.Struct('>B').pack(i) * 16384)
278 source.write(struct.Struct(">B").pack(i) * 16384)
265 source.seek(0)
279 source.seek(0)
266
280
267 compressed = io.BytesIO()
281 compressed = io.BytesIO()
@@ -277,33 +291,32 b' class TestDecompressor_copy_stream(unitt'
277 self.assertEqual(w, len(source.getvalue()))
291 self.assertEqual(w, len(source.getvalue()))
278
292
279 def test_read_write_size(self):
293 def test_read_write_size(self):
280 source = OpCountingBytesIO(zstd.ZstdCompressor().compress(
294 source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar"))
281 b'foobarfoobar'))
282
295
283 dest = OpCountingBytesIO()
296 dest = OpCountingBytesIO()
284 dctx = zstd.ZstdDecompressor()
297 dctx = zstd.ZstdDecompressor()
285 r, w = dctx.copy_stream(source, dest, read_size=1, write_size=1)
298 r, w = dctx.copy_stream(source, dest, read_size=1, write_size=1)
286
299
287 self.assertEqual(r, len(source.getvalue()))
300 self.assertEqual(r, len(source.getvalue()))
288 self.assertEqual(w, len(b'foobarfoobar'))
301 self.assertEqual(w, len(b"foobarfoobar"))
289 self.assertEqual(source._read_count, len(source.getvalue()) + 1)
302 self.assertEqual(source._read_count, len(source.getvalue()) + 1)
290 self.assertEqual(dest._write_count, len(dest.getvalue()))
303 self.assertEqual(dest._write_count, len(dest.getvalue()))
291
304
292
305
293 @make_cffi
306 @make_cffi
294 class TestDecompressor_stream_reader(unittest.TestCase):
307 class TestDecompressor_stream_reader(TestCase):
295 def test_context_manager(self):
308 def test_context_manager(self):
296 dctx = zstd.ZstdDecompressor()
309 dctx = zstd.ZstdDecompressor()
297
310
298 with dctx.stream_reader(b'foo') as reader:
311 with dctx.stream_reader(b"foo") as reader:
299 with self.assertRaisesRegexp(ValueError, 'cannot __enter__ multiple times'):
312 with self.assertRaisesRegex(ValueError, "cannot __enter__ multiple times"):
300 with reader as reader2:
313 with reader as reader2:
301 pass
314 pass
302
315
303 def test_not_implemented(self):
316 def test_not_implemented(self):
304 dctx = zstd.ZstdDecompressor()
317 dctx = zstd.ZstdDecompressor()
305
318
306 with dctx.stream_reader(b'foo') as reader:
319 with dctx.stream_reader(b"foo") as reader:
307 with self.assertRaises(io.UnsupportedOperation):
320 with self.assertRaises(io.UnsupportedOperation):
308 reader.readline()
321 reader.readline()
309
322
@@ -317,7 +330,7 b' class TestDecompressor_stream_reader(uni'
317 next(reader)
330 next(reader)
318
331
319 with self.assertRaises(io.UnsupportedOperation):
332 with self.assertRaises(io.UnsupportedOperation):
320 reader.write(b'foo')
333 reader.write(b"foo")
321
334
322 with self.assertRaises(io.UnsupportedOperation):
335 with self.assertRaises(io.UnsupportedOperation):
323 reader.writelines([])
336 reader.writelines([])
@@ -325,7 +338,7 b' class TestDecompressor_stream_reader(uni'
325 def test_constant_methods(self):
338 def test_constant_methods(self):
326 dctx = zstd.ZstdDecompressor()
339 dctx = zstd.ZstdDecompressor()
327
340
328 with dctx.stream_reader(b'foo') as reader:
341 with dctx.stream_reader(b"foo") as reader:
329 self.assertFalse(reader.closed)
342 self.assertFalse(reader.closed)
330 self.assertTrue(reader.readable())
343 self.assertTrue(reader.readable())
331 self.assertFalse(reader.writable())
344 self.assertFalse(reader.writable())
@@ -340,29 +353,31 b' class TestDecompressor_stream_reader(uni'
340 def test_read_closed(self):
353 def test_read_closed(self):
341 dctx = zstd.ZstdDecompressor()
354 dctx = zstd.ZstdDecompressor()
342
355
343 with dctx.stream_reader(b'foo') as reader:
356 with dctx.stream_reader(b"foo") as reader:
344 reader.close()
357 reader.close()
345 self.assertTrue(reader.closed)
358 self.assertTrue(reader.closed)
346 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
359 with self.assertRaisesRegex(ValueError, "stream is closed"):
347 reader.read(1)
360 reader.read(1)
348
361
349 def test_read_sizes(self):
362 def test_read_sizes(self):
350 cctx = zstd.ZstdCompressor()
363 cctx = zstd.ZstdCompressor()
351 foo = cctx.compress(b'foo')
364 foo = cctx.compress(b"foo")
352
365
353 dctx = zstd.ZstdDecompressor()
366 dctx = zstd.ZstdDecompressor()
354
367
355 with dctx.stream_reader(foo) as reader:
368 with dctx.stream_reader(foo) as reader:
356 with self.assertRaisesRegexp(ValueError, 'cannot read negative amounts less than -1'):
369 with self.assertRaisesRegex(
370 ValueError, "cannot read negative amounts less than -1"
371 ):
357 reader.read(-2)
372 reader.read(-2)
358
373
359 self.assertEqual(reader.read(0), b'')
374 self.assertEqual(reader.read(0), b"")
360 self.assertEqual(reader.read(), b'foo')
375 self.assertEqual(reader.read(), b"foo")
361
376
362 def test_read_buffer(self):
377 def test_read_buffer(self):
363 cctx = zstd.ZstdCompressor()
378 cctx = zstd.ZstdCompressor()
364
379
365 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
380 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
366 frame = cctx.compress(source)
381 frame = cctx.compress(source)
367
382
368 dctx = zstd.ZstdDecompressor()
383 dctx = zstd.ZstdDecompressor()
@@ -376,14 +391,14 b' class TestDecompressor_stream_reader(uni'
376 self.assertEqual(reader.tell(), len(source))
391 self.assertEqual(reader.tell(), len(source))
377
392
378 # Read after EOF should return empty bytes.
393 # Read after EOF should return empty bytes.
379 self.assertEqual(reader.read(1), b'')
394 self.assertEqual(reader.read(1), b"")
380 self.assertEqual(reader.tell(), len(result))
395 self.assertEqual(reader.tell(), len(result))
381
396
382 self.assertTrue(reader.closed)
397 self.assertTrue(reader.closed)
383
398
384 def test_read_buffer_small_chunks(self):
399 def test_read_buffer_small_chunks(self):
385 cctx = zstd.ZstdCompressor()
400 cctx = zstd.ZstdCompressor()
386 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
401 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
387 frame = cctx.compress(source)
402 frame = cctx.compress(source)
388
403
389 dctx = zstd.ZstdDecompressor()
404 dctx = zstd.ZstdDecompressor()
@@ -398,11 +413,11 b' class TestDecompressor_stream_reader(uni'
398 chunks.append(chunk)
413 chunks.append(chunk)
399 self.assertEqual(reader.tell(), sum(map(len, chunks)))
414 self.assertEqual(reader.tell(), sum(map(len, chunks)))
400
415
401 self.assertEqual(b''.join(chunks), source)
416 self.assertEqual(b"".join(chunks), source)
402
417
403 def test_read_stream(self):
418 def test_read_stream(self):
404 cctx = zstd.ZstdCompressor()
419 cctx = zstd.ZstdCompressor()
405 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
420 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
406 frame = cctx.compress(source)
421 frame = cctx.compress(source)
407
422
408 dctx = zstd.ZstdDecompressor()
423 dctx = zstd.ZstdDecompressor()
@@ -412,7 +427,7 b' class TestDecompressor_stream_reader(uni'
412 chunk = reader.read(8192)
427 chunk = reader.read(8192)
413 self.assertEqual(chunk, source)
428 self.assertEqual(chunk, source)
414 self.assertEqual(reader.tell(), len(source))
429 self.assertEqual(reader.tell(), len(source))
415 self.assertEqual(reader.read(1), b'')
430 self.assertEqual(reader.read(1), b"")
416 self.assertEqual(reader.tell(), len(source))
431 self.assertEqual(reader.tell(), len(source))
417 self.assertFalse(reader.closed)
432 self.assertFalse(reader.closed)
418
433
@@ -420,7 +435,7 b' class TestDecompressor_stream_reader(uni'
420
435
421 def test_read_stream_small_chunks(self):
436 def test_read_stream_small_chunks(self):
422 cctx = zstd.ZstdCompressor()
437 cctx = zstd.ZstdCompressor()
423 source = b''.join([b'foo' * 60, b'bar' * 60, b'baz' * 60])
438 source = b"".join([b"foo" * 60, b"bar" * 60, b"baz" * 60])
424 frame = cctx.compress(source)
439 frame = cctx.compress(source)
425
440
426 dctx = zstd.ZstdDecompressor()
441 dctx = zstd.ZstdDecompressor()
@@ -435,11 +450,11 b' class TestDecompressor_stream_reader(uni'
435 chunks.append(chunk)
450 chunks.append(chunk)
436 self.assertEqual(reader.tell(), sum(map(len, chunks)))
451 self.assertEqual(reader.tell(), sum(map(len, chunks)))
437
452
438 self.assertEqual(b''.join(chunks), source)
453 self.assertEqual(b"".join(chunks), source)
439
454
440 def test_read_after_exit(self):
455 def test_read_after_exit(self):
441 cctx = zstd.ZstdCompressor()
456 cctx = zstd.ZstdCompressor()
442 frame = cctx.compress(b'foo' * 60)
457 frame = cctx.compress(b"foo" * 60)
443
458
444 dctx = zstd.ZstdDecompressor()
459 dctx = zstd.ZstdDecompressor()
445
460
@@ -449,45 +464,46 b' class TestDecompressor_stream_reader(uni'
449
464
450 self.assertTrue(reader.closed)
465 self.assertTrue(reader.closed)
451
466
452 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
467 with self.assertRaisesRegex(ValueError, "stream is closed"):
453 reader.read(10)
468 reader.read(10)
454
469
455 def test_illegal_seeks(self):
470 def test_illegal_seeks(self):
456 cctx = zstd.ZstdCompressor()
471 cctx = zstd.ZstdCompressor()
457 frame = cctx.compress(b'foo' * 60)
472 frame = cctx.compress(b"foo" * 60)
458
473
459 dctx = zstd.ZstdDecompressor()
474 dctx = zstd.ZstdDecompressor()
460
475
461 with dctx.stream_reader(frame) as reader:
476 with dctx.stream_reader(frame) as reader:
462 with self.assertRaisesRegexp(ValueError,
477 with self.assertRaisesRegex(ValueError, "cannot seek to negative position"):
463 'cannot seek to negative position'):
464 reader.seek(-1, os.SEEK_SET)
478 reader.seek(-1, os.SEEK_SET)
465
479
466 reader.read(1)
480 reader.read(1)
467
481
468 with self.assertRaisesRegexp(
482 with self.assertRaisesRegex(
469 ValueError, 'cannot seek zstd decompression stream backwards'):
483 ValueError, "cannot seek zstd decompression stream backwards"
484 ):
470 reader.seek(0, os.SEEK_SET)
485 reader.seek(0, os.SEEK_SET)
471
486
472 with self.assertRaisesRegexp(
487 with self.assertRaisesRegex(
473 ValueError, 'cannot seek zstd decompression stream backwards'):
488 ValueError, "cannot seek zstd decompression stream backwards"
489 ):
474 reader.seek(-1, os.SEEK_CUR)
490 reader.seek(-1, os.SEEK_CUR)
475
491
476 with self.assertRaisesRegexp(
492 with self.assertRaisesRegex(
477 ValueError,
493 ValueError, "zstd decompression streams cannot be seeked with SEEK_END"
478 'zstd decompression streams cannot be seeked with SEEK_END'):
494 ):
479 reader.seek(0, os.SEEK_END)
495 reader.seek(0, os.SEEK_END)
480
496
481 reader.close()
497 reader.close()
482
498
483 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
499 with self.assertRaisesRegex(ValueError, "stream is closed"):
484 reader.seek(4, os.SEEK_SET)
500 reader.seek(4, os.SEEK_SET)
485
501
486 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
502 with self.assertRaisesRegex(ValueError, "stream is closed"):
487 reader.seek(0)
503 reader.seek(0)
488
504
489 def test_seek(self):
505 def test_seek(self):
490 source = b'foobar' * 60
506 source = b"foobar" * 60
491 cctx = zstd.ZstdCompressor()
507 cctx = zstd.ZstdCompressor()
492 frame = cctx.compress(source)
508 frame = cctx.compress(source)
493
509
@@ -495,32 +511,32 b' class TestDecompressor_stream_reader(uni'
495
511
496 with dctx.stream_reader(frame) as reader:
512 with dctx.stream_reader(frame) as reader:
497 reader.seek(3)
513 reader.seek(3)
498 self.assertEqual(reader.read(3), b'bar')
514 self.assertEqual(reader.read(3), b"bar")
499
515
500 reader.seek(4, os.SEEK_CUR)
516 reader.seek(4, os.SEEK_CUR)
501 self.assertEqual(reader.read(2), b'ar')
517 self.assertEqual(reader.read(2), b"ar")
502
518
503 def test_no_context_manager(self):
519 def test_no_context_manager(self):
504 source = b'foobar' * 60
520 source = b"foobar" * 60
505 cctx = zstd.ZstdCompressor()
521 cctx = zstd.ZstdCompressor()
506 frame = cctx.compress(source)
522 frame = cctx.compress(source)
507
523
508 dctx = zstd.ZstdDecompressor()
524 dctx = zstd.ZstdDecompressor()
509 reader = dctx.stream_reader(frame)
525 reader = dctx.stream_reader(frame)
510
526
511 self.assertEqual(reader.read(6), b'foobar')
527 self.assertEqual(reader.read(6), b"foobar")
512 self.assertEqual(reader.read(18), b'foobar' * 3)
528 self.assertEqual(reader.read(18), b"foobar" * 3)
513 self.assertFalse(reader.closed)
529 self.assertFalse(reader.closed)
514
530
515 # Calling close prevents subsequent use.
531 # Calling close prevents subsequent use.
516 reader.close()
532 reader.close()
517 self.assertTrue(reader.closed)
533 self.assertTrue(reader.closed)
518
534
519 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
535 with self.assertRaisesRegex(ValueError, "stream is closed"):
520 reader.read(6)
536 reader.read(6)
521
537
522 def test_read_after_error(self):
538 def test_read_after_error(self):
523 source = io.BytesIO(b'')
539 source = io.BytesIO(b"")
524 dctx = zstd.ZstdDecompressor()
540 dctx = zstd.ZstdDecompressor()
525
541
526 reader = dctx.stream_reader(source)
542 reader = dctx.stream_reader(source)
@@ -529,7 +545,7 b' class TestDecompressor_stream_reader(uni'
529 reader.read(0)
545 reader.read(0)
530
546
531 with reader:
547 with reader:
532 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
548 with self.assertRaisesRegex(ValueError, "stream is closed"):
533 reader.read(100)
549 reader.read(100)
534
550
535 def test_partial_read(self):
551 def test_partial_read(self):
@@ -553,87 +569,87 b' class TestDecompressor_stream_reader(uni'
553 cctx = zstd.ZstdCompressor()
569 cctx = zstd.ZstdCompressor()
554 source = io.BytesIO()
570 source = io.BytesIO()
555 writer = cctx.stream_writer(source)
571 writer = cctx.stream_writer(source)
556 writer.write(b'foo')
572 writer.write(b"foo")
557 writer.flush(zstd.FLUSH_FRAME)
573 writer.flush(zstd.FLUSH_FRAME)
558 writer.write(b'bar')
574 writer.write(b"bar")
559 writer.flush(zstd.FLUSH_FRAME)
575 writer.flush(zstd.FLUSH_FRAME)
560
576
561 dctx = zstd.ZstdDecompressor()
577 dctx = zstd.ZstdDecompressor()
562
578
563 reader = dctx.stream_reader(source.getvalue())
579 reader = dctx.stream_reader(source.getvalue())
564 self.assertEqual(reader.read(2), b'fo')
580 self.assertEqual(reader.read(2), b"fo")
565 self.assertEqual(reader.read(2), b'o')
581 self.assertEqual(reader.read(2), b"o")
566 self.assertEqual(reader.read(2), b'ba')
582 self.assertEqual(reader.read(2), b"ba")
567 self.assertEqual(reader.read(2), b'r')
583 self.assertEqual(reader.read(2), b"r")
568
584
569 source.seek(0)
585 source.seek(0)
570 reader = dctx.stream_reader(source)
586 reader = dctx.stream_reader(source)
571 self.assertEqual(reader.read(2), b'fo')
587 self.assertEqual(reader.read(2), b"fo")
572 self.assertEqual(reader.read(2), b'o')
588 self.assertEqual(reader.read(2), b"o")
573 self.assertEqual(reader.read(2), b'ba')
589 self.assertEqual(reader.read(2), b"ba")
574 self.assertEqual(reader.read(2), b'r')
590 self.assertEqual(reader.read(2), b"r")
575
591
576 reader = dctx.stream_reader(source.getvalue())
592 reader = dctx.stream_reader(source.getvalue())
577 self.assertEqual(reader.read(3), b'foo')
593 self.assertEqual(reader.read(3), b"foo")
578 self.assertEqual(reader.read(3), b'bar')
594 self.assertEqual(reader.read(3), b"bar")
579
595
580 source.seek(0)
596 source.seek(0)
581 reader = dctx.stream_reader(source)
597 reader = dctx.stream_reader(source)
582 self.assertEqual(reader.read(3), b'foo')
598 self.assertEqual(reader.read(3), b"foo")
583 self.assertEqual(reader.read(3), b'bar')
599 self.assertEqual(reader.read(3), b"bar")
584
600
585 reader = dctx.stream_reader(source.getvalue())
601 reader = dctx.stream_reader(source.getvalue())
586 self.assertEqual(reader.read(4), b'foo')
602 self.assertEqual(reader.read(4), b"foo")
587 self.assertEqual(reader.read(4), b'bar')
603 self.assertEqual(reader.read(4), b"bar")
588
604
589 source.seek(0)
605 source.seek(0)
590 reader = dctx.stream_reader(source)
606 reader = dctx.stream_reader(source)
591 self.assertEqual(reader.read(4), b'foo')
607 self.assertEqual(reader.read(4), b"foo")
592 self.assertEqual(reader.read(4), b'bar')
608 self.assertEqual(reader.read(4), b"bar")
593
609
594 reader = dctx.stream_reader(source.getvalue())
610 reader = dctx.stream_reader(source.getvalue())
595 self.assertEqual(reader.read(128), b'foo')
611 self.assertEqual(reader.read(128), b"foo")
596 self.assertEqual(reader.read(128), b'bar')
612 self.assertEqual(reader.read(128), b"bar")
597
613
598 source.seek(0)
614 source.seek(0)
599 reader = dctx.stream_reader(source)
615 reader = dctx.stream_reader(source)
600 self.assertEqual(reader.read(128), b'foo')
616 self.assertEqual(reader.read(128), b"foo")
601 self.assertEqual(reader.read(128), b'bar')
617 self.assertEqual(reader.read(128), b"bar")
602
618
603 # Now tests for reads spanning frames.
619 # Now tests for reads spanning frames.
604 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
620 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
605 self.assertEqual(reader.read(3), b'foo')
621 self.assertEqual(reader.read(3), b"foo")
606 self.assertEqual(reader.read(3), b'bar')
622 self.assertEqual(reader.read(3), b"bar")
607
623
608 source.seek(0)
624 source.seek(0)
609 reader = dctx.stream_reader(source, read_across_frames=True)
625 reader = dctx.stream_reader(source, read_across_frames=True)
610 self.assertEqual(reader.read(3), b'foo')
626 self.assertEqual(reader.read(3), b"foo")
611 self.assertEqual(reader.read(3), b'bar')
627 self.assertEqual(reader.read(3), b"bar")
612
628
613 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
629 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
614 self.assertEqual(reader.read(6), b'foobar')
630 self.assertEqual(reader.read(6), b"foobar")
615
631
616 source.seek(0)
632 source.seek(0)
617 reader = dctx.stream_reader(source, read_across_frames=True)
633 reader = dctx.stream_reader(source, read_across_frames=True)
618 self.assertEqual(reader.read(6), b'foobar')
634 self.assertEqual(reader.read(6), b"foobar")
619
635
620 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
636 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
621 self.assertEqual(reader.read(7), b'foobar')
637 self.assertEqual(reader.read(7), b"foobar")
622
638
623 source.seek(0)
639 source.seek(0)
624 reader = dctx.stream_reader(source, read_across_frames=True)
640 reader = dctx.stream_reader(source, read_across_frames=True)
625 self.assertEqual(reader.read(7), b'foobar')
641 self.assertEqual(reader.read(7), b"foobar")
626
642
627 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
643 reader = dctx.stream_reader(source.getvalue(), read_across_frames=True)
628 self.assertEqual(reader.read(128), b'foobar')
644 self.assertEqual(reader.read(128), b"foobar")
629
645
630 source.seek(0)
646 source.seek(0)
631 reader = dctx.stream_reader(source, read_across_frames=True)
647 reader = dctx.stream_reader(source, read_across_frames=True)
632 self.assertEqual(reader.read(128), b'foobar')
648 self.assertEqual(reader.read(128), b"foobar")
633
649
634 def test_readinto(self):
650 def test_readinto(self):
635 cctx = zstd.ZstdCompressor()
651 cctx = zstd.ZstdCompressor()
636 foo = cctx.compress(b'foo')
652 foo = cctx.compress(b"foo")
637
653
638 dctx = zstd.ZstdDecompressor()
654 dctx = zstd.ZstdDecompressor()
639
655
@@ -641,116 +657,116 b' class TestDecompressor_stream_reader(uni'
641 # The exact exception varies based on the backend.
657 # The exact exception varies based on the backend.
642 reader = dctx.stream_reader(foo)
658 reader = dctx.stream_reader(foo)
643 with self.assertRaises(Exception):
659 with self.assertRaises(Exception):
644 reader.readinto(b'foobar')
660 reader.readinto(b"foobar")
645
661
646 # readinto() with sufficiently large destination.
662 # readinto() with sufficiently large destination.
647 b = bytearray(1024)
663 b = bytearray(1024)
648 reader = dctx.stream_reader(foo)
664 reader = dctx.stream_reader(foo)
649 self.assertEqual(reader.readinto(b), 3)
665 self.assertEqual(reader.readinto(b), 3)
650 self.assertEqual(b[0:3], b'foo')
666 self.assertEqual(b[0:3], b"foo")
651 self.assertEqual(reader.readinto(b), 0)
667 self.assertEqual(reader.readinto(b), 0)
652 self.assertEqual(b[0:3], b'foo')
668 self.assertEqual(b[0:3], b"foo")
653
669
654 # readinto() with small reads.
670 # readinto() with small reads.
655 b = bytearray(1024)
671 b = bytearray(1024)
656 reader = dctx.stream_reader(foo, read_size=1)
672 reader = dctx.stream_reader(foo, read_size=1)
657 self.assertEqual(reader.readinto(b), 3)
673 self.assertEqual(reader.readinto(b), 3)
658 self.assertEqual(b[0:3], b'foo')
674 self.assertEqual(b[0:3], b"foo")
659
675
660 # Too small destination buffer.
676 # Too small destination buffer.
661 b = bytearray(2)
677 b = bytearray(2)
662 reader = dctx.stream_reader(foo)
678 reader = dctx.stream_reader(foo)
663 self.assertEqual(reader.readinto(b), 2)
679 self.assertEqual(reader.readinto(b), 2)
664 self.assertEqual(b[:], b'fo')
680 self.assertEqual(b[:], b"fo")
665
681
666 def test_readinto1(self):
682 def test_readinto1(self):
667 cctx = zstd.ZstdCompressor()
683 cctx = zstd.ZstdCompressor()
668 foo = cctx.compress(b'foo')
684 foo = cctx.compress(b"foo")
669
685
670 dctx = zstd.ZstdDecompressor()
686 dctx = zstd.ZstdDecompressor()
671
687
672 reader = dctx.stream_reader(foo)
688 reader = dctx.stream_reader(foo)
673 with self.assertRaises(Exception):
689 with self.assertRaises(Exception):
674 reader.readinto1(b'foobar')
690 reader.readinto1(b"foobar")
675
691
676 # Sufficiently large destination.
692 # Sufficiently large destination.
677 b = bytearray(1024)
693 b = bytearray(1024)
678 reader = dctx.stream_reader(foo)
694 reader = dctx.stream_reader(foo)
679 self.assertEqual(reader.readinto1(b), 3)
695 self.assertEqual(reader.readinto1(b), 3)
680 self.assertEqual(b[0:3], b'foo')
696 self.assertEqual(b[0:3], b"foo")
681 self.assertEqual(reader.readinto1(b), 0)
697 self.assertEqual(reader.readinto1(b), 0)
682 self.assertEqual(b[0:3], b'foo')
698 self.assertEqual(b[0:3], b"foo")
683
699
684 # readinto() with small reads.
700 # readinto() with small reads.
685 b = bytearray(1024)
701 b = bytearray(1024)
686 reader = dctx.stream_reader(foo, read_size=1)
702 reader = dctx.stream_reader(foo, read_size=1)
687 self.assertEqual(reader.readinto1(b), 3)
703 self.assertEqual(reader.readinto1(b), 3)
688 self.assertEqual(b[0:3], b'foo')
704 self.assertEqual(b[0:3], b"foo")
689
705
690 # Too small destination buffer.
706 # Too small destination buffer.
691 b = bytearray(2)
707 b = bytearray(2)
692 reader = dctx.stream_reader(foo)
708 reader = dctx.stream_reader(foo)
693 self.assertEqual(reader.readinto1(b), 2)
709 self.assertEqual(reader.readinto1(b), 2)
694 self.assertEqual(b[:], b'fo')
710 self.assertEqual(b[:], b"fo")
695
711
696 def test_readall(self):
712 def test_readall(self):
697 cctx = zstd.ZstdCompressor()
713 cctx = zstd.ZstdCompressor()
698 foo = cctx.compress(b'foo')
714 foo = cctx.compress(b"foo")
699
715
700 dctx = zstd.ZstdDecompressor()
716 dctx = zstd.ZstdDecompressor()
701 reader = dctx.stream_reader(foo)
717 reader = dctx.stream_reader(foo)
702
718
703 self.assertEqual(reader.readall(), b'foo')
719 self.assertEqual(reader.readall(), b"foo")
704
720
705 def test_read1(self):
721 def test_read1(self):
706 cctx = zstd.ZstdCompressor()
722 cctx = zstd.ZstdCompressor()
707 foo = cctx.compress(b'foo')
723 foo = cctx.compress(b"foo")
708
724
709 dctx = zstd.ZstdDecompressor()
725 dctx = zstd.ZstdDecompressor()
710
726
711 b = OpCountingBytesIO(foo)
727 b = OpCountingBytesIO(foo)
712 reader = dctx.stream_reader(b)
728 reader = dctx.stream_reader(b)
713
729
714 self.assertEqual(reader.read1(), b'foo')
730 self.assertEqual(reader.read1(), b"foo")
715 self.assertEqual(b._read_count, 1)
731 self.assertEqual(b._read_count, 1)
716
732
717 b = OpCountingBytesIO(foo)
733 b = OpCountingBytesIO(foo)
718 reader = dctx.stream_reader(b)
734 reader = dctx.stream_reader(b)
719
735
720 self.assertEqual(reader.read1(0), b'')
736 self.assertEqual(reader.read1(0), b"")
721 self.assertEqual(reader.read1(2), b'fo')
737 self.assertEqual(reader.read1(2), b"fo")
722 self.assertEqual(b._read_count, 1)
738 self.assertEqual(b._read_count, 1)
723 self.assertEqual(reader.read1(1), b'o')
739 self.assertEqual(reader.read1(1), b"o")
724 self.assertEqual(b._read_count, 1)
740 self.assertEqual(b._read_count, 1)
725 self.assertEqual(reader.read1(1), b'')
741 self.assertEqual(reader.read1(1), b"")
726 self.assertEqual(b._read_count, 2)
742 self.assertEqual(b._read_count, 2)
727
743
728 def test_read_lines(self):
744 def test_read_lines(self):
729 cctx = zstd.ZstdCompressor()
745 cctx = zstd.ZstdCompressor()
730 source = b'\n'.join(('line %d' % i).encode('ascii') for i in range(1024))
746 source = b"\n".join(("line %d" % i).encode("ascii") for i in range(1024))
731
747
732 frame = cctx.compress(source)
748 frame = cctx.compress(source)
733
749
734 dctx = zstd.ZstdDecompressor()
750 dctx = zstd.ZstdDecompressor()
735 reader = dctx.stream_reader(frame)
751 reader = dctx.stream_reader(frame)
736 tr = io.TextIOWrapper(reader, encoding='utf-8')
752 tr = io.TextIOWrapper(reader, encoding="utf-8")
737
753
738 lines = []
754 lines = []
739 for line in tr:
755 for line in tr:
740 lines.append(line.encode('utf-8'))
756 lines.append(line.encode("utf-8"))
741
757
742 self.assertEqual(len(lines), 1024)
758 self.assertEqual(len(lines), 1024)
743 self.assertEqual(b''.join(lines), source)
759 self.assertEqual(b"".join(lines), source)
744
760
745 reader = dctx.stream_reader(frame)
761 reader = dctx.stream_reader(frame)
746 tr = io.TextIOWrapper(reader, encoding='utf-8')
762 tr = io.TextIOWrapper(reader, encoding="utf-8")
747
763
748 lines = tr.readlines()
764 lines = tr.readlines()
749 self.assertEqual(len(lines), 1024)
765 self.assertEqual(len(lines), 1024)
750 self.assertEqual(''.join(lines).encode('utf-8'), source)
766 self.assertEqual("".join(lines).encode("utf-8"), source)
751
767
752 reader = dctx.stream_reader(frame)
768 reader = dctx.stream_reader(frame)
753 tr = io.TextIOWrapper(reader, encoding='utf-8')
769 tr = io.TextIOWrapper(reader, encoding="utf-8")
754
770
755 lines = []
771 lines = []
756 while True:
772 while True:
@@ -758,26 +774,26 b' class TestDecompressor_stream_reader(uni'
758 if not line:
774 if not line:
759 break
775 break
760
776
761 lines.append(line.encode('utf-8'))
777 lines.append(line.encode("utf-8"))
762
778
763 self.assertEqual(len(lines), 1024)
779 self.assertEqual(len(lines), 1024)
764 self.assertEqual(b''.join(lines), source)
780 self.assertEqual(b"".join(lines), source)
765
781
766
782
767 @make_cffi
783 @make_cffi
768 class TestDecompressor_decompressobj(unittest.TestCase):
784 class TestDecompressor_decompressobj(TestCase):
769 def test_simple(self):
785 def test_simple(self):
770 data = zstd.ZstdCompressor(level=1).compress(b'foobar')
786 data = zstd.ZstdCompressor(level=1).compress(b"foobar")
771
787
772 dctx = zstd.ZstdDecompressor()
788 dctx = zstd.ZstdDecompressor()
773 dobj = dctx.decompressobj()
789 dobj = dctx.decompressobj()
774 self.assertEqual(dobj.decompress(data), b'foobar')
790 self.assertEqual(dobj.decompress(data), b"foobar")
775 self.assertIsNone(dobj.flush())
791 self.assertIsNone(dobj.flush())
776 self.assertIsNone(dobj.flush(10))
792 self.assertIsNone(dobj.flush(10))
777 self.assertIsNone(dobj.flush(length=100))
793 self.assertIsNone(dobj.flush(length=100))
778
794
779 def test_input_types(self):
795 def test_input_types(self):
780 compressed = zstd.ZstdCompressor(level=1).compress(b'foo')
796 compressed = zstd.ZstdCompressor(level=1).compress(b"foo")
781
797
782 dctx = zstd.ZstdDecompressor()
798 dctx = zstd.ZstdDecompressor()
783
799
@@ -795,28 +811,28 b' class TestDecompressor_decompressobj(uni'
795 self.assertIsNone(dobj.flush())
811 self.assertIsNone(dobj.flush())
796 self.assertIsNone(dobj.flush(10))
812 self.assertIsNone(dobj.flush(10))
797 self.assertIsNone(dobj.flush(length=100))
813 self.assertIsNone(dobj.flush(length=100))
798 self.assertEqual(dobj.decompress(source), b'foo')
814 self.assertEqual(dobj.decompress(source), b"foo")
799 self.assertIsNone(dobj.flush())
815 self.assertIsNone(dobj.flush())
800
816
801 def test_reuse(self):
817 def test_reuse(self):
802 data = zstd.ZstdCompressor(level=1).compress(b'foobar')
818 data = zstd.ZstdCompressor(level=1).compress(b"foobar")
803
819
804 dctx = zstd.ZstdDecompressor()
820 dctx = zstd.ZstdDecompressor()
805 dobj = dctx.decompressobj()
821 dobj = dctx.decompressobj()
806 dobj.decompress(data)
822 dobj.decompress(data)
807
823
808 with self.assertRaisesRegexp(zstd.ZstdError, 'cannot use a decompressobj'):
824 with self.assertRaisesRegex(zstd.ZstdError, "cannot use a decompressobj"):
809 dobj.decompress(data)
825 dobj.decompress(data)
810 self.assertIsNone(dobj.flush())
826 self.assertIsNone(dobj.flush())
811
827
812 def test_bad_write_size(self):
828 def test_bad_write_size(self):
813 dctx = zstd.ZstdDecompressor()
829 dctx = zstd.ZstdDecompressor()
814
830
815 with self.assertRaisesRegexp(ValueError, 'write_size must be positive'):
831 with self.assertRaisesRegex(ValueError, "write_size must be positive"):
816 dctx.decompressobj(write_size=0)
832 dctx.decompressobj(write_size=0)
817
833
818 def test_write_size(self):
834 def test_write_size(self):
819 source = b'foo' * 64 + b'bar' * 128
835 source = b"foo" * 64 + b"bar" * 128
820 data = zstd.ZstdCompressor(level=1).compress(source)
836 data = zstd.ZstdCompressor(level=1).compress(source)
821
837
822 dctx = zstd.ZstdDecompressor()
838 dctx = zstd.ZstdDecompressor()
@@ -836,7 +852,7 b' def decompress_via_writer(data):'
836
852
837
853
838 @make_cffi
854 @make_cffi
839 class TestDecompressor_stream_writer(unittest.TestCase):
855 class TestDecompressor_stream_writer(TestCase):
840 def test_io_api(self):
856 def test_io_api(self):
841 buffer = io.BytesIO()
857 buffer = io.BytesIO()
842 dctx = zstd.ZstdDecompressor()
858 dctx = zstd.ZstdDecompressor()
@@ -908,14 +924,14 b' class TestDecompressor_stream_writer(uni'
908 writer.fileno()
924 writer.fileno()
909
925
910 def test_fileno_file(self):
926 def test_fileno_file(self):
911 with tempfile.TemporaryFile('wb') as tf:
927 with tempfile.TemporaryFile("wb") as tf:
912 dctx = zstd.ZstdDecompressor()
928 dctx = zstd.ZstdDecompressor()
913 writer = dctx.stream_writer(tf)
929 writer = dctx.stream_writer(tf)
914
930
915 self.assertEqual(writer.fileno(), tf.fileno())
931 self.assertEqual(writer.fileno(), tf.fileno())
916
932
917 def test_close(self):
933 def test_close(self):
918 foo = zstd.ZstdCompressor().compress(b'foo')
934 foo = zstd.ZstdCompressor().compress(b"foo")
919
935
920 buffer = NonClosingBytesIO()
936 buffer = NonClosingBytesIO()
921 dctx = zstd.ZstdDecompressor()
937 dctx = zstd.ZstdDecompressor()
@@ -928,17 +944,17 b' class TestDecompressor_stream_writer(uni'
928 self.assertTrue(writer.closed)
944 self.assertTrue(writer.closed)
929 self.assertTrue(buffer.closed)
945 self.assertTrue(buffer.closed)
930
946
931 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
947 with self.assertRaisesRegex(ValueError, "stream is closed"):
932 writer.write(b'')
948 writer.write(b"")
933
949
934 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
950 with self.assertRaisesRegex(ValueError, "stream is closed"):
935 writer.flush()
951 writer.flush()
936
952
937 with self.assertRaisesRegexp(ValueError, 'stream is closed'):
953 with self.assertRaisesRegex(ValueError, "stream is closed"):
938 with writer:
954 with writer:
939 pass
955 pass
940
956
941 self.assertEqual(buffer.getvalue(), b'foo')
957 self.assertEqual(buffer.getvalue(), b"foo")
942
958
943 # Context manager exit should close stream.
959 # Context manager exit should close stream.
944 buffer = NonClosingBytesIO()
960 buffer = NonClosingBytesIO()
@@ -948,7 +964,7 b' class TestDecompressor_stream_writer(uni'
948 writer.write(foo)
964 writer.write(foo)
949
965
950 self.assertTrue(writer.closed)
966 self.assertTrue(writer.closed)
951 self.assertEqual(buffer.getvalue(), b'foo')
967 self.assertEqual(buffer.getvalue(), b"foo")
952
968
953 def test_flush(self):
969 def test_flush(self):
954 buffer = OpCountingBytesIO()
970 buffer = OpCountingBytesIO()
@@ -962,12 +978,12 b' class TestDecompressor_stream_writer(uni'
962
978
963 def test_empty_roundtrip(self):
979 def test_empty_roundtrip(self):
964 cctx = zstd.ZstdCompressor()
980 cctx = zstd.ZstdCompressor()
965 empty = cctx.compress(b'')
981 empty = cctx.compress(b"")
966 self.assertEqual(decompress_via_writer(empty), b'')
982 self.assertEqual(decompress_via_writer(empty), b"")
967
983
968 def test_input_types(self):
984 def test_input_types(self):
969 cctx = zstd.ZstdCompressor(level=1)
985 cctx = zstd.ZstdCompressor(level=1)
970 compressed = cctx.compress(b'foo')
986 compressed = cctx.compress(b"foo")
971
987
972 mutable_array = bytearray(len(compressed))
988 mutable_array = bytearray(len(compressed))
973 mutable_array[:] = compressed
989 mutable_array[:] = compressed
@@ -984,25 +1000,25 b' class TestDecompressor_stream_writer(uni'
984
1000
985 decompressor = dctx.stream_writer(buffer)
1001 decompressor = dctx.stream_writer(buffer)
986 decompressor.write(source)
1002 decompressor.write(source)
987 self.assertEqual(buffer.getvalue(), b'foo')
1003 self.assertEqual(buffer.getvalue(), b"foo")
988
1004
989 buffer = NonClosingBytesIO()
1005 buffer = NonClosingBytesIO()
990
1006
991 with dctx.stream_writer(buffer) as decompressor:
1007 with dctx.stream_writer(buffer) as decompressor:
992 self.assertEqual(decompressor.write(source), 3)
1008 self.assertEqual(decompressor.write(source), 3)
993
1009
994 self.assertEqual(buffer.getvalue(), b'foo')
1010 self.assertEqual(buffer.getvalue(), b"foo")
995
1011
996 buffer = io.BytesIO()
1012 buffer = io.BytesIO()
997 writer = dctx.stream_writer(buffer, write_return_read=True)
1013 writer = dctx.stream_writer(buffer, write_return_read=True)
998 self.assertEqual(writer.write(source), len(source))
1014 self.assertEqual(writer.write(source), len(source))
999 self.assertEqual(buffer.getvalue(), b'foo')
1015 self.assertEqual(buffer.getvalue(), b"foo")
1000
1016
1001 def test_large_roundtrip(self):
1017 def test_large_roundtrip(self):
1002 chunks = []
1018 chunks = []
1003 for i in range(255):
1019 for i in range(255):
1004 chunks.append(struct.Struct('>B').pack(i) * 16384)
1020 chunks.append(struct.Struct(">B").pack(i) * 16384)
1005 orig = b''.join(chunks)
1021 orig = b"".join(chunks)
1006 cctx = zstd.ZstdCompressor()
1022 cctx = zstd.ZstdCompressor()
1007 compressed = cctx.compress(orig)
1023 compressed = cctx.compress(orig)
1008
1024
@@ -1012,9 +1028,9 b' class TestDecompressor_stream_writer(uni'
1012 chunks = []
1028 chunks = []
1013 for i in range(255):
1029 for i in range(255):
1014 for j in range(255):
1030 for j in range(255):
1015 chunks.append(struct.Struct('>B').pack(j) * i)
1031 chunks.append(struct.Struct(">B").pack(j) * i)
1016
1032
1017 orig = b''.join(chunks)
1033 orig = b"".join(chunks)
1018 cctx = zstd.ZstdCompressor()
1034 cctx = zstd.ZstdCompressor()
1019 compressed = cctx.compress(orig)
1035 compressed = cctx.compress(orig)
1020
1036
@@ -1042,13 +1058,13 b' class TestDecompressor_stream_writer(uni'
1042 def test_dictionary(self):
1058 def test_dictionary(self):
1043 samples = []
1059 samples = []
1044 for i in range(128):
1060 for i in range(128):
1045 samples.append(b'foo' * 64)
1061 samples.append(b"foo" * 64)
1046 samples.append(b'bar' * 64)
1062 samples.append(b"bar" * 64)
1047 samples.append(b'foobar' * 64)
1063 samples.append(b"foobar" * 64)
1048
1064
1049 d = zstd.train_dictionary(8192, samples)
1065 d = zstd.train_dictionary(8192, samples)
1050
1066
1051 orig = b'foobar' * 16384
1067 orig = b"foobar" * 16384
1052 buffer = NonClosingBytesIO()
1068 buffer = NonClosingBytesIO()
1053 cctx = zstd.ZstdCompressor(dict_data=d)
1069 cctx = zstd.ZstdCompressor(dict_data=d)
1054 with cctx.stream_writer(buffer) as compressor:
1070 with cctx.stream_writer(buffer) as compressor:
@@ -1083,22 +1099,22 b' class TestDecompressor_stream_writer(uni'
1083 self.assertGreater(size, 100000)
1099 self.assertGreater(size, 100000)
1084
1100
1085 def test_write_size(self):
1101 def test_write_size(self):
1086 source = zstd.ZstdCompressor().compress(b'foobarfoobar')
1102 source = zstd.ZstdCompressor().compress(b"foobarfoobar")
1087 dest = OpCountingBytesIO()
1103 dest = OpCountingBytesIO()
1088 dctx = zstd.ZstdDecompressor()
1104 dctx = zstd.ZstdDecompressor()
1089 with dctx.stream_writer(dest, write_size=1) as decompressor:
1105 with dctx.stream_writer(dest, write_size=1) as decompressor:
1090 s = struct.Struct('>B')
1106 s = struct.Struct(">B")
1091 for c in source:
1107 for c in source:
1092 if not isinstance(c, str):
1108 if not isinstance(c, str):
1093 c = s.pack(c)
1109 c = s.pack(c)
1094 decompressor.write(c)
1110 decompressor.write(c)
1095
1111
1096 self.assertEqual(dest.getvalue(), b'foobarfoobar')
1112 self.assertEqual(dest.getvalue(), b"foobarfoobar")
1097 self.assertEqual(dest._write_count, len(dest.getvalue()))
1113 self.assertEqual(dest._write_count, len(dest.getvalue()))
1098
1114
1099
1115
1100 @make_cffi
1116 @make_cffi
1101 class TestDecompressor_read_to_iter(unittest.TestCase):
1117 class TestDecompressor_read_to_iter(TestCase):
1102 def test_type_validation(self):
1118 def test_type_validation(self):
1103 dctx = zstd.ZstdDecompressor()
1119 dctx = zstd.ZstdDecompressor()
1104
1120
@@ -1106,10 +1122,10 b' class TestDecompressor_read_to_iter(unit'
1106 dctx.read_to_iter(io.BytesIO())
1122 dctx.read_to_iter(io.BytesIO())
1107
1123
1108 # Buffer protocol works.
1124 # Buffer protocol works.
1109 dctx.read_to_iter(b'foobar')
1125 dctx.read_to_iter(b"foobar")
1110
1126
1111 with self.assertRaisesRegexp(ValueError, 'must pass an object with a read'):
1127 with self.assertRaisesRegex(ValueError, "must pass an object with a read"):
1112 b''.join(dctx.read_to_iter(True))
1128 b"".join(dctx.read_to_iter(True))
1113
1129
1114 def test_empty_input(self):
1130 def test_empty_input(self):
1115 dctx = zstd.ZstdDecompressor()
1131 dctx = zstd.ZstdDecompressor()
@@ -1120,25 +1136,25 b' class TestDecompressor_read_to_iter(unit'
1120 with self.assertRaises(StopIteration):
1136 with self.assertRaises(StopIteration):
1121 next(it)
1137 next(it)
1122
1138
1123 it = dctx.read_to_iter(b'')
1139 it = dctx.read_to_iter(b"")
1124 with self.assertRaises(StopIteration):
1140 with self.assertRaises(StopIteration):
1125 next(it)
1141 next(it)
1126
1142
1127 def test_invalid_input(self):
1143 def test_invalid_input(self):
1128 dctx = zstd.ZstdDecompressor()
1144 dctx = zstd.ZstdDecompressor()
1129
1145
1130 source = io.BytesIO(b'foobar')
1146 source = io.BytesIO(b"foobar")
1131 it = dctx.read_to_iter(source)
1147 it = dctx.read_to_iter(source)
1132 with self.assertRaisesRegexp(zstd.ZstdError, 'Unknown frame descriptor'):
1148 with self.assertRaisesRegex(zstd.ZstdError, "Unknown frame descriptor"):
1133 next(it)
1149 next(it)
1134
1150
1135 it = dctx.read_to_iter(b'foobar')
1151 it = dctx.read_to_iter(b"foobar")
1136 with self.assertRaisesRegexp(zstd.ZstdError, 'Unknown frame descriptor'):
1152 with self.assertRaisesRegex(zstd.ZstdError, "Unknown frame descriptor"):
1137 next(it)
1153 next(it)
1138
1154
1139 def test_empty_roundtrip(self):
1155 def test_empty_roundtrip(self):
1140 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1156 cctx = zstd.ZstdCompressor(level=1, write_content_size=False)
1141 empty = cctx.compress(b'')
1157 empty = cctx.compress(b"")
1142
1158
1143 source = io.BytesIO(empty)
1159 source = io.BytesIO(empty)
1144 source.seek(0)
1160 source.seek(0)
@@ -1157,24 +1173,28 b' class TestDecompressor_read_to_iter(unit'
1157 def test_skip_bytes_too_large(self):
1173 def test_skip_bytes_too_large(self):
1158 dctx = zstd.ZstdDecompressor()
1174 dctx = zstd.ZstdDecompressor()
1159
1175
1160 with self.assertRaisesRegexp(ValueError, 'skip_bytes must be smaller than read_size'):
1176 with self.assertRaisesRegex(
1161 b''.join(dctx.read_to_iter(b'', skip_bytes=1, read_size=1))
1177 ValueError, "skip_bytes must be smaller than read_size"
1178 ):
1179 b"".join(dctx.read_to_iter(b"", skip_bytes=1, read_size=1))
1162
1180
1163 with self.assertRaisesRegexp(ValueError, 'skip_bytes larger than first input chunk'):
1181 with self.assertRaisesRegex(
1164 b''.join(dctx.read_to_iter(b'foobar', skip_bytes=10))
1182 ValueError, "skip_bytes larger than first input chunk"
1183 ):
1184 b"".join(dctx.read_to_iter(b"foobar", skip_bytes=10))
1165
1185
1166 def test_skip_bytes(self):
1186 def test_skip_bytes(self):
1167 cctx = zstd.ZstdCompressor(write_content_size=False)
1187 cctx = zstd.ZstdCompressor(write_content_size=False)
1168 compressed = cctx.compress(b'foobar')
1188 compressed = cctx.compress(b"foobar")
1169
1189
1170 dctx = zstd.ZstdDecompressor()
1190 dctx = zstd.ZstdDecompressor()
1171 output = b''.join(dctx.read_to_iter(b'hdr' + compressed, skip_bytes=3))
1191 output = b"".join(dctx.read_to_iter(b"hdr" + compressed, skip_bytes=3))
1172 self.assertEqual(output, b'foobar')
1192 self.assertEqual(output, b"foobar")
1173
1193
1174 def test_large_output(self):
1194 def test_large_output(self):
1175 source = io.BytesIO()
1195 source = io.BytesIO()
1176 source.write(b'f' * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE)
1196 source.write(b"f" * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE)
1177 source.write(b'o')
1197 source.write(b"o")
1178 source.seek(0)
1198 source.seek(0)
1179
1199
1180 cctx = zstd.ZstdCompressor(level=1)
1200 cctx = zstd.ZstdCompressor(level=1)
@@ -1191,7 +1211,7 b' class TestDecompressor_read_to_iter(unit'
1191 with self.assertRaises(StopIteration):
1211 with self.assertRaises(StopIteration):
1192 next(it)
1212 next(it)
1193
1213
1194 decompressed = b''.join(chunks)
1214 decompressed = b"".join(chunks)
1195 self.assertEqual(decompressed, source.getvalue())
1215 self.assertEqual(decompressed, source.getvalue())
1196
1216
1197 # And again with buffer protocol.
1217 # And again with buffer protocol.
@@ -1203,12 +1223,12 b' class TestDecompressor_read_to_iter(unit'
1203 with self.assertRaises(StopIteration):
1223 with self.assertRaises(StopIteration):
1204 next(it)
1224 next(it)
1205
1225
1206 decompressed = b''.join(chunks)
1226 decompressed = b"".join(chunks)
1207 self.assertEqual(decompressed, source.getvalue())
1227 self.assertEqual(decompressed, source.getvalue())
1208
1228
1209 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
1229 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
1210 def test_large_input(self):
1230 def test_large_input(self):
1211 bytes = list(struct.Struct('>B').pack(i) for i in range(256))
1231 bytes = list(struct.Struct(">B").pack(i) for i in range(256))
1212 compressed = NonClosingBytesIO()
1232 compressed = NonClosingBytesIO()
1213 input_size = 0
1233 input_size = 0
1214 cctx = zstd.ZstdCompressor(level=1)
1234 cctx = zstd.ZstdCompressor(level=1)
@@ -1217,14 +1237,18 b' class TestDecompressor_read_to_iter(unit'
1217 compressor.write(random.choice(bytes))
1237 compressor.write(random.choice(bytes))
1218 input_size += 1
1238 input_size += 1
1219
1239
1220 have_compressed = len(compressed.getvalue()) > zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
1240 have_compressed = (
1241 len(compressed.getvalue())
1242 > zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
1243 )
1221 have_raw = input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2
1244 have_raw = input_size > zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE * 2
1222 if have_compressed and have_raw:
1245 if have_compressed and have_raw:
1223 break
1246 break
1224
1247
1225 compressed = io.BytesIO(compressed.getvalue())
1248 compressed = io.BytesIO(compressed.getvalue())
1226 self.assertGreater(len(compressed.getvalue()),
1249 self.assertGreater(
1227 zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE)
1250 len(compressed.getvalue()), zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
1251 )
1228
1252
1229 dctx = zstd.ZstdDecompressor()
1253 dctx = zstd.ZstdDecompressor()
1230 it = dctx.read_to_iter(compressed)
1254 it = dctx.read_to_iter(compressed)
@@ -1237,7 +1261,7 b' class TestDecompressor_read_to_iter(unit'
1237 with self.assertRaises(StopIteration):
1261 with self.assertRaises(StopIteration):
1238 next(it)
1262 next(it)
1239
1263
1240 decompressed = b''.join(chunks)
1264 decompressed = b"".join(chunks)
1241 self.assertEqual(len(decompressed), input_size)
1265 self.assertEqual(len(decompressed), input_size)
1242
1266
1243 # And again with buffer protocol.
1267 # And again with buffer protocol.
@@ -1251,7 +1275,7 b' class TestDecompressor_read_to_iter(unit'
1251 with self.assertRaises(StopIteration):
1275 with self.assertRaises(StopIteration):
1252 next(it)
1276 next(it)
1253
1277
1254 decompressed = b''.join(chunks)
1278 decompressed = b"".join(chunks)
1255 self.assertEqual(len(decompressed), input_size)
1279 self.assertEqual(len(decompressed), input_size)
1256
1280
1257 def test_interesting(self):
1281 def test_interesting(self):
@@ -1263,22 +1287,23 b' class TestDecompressor_read_to_iter(unit'
1263 compressed = NonClosingBytesIO()
1287 compressed = NonClosingBytesIO()
1264 with cctx.stream_writer(compressed) as compressor:
1288 with cctx.stream_writer(compressed) as compressor:
1265 for i in range(256):
1289 for i in range(256):
1266 chunk = b'\0' * 1024
1290 chunk = b"\0" * 1024
1267 compressor.write(chunk)
1291 compressor.write(chunk)
1268 source.write(chunk)
1292 source.write(chunk)
1269
1293
1270 dctx = zstd.ZstdDecompressor()
1294 dctx = zstd.ZstdDecompressor()
1271
1295
1272 simple = dctx.decompress(compressed.getvalue(),
1296 simple = dctx.decompress(
1273 max_output_size=len(source.getvalue()))
1297 compressed.getvalue(), max_output_size=len(source.getvalue())
1298 )
1274 self.assertEqual(simple, source.getvalue())
1299 self.assertEqual(simple, source.getvalue())
1275
1300
1276 compressed = io.BytesIO(compressed.getvalue())
1301 compressed = io.BytesIO(compressed.getvalue())
1277 streamed = b''.join(dctx.read_to_iter(compressed))
1302 streamed = b"".join(dctx.read_to_iter(compressed))
1278 self.assertEqual(streamed, source.getvalue())
1303 self.assertEqual(streamed, source.getvalue())
1279
1304
1280 def test_read_write_size(self):
1305 def test_read_write_size(self):
1281 source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b'foobarfoobar'))
1306 source = OpCountingBytesIO(zstd.ZstdCompressor().compress(b"foobarfoobar"))
1282 dctx = zstd.ZstdDecompressor()
1307 dctx = zstd.ZstdDecompressor()
1283 for chunk in dctx.read_to_iter(source, read_size=1, write_size=1):
1308 for chunk in dctx.read_to_iter(source, read_size=1, write_size=1):
1284 self.assertEqual(len(chunk), 1)
1309 self.assertEqual(len(chunk), 1)
@@ -1287,97 +1312,110 b' class TestDecompressor_read_to_iter(unit'
1287
1312
1288 def test_magic_less(self):
1313 def test_magic_less(self):
1289 params = zstd.CompressionParameters.from_level(
1314 params = zstd.CompressionParameters.from_level(
1290 1, format=zstd.FORMAT_ZSTD1_MAGICLESS)
1315 1, format=zstd.FORMAT_ZSTD1_MAGICLESS
1316 )
1291 cctx = zstd.ZstdCompressor(compression_params=params)
1317 cctx = zstd.ZstdCompressor(compression_params=params)
1292 frame = cctx.compress(b'foobar')
1318 frame = cctx.compress(b"foobar")
1293
1319
1294 self.assertNotEqual(frame[0:4], b'\x28\xb5\x2f\xfd')
1320 self.assertNotEqual(frame[0:4], b"\x28\xb5\x2f\xfd")
1295
1321
1296 dctx = zstd.ZstdDecompressor()
1322 dctx = zstd.ZstdDecompressor()
1297 with self.assertRaisesRegexp(
1323 with self.assertRaisesRegex(
1298 zstd.ZstdError, 'error determining content size from frame header'):
1324 zstd.ZstdError, "error determining content size from frame header"
1325 ):
1299 dctx.decompress(frame)
1326 dctx.decompress(frame)
1300
1327
1301 dctx = zstd.ZstdDecompressor(format=zstd.FORMAT_ZSTD1_MAGICLESS)
1328 dctx = zstd.ZstdDecompressor(format=zstd.FORMAT_ZSTD1_MAGICLESS)
1302 res = b''.join(dctx.read_to_iter(frame))
1329 res = b"".join(dctx.read_to_iter(frame))
1303 self.assertEqual(res, b'foobar')
1330 self.assertEqual(res, b"foobar")
1304
1331
1305
1332
1306 @make_cffi
1333 @make_cffi
1307 class TestDecompressor_content_dict_chain(unittest.TestCase):
1334 class TestDecompressor_content_dict_chain(TestCase):
1308 def test_bad_inputs_simple(self):
1335 def test_bad_inputs_simple(self):
1309 dctx = zstd.ZstdDecompressor()
1336 dctx = zstd.ZstdDecompressor()
1310
1337
1311 with self.assertRaises(TypeError):
1338 with self.assertRaises(TypeError):
1312 dctx.decompress_content_dict_chain(b'foo')
1339 dctx.decompress_content_dict_chain(b"foo")
1313
1340
1314 with self.assertRaises(TypeError):
1341 with self.assertRaises(TypeError):
1315 dctx.decompress_content_dict_chain((b'foo', b'bar'))
1342 dctx.decompress_content_dict_chain((b"foo", b"bar"))
1316
1343
1317 with self.assertRaisesRegexp(ValueError, 'empty input chain'):
1344 with self.assertRaisesRegex(ValueError, "empty input chain"):
1318 dctx.decompress_content_dict_chain([])
1345 dctx.decompress_content_dict_chain([])
1319
1346
1320 with self.assertRaisesRegexp(ValueError, 'chunk 0 must be bytes'):
1347 with self.assertRaisesRegex(ValueError, "chunk 0 must be bytes"):
1321 dctx.decompress_content_dict_chain([u'foo'])
1348 dctx.decompress_content_dict_chain([u"foo"])
1322
1349
1323 with self.assertRaisesRegexp(ValueError, 'chunk 0 must be bytes'):
1350 with self.assertRaisesRegex(ValueError, "chunk 0 must be bytes"):
1324 dctx.decompress_content_dict_chain([True])
1351 dctx.decompress_content_dict_chain([True])
1325
1352
1326 with self.assertRaisesRegexp(ValueError, 'chunk 0 is too small to contain a zstd frame'):
1353 with self.assertRaisesRegex(
1354 ValueError, "chunk 0 is too small to contain a zstd frame"
1355 ):
1327 dctx.decompress_content_dict_chain([zstd.FRAME_HEADER])
1356 dctx.decompress_content_dict_chain([zstd.FRAME_HEADER])
1328
1357
1329 with self.assertRaisesRegexp(ValueError, 'chunk 0 is not a valid zstd frame'):
1358 with self.assertRaisesRegex(ValueError, "chunk 0 is not a valid zstd frame"):
1330 dctx.decompress_content_dict_chain([b'foo' * 8])
1359 dctx.decompress_content_dict_chain([b"foo" * 8])
1331
1360
1332 no_size = zstd.ZstdCompressor(write_content_size=False).compress(b'foo' * 64)
1361 no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64)
1333
1362
1334 with self.assertRaisesRegexp(ValueError, 'chunk 0 missing content size in frame'):
1363 with self.assertRaisesRegex(
1364 ValueError, "chunk 0 missing content size in frame"
1365 ):
1335 dctx.decompress_content_dict_chain([no_size])
1366 dctx.decompress_content_dict_chain([no_size])
1336
1367
1337 # Corrupt first frame.
1368 # Corrupt first frame.
1338 frame = zstd.ZstdCompressor().compress(b'foo' * 64)
1369 frame = zstd.ZstdCompressor().compress(b"foo" * 64)
1339 frame = frame[0:12] + frame[15:]
1370 frame = frame[0:12] + frame[15:]
1340 with self.assertRaisesRegexp(zstd.ZstdError,
1371 with self.assertRaisesRegex(
1341 'chunk 0 did not decompress full frame'):
1372 zstd.ZstdError, "chunk 0 did not decompress full frame"
1373 ):
1342 dctx.decompress_content_dict_chain([frame])
1374 dctx.decompress_content_dict_chain([frame])
1343
1375
1344 def test_bad_subsequent_input(self):
1376 def test_bad_subsequent_input(self):
1345 initial = zstd.ZstdCompressor().compress(b'foo' * 64)
1377 initial = zstd.ZstdCompressor().compress(b"foo" * 64)
1346
1378
1347 dctx = zstd.ZstdDecompressor()
1379 dctx = zstd.ZstdDecompressor()
1348
1380
1349 with self.assertRaisesRegexp(ValueError, 'chunk 1 must be bytes'):
1381 with self.assertRaisesRegex(ValueError, "chunk 1 must be bytes"):
1350 dctx.decompress_content_dict_chain([initial, u'foo'])
1382 dctx.decompress_content_dict_chain([initial, u"foo"])
1351
1383
1352 with self.assertRaisesRegexp(ValueError, 'chunk 1 must be bytes'):
1384 with self.assertRaisesRegex(ValueError, "chunk 1 must be bytes"):
1353 dctx.decompress_content_dict_chain([initial, None])
1385 dctx.decompress_content_dict_chain([initial, None])
1354
1386
1355 with self.assertRaisesRegexp(ValueError, 'chunk 1 is too small to contain a zstd frame'):
1387 with self.assertRaisesRegex(
1388 ValueError, "chunk 1 is too small to contain a zstd frame"
1389 ):
1356 dctx.decompress_content_dict_chain([initial, zstd.FRAME_HEADER])
1390 dctx.decompress_content_dict_chain([initial, zstd.FRAME_HEADER])
1357
1391
1358 with self.assertRaisesRegexp(ValueError, 'chunk 1 is not a valid zstd frame'):
1392 with self.assertRaisesRegex(ValueError, "chunk 1 is not a valid zstd frame"):
1359 dctx.decompress_content_dict_chain([initial, b'foo' * 8])
1393 dctx.decompress_content_dict_chain([initial, b"foo" * 8])
1360
1394
1361 no_size = zstd.ZstdCompressor(write_content_size=False).compress(b'foo' * 64)
1395 no_size = zstd.ZstdCompressor(write_content_size=False).compress(b"foo" * 64)
1362
1396
1363 with self.assertRaisesRegexp(ValueError, 'chunk 1 missing content size in frame'):
1397 with self.assertRaisesRegex(
1398 ValueError, "chunk 1 missing content size in frame"
1399 ):
1364 dctx.decompress_content_dict_chain([initial, no_size])
1400 dctx.decompress_content_dict_chain([initial, no_size])
1365
1401
1366 # Corrupt second frame.
1402 # Corrupt second frame.
1367 cctx = zstd.ZstdCompressor(dict_data=zstd.ZstdCompressionDict(b'foo' * 64))
1403 cctx = zstd.ZstdCompressor(dict_data=zstd.ZstdCompressionDict(b"foo" * 64))
1368 frame = cctx.compress(b'bar' * 64)
1404 frame = cctx.compress(b"bar" * 64)
1369 frame = frame[0:12] + frame[15:]
1405 frame = frame[0:12] + frame[15:]
1370
1406
1371 with self.assertRaisesRegexp(zstd.ZstdError, 'chunk 1 did not decompress full frame'):
1407 with self.assertRaisesRegex(
1408 zstd.ZstdError, "chunk 1 did not decompress full frame"
1409 ):
1372 dctx.decompress_content_dict_chain([initial, frame])
1410 dctx.decompress_content_dict_chain([initial, frame])
1373
1411
1374 def test_simple(self):
1412 def test_simple(self):
1375 original = [
1413 original = [
1376 b'foo' * 64,
1414 b"foo" * 64,
1377 b'foobar' * 64,
1415 b"foobar" * 64,
1378 b'baz' * 64,
1416 b"baz" * 64,
1379 b'foobaz' * 64,
1417 b"foobaz" * 64,
1380 b'foobarbaz' * 64,
1418 b"foobarbaz" * 64,
1381 ]
1419 ]
1382
1420
1383 chunks = []
1421 chunks = []
@@ -1396,12 +1434,12 b' class TestDecompressor_content_dict_chai'
1396
1434
1397
1435
1398 # TODO enable for CFFI
1436 # TODO enable for CFFI
1399 class TestDecompressor_multi_decompress_to_buffer(unittest.TestCase):
1437 class TestDecompressor_multi_decompress_to_buffer(TestCase):
1400 def test_invalid_inputs(self):
1438 def test_invalid_inputs(self):
1401 dctx = zstd.ZstdDecompressor()
1439 dctx = zstd.ZstdDecompressor()
1402
1440
1403 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1441 if not hasattr(dctx, "multi_decompress_to_buffer"):
1404 self.skipTest('multi_decompress_to_buffer not available')
1442 self.skipTest("multi_decompress_to_buffer not available")
1405
1443
1406 with self.assertRaises(TypeError):
1444 with self.assertRaises(TypeError):
1407 dctx.multi_decompress_to_buffer(True)
1445 dctx.multi_decompress_to_buffer(True)
@@ -1409,22 +1447,24 b' class TestDecompressor_multi_decompress_'
1409 with self.assertRaises(TypeError):
1447 with self.assertRaises(TypeError):
1410 dctx.multi_decompress_to_buffer((1, 2))
1448 dctx.multi_decompress_to_buffer((1, 2))
1411
1449
1412 with self.assertRaisesRegexp(TypeError, 'item 0 not a bytes like object'):
1450 with self.assertRaisesRegex(TypeError, "item 0 not a bytes like object"):
1413 dctx.multi_decompress_to_buffer([u'foo'])
1451 dctx.multi_decompress_to_buffer([u"foo"])
1414
1452
1415 with self.assertRaisesRegexp(ValueError, 'could not determine decompressed size of item 0'):
1453 with self.assertRaisesRegex(
1416 dctx.multi_decompress_to_buffer([b'foobarbaz'])
1454 ValueError, "could not determine decompressed size of item 0"
1455 ):
1456 dctx.multi_decompress_to_buffer([b"foobarbaz"])
1417
1457
1418 def test_list_input(self):
1458 def test_list_input(self):
1419 cctx = zstd.ZstdCompressor()
1459 cctx = zstd.ZstdCompressor()
1420
1460
1421 original = [b'foo' * 4, b'bar' * 6]
1461 original = [b"foo" * 4, b"bar" * 6]
1422 frames = [cctx.compress(d) for d in original]
1462 frames = [cctx.compress(d) for d in original]
1423
1463
1424 dctx = zstd.ZstdDecompressor()
1464 dctx = zstd.ZstdDecompressor()
1425
1465
1426 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1466 if not hasattr(dctx, "multi_decompress_to_buffer"):
1427 self.skipTest('multi_decompress_to_buffer not available')
1467 self.skipTest("multi_decompress_to_buffer not available")
1428
1468
1429 result = dctx.multi_decompress_to_buffer(frames)
1469 result = dctx.multi_decompress_to_buffer(frames)
1430
1470
@@ -1442,14 +1482,14 b' class TestDecompressor_multi_decompress_'
1442 def test_list_input_frame_sizes(self):
1482 def test_list_input_frame_sizes(self):
1443 cctx = zstd.ZstdCompressor()
1483 cctx = zstd.ZstdCompressor()
1444
1484
1445 original = [b'foo' * 4, b'bar' * 6, b'baz' * 8]
1485 original = [b"foo" * 4, b"bar" * 6, b"baz" * 8]
1446 frames = [cctx.compress(d) for d in original]
1486 frames = [cctx.compress(d) for d in original]
1447 sizes = struct.pack('=' + 'Q' * len(original), *map(len, original))
1487 sizes = struct.pack("=" + "Q" * len(original), *map(len, original))
1448
1488
1449 dctx = zstd.ZstdDecompressor()
1489 dctx = zstd.ZstdDecompressor()
1450
1490
1451 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1491 if not hasattr(dctx, "multi_decompress_to_buffer"):
1452 self.skipTest('multi_decompress_to_buffer not available')
1492 self.skipTest("multi_decompress_to_buffer not available")
1453
1493
1454 result = dctx.multi_decompress_to_buffer(frames, decompressed_sizes=sizes)
1494 result = dctx.multi_decompress_to_buffer(frames, decompressed_sizes=sizes)
1455
1495
@@ -1462,16 +1502,18 b' class TestDecompressor_multi_decompress_'
1462 def test_buffer_with_segments_input(self):
1502 def test_buffer_with_segments_input(self):
1463 cctx = zstd.ZstdCompressor()
1503 cctx = zstd.ZstdCompressor()
1464
1504
1465 original = [b'foo' * 4, b'bar' * 6]
1505 original = [b"foo" * 4, b"bar" * 6]
1466 frames = [cctx.compress(d) for d in original]
1506 frames = [cctx.compress(d) for d in original]
1467
1507
1468 dctx = zstd.ZstdDecompressor()
1508 dctx = zstd.ZstdDecompressor()
1469
1509
1470 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1510 if not hasattr(dctx, "multi_decompress_to_buffer"):
1471 self.skipTest('multi_decompress_to_buffer not available')
1511 self.skipTest("multi_decompress_to_buffer not available")
1472
1512
1473 segments = struct.pack('=QQQQ', 0, len(frames[0]), len(frames[0]), len(frames[1]))
1513 segments = struct.pack(
1474 b = zstd.BufferWithSegments(b''.join(frames), segments)
1514 "=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1])
1515 )
1516 b = zstd.BufferWithSegments(b"".join(frames), segments)
1475
1517
1476 result = dctx.multi_decompress_to_buffer(b)
1518 result = dctx.multi_decompress_to_buffer(b)
1477
1519
@@ -1483,19 +1525,25 b' class TestDecompressor_multi_decompress_'
1483
1525
1484 def test_buffer_with_segments_sizes(self):
1526 def test_buffer_with_segments_sizes(self):
1485 cctx = zstd.ZstdCompressor(write_content_size=False)
1527 cctx = zstd.ZstdCompressor(write_content_size=False)
1486 original = [b'foo' * 4, b'bar' * 6, b'baz' * 8]
1528 original = [b"foo" * 4, b"bar" * 6, b"baz" * 8]
1487 frames = [cctx.compress(d) for d in original]
1529 frames = [cctx.compress(d) for d in original]
1488 sizes = struct.pack('=' + 'Q' * len(original), *map(len, original))
1530 sizes = struct.pack("=" + "Q" * len(original), *map(len, original))
1489
1531
1490 dctx = zstd.ZstdDecompressor()
1532 dctx = zstd.ZstdDecompressor()
1491
1533
1492 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1534 if not hasattr(dctx, "multi_decompress_to_buffer"):
1493 self.skipTest('multi_decompress_to_buffer not available')
1535 self.skipTest("multi_decompress_to_buffer not available")
1494
1536
1495 segments = struct.pack('=QQQQQQ', 0, len(frames[0]),
1537 segments = struct.pack(
1496 len(frames[0]), len(frames[1]),
1538 "=QQQQQQ",
1497 len(frames[0]) + len(frames[1]), len(frames[2]))
1539 0,
1498 b = zstd.BufferWithSegments(b''.join(frames), segments)
1540 len(frames[0]),
1541 len(frames[0]),
1542 len(frames[1]),
1543 len(frames[0]) + len(frames[1]),
1544 len(frames[2]),
1545 )
1546 b = zstd.BufferWithSegments(b"".join(frames), segments)
1499
1547
1500 result = dctx.multi_decompress_to_buffer(b, decompressed_sizes=sizes)
1548 result = dctx.multi_decompress_to_buffer(b, decompressed_sizes=sizes)
1501
1549
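The segments argument handed to zstd.BufferWithSegments above is a flat array of native-endian uint64 (offset, length) pairs, one pair per frame. A sketch of a hypothetical helper (build_buffer_with_segments is not part of the library) that packs the same layout the tests build by hand:

    import struct
    import zstandard as zstd

    def build_buffer_with_segments(frames):
        # Concatenate the frames and describe each with an (offset, length)
        # pair packed as "=QQ", matching the struct.pack calls above.
        values = []
        offset = 0
        for frame in frames:
            values.extend((offset, len(frame)))
            offset += len(frame)
        segments = struct.pack("=" + "QQ" * len(frames), *values)
        return zstd.BufferWithSegments(b"".join(frames), segments)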
@@ -1509,15 +1557,15 b' class TestDecompressor_multi_decompress_'
1509 cctx = zstd.ZstdCompressor()
1557 cctx = zstd.ZstdCompressor()
1510
1558
1511 original = [
1559 original = [
1512 b'foo0' * 2,
1560 b"foo0" * 2,
1513 b'foo1' * 3,
1561 b"foo1" * 3,
1514 b'foo2' * 4,
1562 b"foo2" * 4,
1515 b'foo3' * 5,
1563 b"foo3" * 5,
1516 b'foo4' * 6,
1564 b"foo4" * 6,
1517 ]
1565 ]
1518
1566
1519 if not hasattr(cctx, 'multi_compress_to_buffer'):
1567 if not hasattr(cctx, "multi_compress_to_buffer"):
1520 self.skipTest('multi_compress_to_buffer not available')
1568 self.skipTest("multi_compress_to_buffer not available")
1521
1569
1522 frames = cctx.multi_compress_to_buffer(original)
1570 frames = cctx.multi_compress_to_buffer(original)
1523
1571
@@ -1532,16 +1580,24 b' class TestDecompressor_multi_decompress_'
1532 self.assertEqual(data, decompressed[i].tobytes())
1580 self.assertEqual(data, decompressed[i].tobytes())
1533
1581
1534 # And a manual mode.
1582 # And a manual mode.
1535 b = b''.join([frames[0].tobytes(), frames[1].tobytes()])
1583 b = b"".join([frames[0].tobytes(), frames[1].tobytes()])
1536 b1 = zstd.BufferWithSegments(b, struct.pack('=QQQQ',
1584 b1 = zstd.BufferWithSegments(
1537 0, len(frames[0]),
1585 b, struct.pack("=QQQQ", 0, len(frames[0]), len(frames[0]), len(frames[1]))
1538 len(frames[0]), len(frames[1])))
1586 )
1539
1587
1540 b = b''.join([frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()])
1588 b = b"".join([frames[2].tobytes(), frames[3].tobytes(), frames[4].tobytes()])
1541 b2 = zstd.BufferWithSegments(b, struct.pack('=QQQQQQ',
1589 b2 = zstd.BufferWithSegments(
1542 0, len(frames[2]),
1590 b,
1543 len(frames[2]), len(frames[3]),
1591 struct.pack(
1544 len(frames[2]) + len(frames[3]), len(frames[4])))
1592 "=QQQQQQ",
1593 0,
1594 len(frames[2]),
1595 len(frames[2]),
1596 len(frames[3]),
1597 len(frames[2]) + len(frames[3]),
1598 len(frames[4]),
1599 ),
1600 )
1545
1601
1546 c = zstd.BufferWithSegmentsCollection(b1, b2)
1602 c = zstd.BufferWithSegmentsCollection(b1, b2)
1547
1603
@@ -1560,8 +1616,8 b' class TestDecompressor_multi_decompress_'
1560
1616
1561 dctx = zstd.ZstdDecompressor(dict_data=d)
1617 dctx = zstd.ZstdDecompressor(dict_data=d)
1562
1618
1563 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1619 if not hasattr(dctx, "multi_decompress_to_buffer"):
1564 self.skipTest('multi_decompress_to_buffer not available')
1620 self.skipTest("multi_decompress_to_buffer not available")
1565
1621
1566 result = dctx.multi_decompress_to_buffer(frames)
1622 result = dctx.multi_decompress_to_buffer(frames)
1567
1623
@@ -1571,41 +1627,44 b' class TestDecompressor_multi_decompress_'
1571 cctx = zstd.ZstdCompressor()
1627 cctx = zstd.ZstdCompressor()
1572
1628
1573 frames = []
1629 frames = []
1574 frames.extend(cctx.compress(b'x' * 64) for i in range(256))
1630 frames.extend(cctx.compress(b"x" * 64) for i in range(256))
1575 frames.extend(cctx.compress(b'y' * 64) for i in range(256))
1631 frames.extend(cctx.compress(b"y" * 64) for i in range(256))
1576
1632
1577 dctx = zstd.ZstdDecompressor()
1633 dctx = zstd.ZstdDecompressor()
1578
1634
1579 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1635 if not hasattr(dctx, "multi_decompress_to_buffer"):
1580 self.skipTest('multi_decompress_to_buffer not available')
1636 self.skipTest("multi_decompress_to_buffer not available")
1581
1637
1582 result = dctx.multi_decompress_to_buffer(frames, threads=-1)
1638 result = dctx.multi_decompress_to_buffer(frames, threads=-1)
1583
1639
1584 self.assertEqual(len(result), len(frames))
1640 self.assertEqual(len(result), len(frames))
1585 self.assertEqual(result.size(), 2 * 64 * 256)
1641 self.assertEqual(result.size(), 2 * 64 * 256)
1586 self.assertEqual(result[0].tobytes(), b'x' * 64)
1642 self.assertEqual(result[0].tobytes(), b"x" * 64)
1587 self.assertEqual(result[256].tobytes(), b'y' * 64)
1643 self.assertEqual(result[256].tobytes(), b"y" * 64)
1588
1644
1589 def test_item_failure(self):
1645 def test_item_failure(self):
1590 cctx = zstd.ZstdCompressor()
1646 cctx = zstd.ZstdCompressor()
1591 frames = [cctx.compress(b'x' * 128), cctx.compress(b'y' * 128)]
1647 frames = [cctx.compress(b"x" * 128), cctx.compress(b"y" * 128)]
1592
1648
1593 frames[1] = frames[1][0:15] + b'extra' + frames[1][15:]
1649 frames[1] = frames[1][0:15] + b"extra" + frames[1][15:]
1594
1650
1595 dctx = zstd.ZstdDecompressor()
1651 dctx = zstd.ZstdDecompressor()
1596
1652
1597 if not hasattr(dctx, 'multi_decompress_to_buffer'):
1653 if not hasattr(dctx, "multi_decompress_to_buffer"):
1598 self.skipTest('multi_decompress_to_buffer not available')
1654 self.skipTest("multi_decompress_to_buffer not available")
1599
1655
1600 with self.assertRaisesRegexp(zstd.ZstdError,
1656 with self.assertRaisesRegex(
1601 'error decompressing item 1: ('
1657 zstd.ZstdError,
1602 'Corrupted block|'
1658 "error decompressing item 1: ("
1603 'Destination buffer is too small)'):
1659 "Corrupted block|"
1660 "Destination buffer is too small)",
1661 ):
1604 dctx.multi_decompress_to_buffer(frames)
1662 dctx.multi_decompress_to_buffer(frames)
1605
1663
1606 with self.assertRaisesRegexp(zstd.ZstdError,
1664 with self.assertRaisesRegex(
1607 'error decompressing item 1: ('
1665 zstd.ZstdError,
1608 'Corrupted block|'
1666 "error decompressing item 1: ("
1609 'Destination buffer is too small)'):
1667 "Corrupted block|"
1668 "Destination buffer is too small)",
1669 ):
1610 dctx.multi_decompress_to_buffer(frames, threads=2)
1670 dctx.multi_decompress_to_buffer(frames, threads=2)
1611
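As a usage note for the class above: multi_decompress_to_buffer is only exposed by the C extension (hence the hasattr guards and the "TODO enable for CFFI" comment), accepts a list of frames or a BufferWithSegments, and returns a buffer-collection result indexable per input item. A minimal sketch based on the calls shown in these tests:

    import zstandard as zstd

    cctx = zstd.ZstdCompressor()
    original = [b"foo" * 4, b"bar" * 6]
    frames = [cctx.compress(d) for d in original]

    dctx = zstd.ZstdDecompressor()
    if hasattr(dctx, "multi_decompress_to_buffer"):  # C extension only
        result = dctx.multi_decompress_to_buffer(frames, threads=-1)
        assert [result[i].tobytes() for i in range(len(result))] == original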
@@ -6,29 +6,37 b' try:'
6 import hypothesis
6 import hypothesis
7 import hypothesis.strategies as strategies
7 import hypothesis.strategies as strategies
8 except ImportError:
8 except ImportError:
9 raise unittest.SkipTest('hypothesis not available')
9 raise unittest.SkipTest("hypothesis not available")
10
10
11 import zstandard as zstd
11 import zstandard as zstd
12
12
13 from . common import (
13 from .common import (
14 make_cffi,
14 make_cffi,
15 NonClosingBytesIO,
15 NonClosingBytesIO,
16 random_input_data,
16 random_input_data,
17 TestCase,
17 )
18 )
18
19
19
20
20 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
21 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
21 @make_cffi
22 @make_cffi
22 class TestDecompressor_stream_reader_fuzzing(unittest.TestCase):
23 class TestDecompressor_stream_reader_fuzzing(TestCase):
23 @hypothesis.settings(
24 @hypothesis.settings(
24 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
25 suppress_health_check=[
25 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
26 hypothesis.HealthCheck.large_base_example,
26 level=strategies.integers(min_value=1, max_value=5),
27 hypothesis.HealthCheck.too_slow,
27 streaming=strategies.booleans(),
28 ]
28 source_read_size=strategies.integers(1, 1048576),
29 )
29 read_sizes=strategies.data())
30 @hypothesis.given(
30 def test_stream_source_read_variance(self, original, level, streaming,
31 original=strategies.sampled_from(random_input_data()),
31 source_read_size, read_sizes):
32 level=strategies.integers(min_value=1, max_value=5),
33 streaming=strategies.booleans(),
34 source_read_size=strategies.integers(1, 1048576),
35 read_sizes=strategies.data(),
36 )
37 def test_stream_source_read_variance(
38 self, original, level, streaming, source_read_size, read_sizes
39 ):
32 cctx = zstd.ZstdCompressor(level=level)
40 cctx = zstd.ZstdCompressor(level=level)
33
41
34 if streaming:
42 if streaming:
@@ -53,18 +61,22 b' class TestDecompressor_stream_reader_fuz'
53
61
54 chunks.append(chunk)
62 chunks.append(chunk)
55
63
56 self.assertEqual(b''.join(chunks), original)
64 self.assertEqual(b"".join(chunks), original)
57
65
58 # Similar to above except we have a constant read() size.
66 # Similar to above except we have a constant read() size.
59 @hypothesis.settings(
67 @hypothesis.settings(
60 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
68 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
61 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
69 )
62 level=strategies.integers(min_value=1, max_value=5),
70 @hypothesis.given(
63 streaming=strategies.booleans(),
71 original=strategies.sampled_from(random_input_data()),
64 source_read_size=strategies.integers(1, 1048576),
72 level=strategies.integers(min_value=1, max_value=5),
65 read_size=strategies.integers(-1, 131072))
73 streaming=strategies.booleans(),
66 def test_stream_source_read_size(self, original, level, streaming,
74 source_read_size=strategies.integers(1, 1048576),
67 source_read_size, read_size):
75 read_size=strategies.integers(-1, 131072),
76 )
77 def test_stream_source_read_size(
78 self, original, level, streaming, source_read_size, read_size
79 ):
68 if read_size == 0:
80 if read_size == 0:
69 read_size = 1
81 read_size = 1
70
82
@@ -91,17 +103,24 b' class TestDecompressor_stream_reader_fuz'
91
103
92 chunks.append(chunk)
104 chunks.append(chunk)
93
105
94 self.assertEqual(b''.join(chunks), original)
106 self.assertEqual(b"".join(chunks), original)
95
107
96 @hypothesis.settings(
108 @hypothesis.settings(
97 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
109 suppress_health_check=[
98 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
110 hypothesis.HealthCheck.large_base_example,
99 level=strategies.integers(min_value=1, max_value=5),
111 hypothesis.HealthCheck.too_slow,
100 streaming=strategies.booleans(),
112 ]
101 source_read_size=strategies.integers(1, 1048576),
113 )
102 read_sizes=strategies.data())
114 @hypothesis.given(
103 def test_buffer_source_read_variance(self, original, level, streaming,
115 original=strategies.sampled_from(random_input_data()),
104 source_read_size, read_sizes):
116 level=strategies.integers(min_value=1, max_value=5),
117 streaming=strategies.booleans(),
118 source_read_size=strategies.integers(1, 1048576),
119 read_sizes=strategies.data(),
120 )
121 def test_buffer_source_read_variance(
122 self, original, level, streaming, source_read_size, read_sizes
123 ):
105 cctx = zstd.ZstdCompressor(level=level)
124 cctx = zstd.ZstdCompressor(level=level)
106
125
107 if streaming:
126 if streaming:
@@ -125,18 +144,22 b' class TestDecompressor_stream_reader_fuz'
125
144
126 chunks.append(chunk)
145 chunks.append(chunk)
127
146
128 self.assertEqual(b''.join(chunks), original)
147 self.assertEqual(b"".join(chunks), original)
129
148
130 # Similar to above except we have a constant read() size.
149 # Similar to above except we have a constant read() size.
131 @hypothesis.settings(
150 @hypothesis.settings(
132 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
151 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
133 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
152 )
134 level=strategies.integers(min_value=1, max_value=5),
153 @hypothesis.given(
135 streaming=strategies.booleans(),
154 original=strategies.sampled_from(random_input_data()),
136 source_read_size=strategies.integers(1, 1048576),
155 level=strategies.integers(min_value=1, max_value=5),
137 read_size=strategies.integers(-1, 131072))
156 streaming=strategies.booleans(),
138 def test_buffer_source_constant_read_size(self, original, level, streaming,
157 source_read_size=strategies.integers(1, 1048576),
139 source_read_size, read_size):
158 read_size=strategies.integers(-1, 131072),
159 )
160 def test_buffer_source_constant_read_size(
161 self, original, level, streaming, source_read_size, read_size
162 ):
140 if read_size == 0:
163 if read_size == 0:
141 read_size = -1
164 read_size = -1
142
165
@@ -162,16 +185,18 b' class TestDecompressor_stream_reader_fuz'
162
185
163 chunks.append(chunk)
186 chunks.append(chunk)
164
187
165 self.assertEqual(b''.join(chunks), original)
188 self.assertEqual(b"".join(chunks), original)
166
189
167 @hypothesis.settings(
190 @hypothesis.settings(
168 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
191 suppress_health_check=[hypothesis.HealthCheck.large_base_example]
169 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
192 )
170 level=strategies.integers(min_value=1, max_value=5),
193 @hypothesis.given(
171 streaming=strategies.booleans(),
194 original=strategies.sampled_from(random_input_data()),
172 source_read_size=strategies.integers(1, 1048576))
195 level=strategies.integers(min_value=1, max_value=5),
173 def test_stream_source_readall(self, original, level, streaming,
196 streaming=strategies.booleans(),
174 source_read_size):
197 source_read_size=strategies.integers(1, 1048576),
198 )
199 def test_stream_source_readall(self, original, level, streaming, source_read_size):
175 cctx = zstd.ZstdCompressor(level=level)
200 cctx = zstd.ZstdCompressor(level=level)
176
201
177 if streaming:
202 if streaming:
@@ -190,14 +215,21 b' class TestDecompressor_stream_reader_fuz'
190 self.assertEqual(data, original)
215 self.assertEqual(data, original)
191
216
192 @hypothesis.settings(
217 @hypothesis.settings(
193 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
218 suppress_health_check=[
194 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
219 hypothesis.HealthCheck.large_base_example,
195 level=strategies.integers(min_value=1, max_value=5),
220 hypothesis.HealthCheck.too_slow,
196 streaming=strategies.booleans(),
221 ]
197 source_read_size=strategies.integers(1, 1048576),
222 )
198 read_sizes=strategies.data())
223 @hypothesis.given(
199 def test_stream_source_read1_variance(self, original, level, streaming,
224 original=strategies.sampled_from(random_input_data()),
200 source_read_size, read_sizes):
225 level=strategies.integers(min_value=1, max_value=5),
226 streaming=strategies.booleans(),
227 source_read_size=strategies.integers(1, 1048576),
228 read_sizes=strategies.data(),
229 )
230 def test_stream_source_read1_variance(
231 self, original, level, streaming, source_read_size, read_sizes
232 ):
201 cctx = zstd.ZstdCompressor(level=level)
233 cctx = zstd.ZstdCompressor(level=level)
202
234
203 if streaming:
235 if streaming:
@@ -222,17 +254,24 b' class TestDecompressor_stream_reader_fuz'
222
254
223 chunks.append(chunk)
255 chunks.append(chunk)
224
256
225 self.assertEqual(b''.join(chunks), original)
257 self.assertEqual(b"".join(chunks), original)
226
258
227 @hypothesis.settings(
259 @hypothesis.settings(
228 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
260 suppress_health_check=[
229 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
261 hypothesis.HealthCheck.large_base_example,
230 level=strategies.integers(min_value=1, max_value=5),
262 hypothesis.HealthCheck.too_slow,
231 streaming=strategies.booleans(),
263 ]
232 source_read_size=strategies.integers(1, 1048576),
264 )
233 read_sizes=strategies.data())
265 @hypothesis.given(
234 def test_stream_source_readinto1_variance(self, original, level, streaming,
266 original=strategies.sampled_from(random_input_data()),
235 source_read_size, read_sizes):
267 level=strategies.integers(min_value=1, max_value=5),
268 streaming=strategies.booleans(),
269 source_read_size=strategies.integers(1, 1048576),
270 read_sizes=strategies.data(),
271 )
272 def test_stream_source_readinto1_variance(
273 self, original, level, streaming, source_read_size, read_sizes
274 ):
236 cctx = zstd.ZstdCompressor(level=level)
275 cctx = zstd.ZstdCompressor(level=level)
237
276
238 if streaming:
277 if streaming:
@@ -259,18 +298,24 b' class TestDecompressor_stream_reader_fuz'
259
298
260 chunks.append(bytes(b[0:count]))
299 chunks.append(bytes(b[0:count]))
261
300
262 self.assertEqual(b''.join(chunks), original)
301 self.assertEqual(b"".join(chunks), original)
263
302
264 @hypothesis.settings(
303 @hypothesis.settings(
265 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
304 suppress_health_check=[
305 hypothesis.HealthCheck.large_base_example,
306 hypothesis.HealthCheck.too_slow,
307 ]
308 )
266 @hypothesis.given(
309 @hypothesis.given(
267 original=strategies.sampled_from(random_input_data()),
310 original=strategies.sampled_from(random_input_data()),
268 level=strategies.integers(min_value=1, max_value=5),
311 level=strategies.integers(min_value=1, max_value=5),
269 source_read_size=strategies.integers(1, 1048576),
312 source_read_size=strategies.integers(1, 1048576),
270 seek_amounts=strategies.data(),
313 seek_amounts=strategies.data(),
271 read_sizes=strategies.data())
314 read_sizes=strategies.data(),
272 def test_relative_seeks(self, original, level, source_read_size, seek_amounts,
315 )
273 read_sizes):
316 def test_relative_seeks(
317 self, original, level, source_read_size, seek_amounts, read_sizes
318 ):
274 cctx = zstd.ZstdCompressor(level=level)
319 cctx = zstd.ZstdCompressor(level=level)
275 frame = cctx.compress(original)
320 frame = cctx.compress(original)
276
321
@@ -288,18 +333,24 b' class TestDecompressor_stream_reader_fuz'
288 if not chunk:
333 if not chunk:
289 break
334 break
290
335
291 self.assertEqual(original[offset:offset + len(chunk)], chunk)
336 self.assertEqual(original[offset : offset + len(chunk)], chunk)
292
337
293 @hypothesis.settings(
338 @hypothesis.settings(
294 suppress_health_check=[hypothesis.HealthCheck.large_base_example])
339 suppress_health_check=[
340 hypothesis.HealthCheck.large_base_example,
341 hypothesis.HealthCheck.too_slow,
342 ]
343 )
295 @hypothesis.given(
344 @hypothesis.given(
296 originals=strategies.data(),
345 originals=strategies.data(),
297 frame_count=strategies.integers(min_value=2, max_value=10),
346 frame_count=strategies.integers(min_value=2, max_value=10),
298 level=strategies.integers(min_value=1, max_value=5),
347 level=strategies.integers(min_value=1, max_value=5),
299 source_read_size=strategies.integers(1, 1048576),
348 source_read_size=strategies.integers(1, 1048576),
300 read_sizes=strategies.data())
349 read_sizes=strategies.data(),
301 def test_multiple_frames(self, originals, frame_count, level,
350 )
302 source_read_size, read_sizes):
351 def test_multiple_frames(
352 self, originals, frame_count, level, source_read_size, read_sizes
353 ):
303
354
304 cctx = zstd.ZstdCompressor(level=level)
355 cctx = zstd.ZstdCompressor(level=level)
305 source = io.BytesIO()
356 source = io.BytesIO()
@@ -314,8 +365,9 b' class TestDecompressor_stream_reader_fuz'
314
365
315 dctx = zstd.ZstdDecompressor()
366 dctx = zstd.ZstdDecompressor()
316 buffer.seek(0)
367 buffer.seek(0)
317 reader = dctx.stream_reader(buffer, read_size=source_read_size,
368 reader = dctx.stream_reader(
318 read_across_frames=True)
369 buffer, read_size=source_read_size, read_across_frames=True
370 )
319
371
320 chunks = []
372 chunks = []
321
373
@@ -328,16 +380,24 b' class TestDecompressor_stream_reader_fuz'
328
380
329 chunks.append(chunk)
381 chunks.append(chunk)
330
382
331 self.assertEqual(source.getvalue(), b''.join(chunks))
383 self.assertEqual(source.getvalue(), b"".join(chunks))
332
384
333
385
334 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
386 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
335 @make_cffi
387 @make_cffi
336 class TestDecompressor_stream_writer_fuzzing(unittest.TestCase):
388 class TestDecompressor_stream_writer_fuzzing(TestCase):
337 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
389 @hypothesis.settings(
338 level=strategies.integers(min_value=1, max_value=5),
390 suppress_health_check=[
339 write_size=strategies.integers(min_value=1, max_value=8192),
391 hypothesis.HealthCheck.large_base_example,
340 input_sizes=strategies.data())
392 hypothesis.HealthCheck.too_slow,
393 ]
394 )
395 @hypothesis.given(
396 original=strategies.sampled_from(random_input_data()),
397 level=strategies.integers(min_value=1, max_value=5),
398 write_size=strategies.integers(min_value=1, max_value=8192),
399 input_sizes=strategies.data(),
400 )
341 def test_write_size_variance(self, original, level, write_size, input_sizes):
401 def test_write_size_variance(self, original, level, write_size, input_sizes):
342 cctx = zstd.ZstdCompressor(level=level)
402 cctx = zstd.ZstdCompressor(level=level)
343 frame = cctx.compress(original)
403 frame = cctx.compress(original)
@@ -358,13 +418,21 b' class TestDecompressor_stream_writer_fuz'
358 self.assertEqual(dest.getvalue(), original)
418 self.assertEqual(dest.getvalue(), original)
359
419
360
420
361 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
421 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
362 @make_cffi
422 @make_cffi
363 class TestDecompressor_copy_stream_fuzzing(unittest.TestCase):
423 class TestDecompressor_copy_stream_fuzzing(TestCase):
364 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
424 @hypothesis.settings(
365 level=strategies.integers(min_value=1, max_value=5),
425 suppress_health_check=[
366 read_size=strategies.integers(min_value=1, max_value=8192),
426 hypothesis.HealthCheck.large_base_example,
367 write_size=strategies.integers(min_value=1, max_value=8192))
427 hypothesis.HealthCheck.too_slow,
428 ]
429 )
430 @hypothesis.given(
431 original=strategies.sampled_from(random_input_data()),
432 level=strategies.integers(min_value=1, max_value=5),
433 read_size=strategies.integers(min_value=1, max_value=8192),
434 write_size=strategies.integers(min_value=1, max_value=8192),
435 )
368 def test_read_write_size_variance(self, original, level, read_size, write_size):
436 def test_read_write_size_variance(self, original, level, read_size, write_size):
369 cctx = zstd.ZstdCompressor(level=level)
437 cctx = zstd.ZstdCompressor(level=level)
370 frame = cctx.compress(original)
438 frame = cctx.compress(original)
@@ -378,12 +446,20 b' class TestDecompressor_copy_stream_fuzzi'
378 self.assertEqual(dest.getvalue(), original)
446 self.assertEqual(dest.getvalue(), original)
379
447
380
448
381 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
449 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
382 @make_cffi
450 @make_cffi
383 class TestDecompressor_decompressobj_fuzzing(unittest.TestCase):
451 class TestDecompressor_decompressobj_fuzzing(TestCase):
384 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
452 @hypothesis.settings(
385 level=strategies.integers(min_value=1, max_value=5),
453 suppress_health_check=[
386 chunk_sizes=strategies.data())
454 hypothesis.HealthCheck.large_base_example,
455 hypothesis.HealthCheck.too_slow,
456 ]
457 )
458 @hypothesis.given(
459 original=strategies.sampled_from(random_input_data()),
460 level=strategies.integers(min_value=1, max_value=5),
461 chunk_sizes=strategies.data(),
462 )
387 def test_random_input_sizes(self, original, level, chunk_sizes):
463 def test_random_input_sizes(self, original, level, chunk_sizes):
388 cctx = zstd.ZstdCompressor(level=level)
464 cctx = zstd.ZstdCompressor(level=level)
389 frame = cctx.compress(original)
465 frame = cctx.compress(original)
@@ -402,13 +478,22 b' class TestDecompressor_decompressobj_fuz'
402
478
403 chunks.append(dobj.decompress(chunk))
479 chunks.append(dobj.decompress(chunk))
404
480
405 self.assertEqual(b''.join(chunks), original)
481 self.assertEqual(b"".join(chunks), original)
406
482
407 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
483 @hypothesis.settings(
408 level=strategies.integers(min_value=1, max_value=5),
484 suppress_health_check=[
409 write_size=strategies.integers(min_value=1,
485 hypothesis.HealthCheck.large_base_example,
410 max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE),
486 hypothesis.HealthCheck.too_slow,
411 chunk_sizes=strategies.data())
487 ]
488 )
489 @hypothesis.given(
490 original=strategies.sampled_from(random_input_data()),
491 level=strategies.integers(min_value=1, max_value=5),
492 write_size=strategies.integers(
493 min_value=1, max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE
494 ),
495 chunk_sizes=strategies.data(),
496 )
412 def test_random_output_sizes(self, original, level, write_size, chunk_sizes):
497 def test_random_output_sizes(self, original, level, write_size, chunk_sizes):
413 cctx = zstd.ZstdCompressor(level=level)
498 cctx = zstd.ZstdCompressor(level=level)
414 frame = cctx.compress(original)
499 frame = cctx.compress(original)
@@ -427,16 +512,18 b' class TestDecompressor_decompressobj_fuz'
427
512
428 chunks.append(dobj.decompress(chunk))
513 chunks.append(dobj.decompress(chunk))
429
514
430 self.assertEqual(b''.join(chunks), original)
515 self.assertEqual(b"".join(chunks), original)
431
516
432
517
433 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
518 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
434 @make_cffi
519 @make_cffi
435 class TestDecompressor_read_to_iter_fuzzing(unittest.TestCase):
520 class TestDecompressor_read_to_iter_fuzzing(TestCase):
436 @hypothesis.given(original=strategies.sampled_from(random_input_data()),
521 @hypothesis.given(
437 level=strategies.integers(min_value=1, max_value=5),
522 original=strategies.sampled_from(random_input_data()),
438 read_size=strategies.integers(min_value=1, max_value=4096),
523 level=strategies.integers(min_value=1, max_value=5),
439 write_size=strategies.integers(min_value=1, max_value=4096))
524 read_size=strategies.integers(min_value=1, max_value=4096),
525 write_size=strategies.integers(min_value=1, max_value=4096),
526 )
440 def test_read_write_size_variance(self, original, level, read_size, write_size):
527 def test_read_write_size_variance(self, original, level, read_size, write_size):
441 cctx = zstd.ZstdCompressor(level=level)
528 cctx = zstd.ZstdCompressor(level=level)
442 frame = cctx.compress(original)
529 frame = cctx.compress(original)
@@ -444,29 +531,33 b' class TestDecompressor_read_to_iter_fuzz'
444 source = io.BytesIO(frame)
531 source = io.BytesIO(frame)
445
532
446 dctx = zstd.ZstdDecompressor()
533 dctx = zstd.ZstdDecompressor()
447 chunks = list(dctx.read_to_iter(source, read_size=read_size, write_size=write_size))
534 chunks = list(
535 dctx.read_to_iter(source, read_size=read_size, write_size=write_size)
536 )
448
537
449 self.assertEqual(b''.join(chunks), original)
538 self.assertEqual(b"".join(chunks), original)
450
539
451
540
452 @unittest.skipUnless('ZSTD_SLOW_TESTS' in os.environ, 'ZSTD_SLOW_TESTS not set')
541 @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
453 class TestDecompressor_multi_decompress_to_buffer_fuzzing(unittest.TestCase):
542 class TestDecompressor_multi_decompress_to_buffer_fuzzing(TestCase):
454 @hypothesis.given(original=strategies.lists(strategies.sampled_from(random_input_data()),
543 @hypothesis.given(
455 min_size=1, max_size=1024),
544 original=strategies.lists(
456 threads=strategies.integers(min_value=1, max_value=8),
545 strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
457 use_dict=strategies.booleans())
546 ),
547 threads=strategies.integers(min_value=1, max_value=8),
548 use_dict=strategies.booleans(),
549 )
458 def test_data_equivalence(self, original, threads, use_dict):
550 def test_data_equivalence(self, original, threads, use_dict):
459 kwargs = {}
551 kwargs = {}
460 if use_dict:
552 if use_dict:
461 kwargs['dict_data'] = zstd.ZstdCompressionDict(original[0])
553 kwargs["dict_data"] = zstd.ZstdCompressionDict(original[0])
462
554
463 cctx = zstd.ZstdCompressor(level=1,
555 cctx = zstd.ZstdCompressor(
464 write_content_size=True,
556 level=1, write_content_size=True, write_checksum=True, **kwargs
465 write_checksum=True,
557 )
466 **kwargs)
467
558
468 if not hasattr(cctx, 'multi_compress_to_buffer'):
559 if not hasattr(cctx, "multi_compress_to_buffer"):
469 self.skipTest('multi_compress_to_buffer not available')
560 self.skipTest("multi_compress_to_buffer not available")
470
561
471 frames_buffer = cctx.multi_compress_to_buffer(original, threads=-1)
562 frames_buffer = cctx.multi_compress_to_buffer(original, threads=-1)
472
563
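The fuzzing tests above all reduce to the same read loop; here is a deterministic sketch of that loop with fixed sizes in place of hypothesis-drawn ones, assuming the stream_reader API used above:

    import io
    import zstandard as zstd

    cctx = zstd.ZstdCompressor(level=1)
    original = b"data to round-trip" * 1024
    frame = cctx.compress(original)

    dctx = zstd.ZstdDecompressor()
    reader = dctx.stream_reader(io.BytesIO(frame), read_size=8192)

    chunks = []
    while True:
        chunk = reader.read(4096)
        if not chunk:
            break
        chunks.append(chunk)

    assert b"".join(chunks) == original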
@@ -2,14 +2,14 b' import unittest'
2
2
3 import zstandard as zstd
3 import zstandard as zstd
4
4
5 from . common import (
5 from .common import (
6 make_cffi,
6 make_cffi,
7 TestCase,
7 )
8 )
8
9
9
10
10 @make_cffi
11 @make_cffi
11 class TestSizes(unittest.TestCase):
12 class TestSizes(TestCase):
12 def test_decompression_size(self):
13 def test_decompression_size(self):
13 size = zstd.estimate_decompression_context_size()
14 size = zstd.estimate_decompression_context_size()
14 self.assertGreater(size, 100000)
15 self.assertGreater(size, 100000)
15
@@ -4,65 +4,66 b' import unittest'
4
4
5 import zstandard as zstd
5 import zstandard as zstd
6
6
7 from . common import (
7 from .common import (
8 make_cffi,
8 make_cffi,
9 TestCase,
9 )
10 )
10
11
11
12
12 @make_cffi
13 @make_cffi
13 class TestModuleAttributes(unittest.TestCase):
14 class TestModuleAttributes(TestCase):
14 def test_version(self):
15 def test_version(self):
15 self.assertEqual(zstd.ZSTD_VERSION, (1, 4, 3))
16 self.assertEqual(zstd.ZSTD_VERSION, (1, 4, 4))
16
17
17 self.assertEqual(zstd.__version__, '0.12.0')
18 self.assertEqual(zstd.__version__, "0.13.0")
18
19
19 def test_constants(self):
20 def test_constants(self):
20 self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
21 self.assertEqual(zstd.MAX_COMPRESSION_LEVEL, 22)
21 self.assertEqual(zstd.FRAME_HEADER, b'\x28\xb5\x2f\xfd')
22 self.assertEqual(zstd.FRAME_HEADER, b"\x28\xb5\x2f\xfd")
22
23
23 def test_hasattr(self):
24 def test_hasattr(self):
24 attrs = (
25 attrs = (
25 'CONTENTSIZE_UNKNOWN',
26 "CONTENTSIZE_UNKNOWN",
26 'CONTENTSIZE_ERROR',
27 "CONTENTSIZE_ERROR",
27 'COMPRESSION_RECOMMENDED_INPUT_SIZE',
28 "COMPRESSION_RECOMMENDED_INPUT_SIZE",
28 'COMPRESSION_RECOMMENDED_OUTPUT_SIZE',
29 "COMPRESSION_RECOMMENDED_OUTPUT_SIZE",
29 'DECOMPRESSION_RECOMMENDED_INPUT_SIZE',
30 "DECOMPRESSION_RECOMMENDED_INPUT_SIZE",
30 'DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE',
31 "DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE",
31 'MAGIC_NUMBER',
32 "MAGIC_NUMBER",
32 'FLUSH_BLOCK',
33 "FLUSH_BLOCK",
33 'FLUSH_FRAME',
34 "FLUSH_FRAME",
34 'BLOCKSIZELOG_MAX',
35 "BLOCKSIZELOG_MAX",
35 'BLOCKSIZE_MAX',
36 "BLOCKSIZE_MAX",
36 'WINDOWLOG_MIN',
37 "WINDOWLOG_MIN",
37 'WINDOWLOG_MAX',
38 "WINDOWLOG_MAX",
38 'CHAINLOG_MIN',
39 "CHAINLOG_MIN",
39 'CHAINLOG_MAX',
40 "CHAINLOG_MAX",
40 'HASHLOG_MIN',
41 "HASHLOG_MIN",
41 'HASHLOG_MAX',
42 "HASHLOG_MAX",
42 'HASHLOG3_MAX',
43 "HASHLOG3_MAX",
43 'MINMATCH_MIN',
44 "MINMATCH_MIN",
44 'MINMATCH_MAX',
45 "MINMATCH_MAX",
45 'SEARCHLOG_MIN',
46 "SEARCHLOG_MIN",
46 'SEARCHLOG_MAX',
47 "SEARCHLOG_MAX",
47 'SEARCHLENGTH_MIN',
48 "SEARCHLENGTH_MIN",
48 'SEARCHLENGTH_MAX',
49 "SEARCHLENGTH_MAX",
49 'TARGETLENGTH_MIN',
50 "TARGETLENGTH_MIN",
50 'TARGETLENGTH_MAX',
51 "TARGETLENGTH_MAX",
51 'LDM_MINMATCH_MIN',
52 "LDM_MINMATCH_MIN",
52 'LDM_MINMATCH_MAX',
53 "LDM_MINMATCH_MAX",
53 'LDM_BUCKETSIZELOG_MAX',
54 "LDM_BUCKETSIZELOG_MAX",
54 'STRATEGY_FAST',
55 "STRATEGY_FAST",
55 'STRATEGY_DFAST',
56 "STRATEGY_DFAST",
56 'STRATEGY_GREEDY',
57 "STRATEGY_GREEDY",
57 'STRATEGY_LAZY',
58 "STRATEGY_LAZY",
58 'STRATEGY_LAZY2',
59 "STRATEGY_LAZY2",
59 'STRATEGY_BTLAZY2',
60 "STRATEGY_BTLAZY2",
60 'STRATEGY_BTOPT',
61 "STRATEGY_BTOPT",
61 'STRATEGY_BTULTRA',
62 "STRATEGY_BTULTRA",
62 'STRATEGY_BTULTRA2',
63 "STRATEGY_BTULTRA2",
63 'DICT_TYPE_AUTO',
64 "DICT_TYPE_AUTO",
64 'DICT_TYPE_RAWCONTENT',
65 "DICT_TYPE_RAWCONTENT",
65 'DICT_TYPE_FULLDICT',
66 "DICT_TYPE_FULLDICT",
66 )
67 )
67
68
68 for a in attrs:
69 for a in attrs:
@@ -4,10 +4,11 b' import unittest'
4
4
5 import zstandard as zstd
5 import zstandard as zstd
6
6
7 from . common import (
7 from .common import (
8 generate_samples,
8 generate_samples,
9 make_cffi,
9 make_cffi,
10 random_input_data,
10 random_input_data,
11 TestCase,
11 )
12 )
12
13
13 if sys.version_info[0] >= 3:
14 if sys.version_info[0] >= 3:
@@ -17,24 +18,24 b' else:'
17
18
18
19
19 @make_cffi
20 @make_cffi
20 class TestTrainDictionary(unittest.TestCase):
21 class TestTrainDictionary(TestCase):
21 def test_no_args(self):
22 def test_no_args(self):
22 with self.assertRaises(TypeError):
23 with self.assertRaises(TypeError):
23 zstd.train_dictionary()
24 zstd.train_dictionary()
24
25
25 def test_bad_args(self):
26 def test_bad_args(self):
26 with self.assertRaises(TypeError):
27 with self.assertRaises(TypeError):
27 zstd.train_dictionary(8192, u'foo')
28 zstd.train_dictionary(8192, u"foo")
28
29
29 with self.assertRaises(ValueError):
30 with self.assertRaises(ValueError):
30 zstd.train_dictionary(8192, [u'foo'])
31 zstd.train_dictionary(8192, [u"foo"])
31
32
32 def test_no_params(self):
33 def test_no_params(self):
33 d = zstd.train_dictionary(8192, random_input_data())
34 d = zstd.train_dictionary(8192, random_input_data())
34 self.assertIsInstance(d.dict_id(), int_type)
35 self.assertIsInstance(d.dict_id(), int_type)
35
36
36 # The dictionary ID may be different across platforms.
37 # The dictionary ID may be different across platforms.
37 expected = b'\x37\xa4\x30\xec' + struct.pack('<I', d.dict_id())
38 expected = b"\x37\xa4\x30\xec" + struct.pack("<I", d.dict_id())
38
39
39 data = d.as_bytes()
40 data = d.as_bytes()
40 self.assertEqual(data[0:8], expected)
41 self.assertEqual(data[0:8], expected)
@@ -44,46 +45,48 b' class TestTrainDictionary(unittest.TestC'
44 self.assertIsInstance(d.dict_id(), int_type)
45 self.assertIsInstance(d.dict_id(), int_type)
45
46
46 data = d.as_bytes()
47 data = d.as_bytes()
47 self.assertEqual(data[0:4], b'\x37\xa4\x30\xec')
48 self.assertEqual(data[0:4], b"\x37\xa4\x30\xec")
48
49
49 self.assertEqual(d.k, 64)
50 self.assertEqual(d.k, 64)
50 self.assertEqual(d.d, 16)
51 self.assertEqual(d.d, 16)
51
52
52 def test_set_dict_id(self):
53 def test_set_dict_id(self):
53 d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16,
54 d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16, dict_id=42)
54 dict_id=42)
55 self.assertEqual(d.dict_id(), 42)
55 self.assertEqual(d.dict_id(), 42)
56
56
57 def test_optimize(self):
57 def test_optimize(self):
58 d = zstd.train_dictionary(8192, generate_samples(), threads=-1, steps=1,
58 d = zstd.train_dictionary(8192, generate_samples(), threads=-1, steps=1, d=16)
59 d=16)
60
59
61 # This varies by platform.
60 # This varies by platform.
62 self.assertIn(d.k, (50, 2000))
61 self.assertIn(d.k, (50, 2000))
63 self.assertEqual(d.d, 16)
62 self.assertEqual(d.d, 16)
64
63
64
65 @make_cffi
65 @make_cffi
66 class TestCompressionDict(unittest.TestCase):
66 class TestCompressionDict(TestCase):
67 def test_bad_mode(self):
67 def test_bad_mode(self):
68 with self.assertRaisesRegexp(ValueError, 'invalid dictionary load mode'):
68 with self.assertRaisesRegex(ValueError, "invalid dictionary load mode"):
69 zstd.ZstdCompressionDict(b'foo', dict_type=42)
69 zstd.ZstdCompressionDict(b"foo", dict_type=42)
70
70
71 def test_bad_precompute_compress(self):
71 def test_bad_precompute_compress(self):
72 d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16)
72 d = zstd.train_dictionary(8192, generate_samples(), k=64, d=16)
73
73
74 with self.assertRaisesRegexp(ValueError, 'must specify one of level or '):
74 with self.assertRaisesRegex(ValueError, "must specify one of level or "):
75 d.precompute_compress()
75 d.precompute_compress()
76
76
77 with self.assertRaisesRegexp(ValueError, 'must only specify one of level or '):
77 with self.assertRaisesRegex(ValueError, "must only specify one of level or "):
78 d.precompute_compress(level=3,
78 d.precompute_compress(
79 compression_params=zstd.CompressionParameters())
79 level=3, compression_params=zstd.CompressionParameters()
80 )
80
81
81 def test_precompute_compress_rawcontent(self):
82 def test_precompute_compress_rawcontent(self):
82 d = zstd.ZstdCompressionDict(b'dictcontent' * 64,
83 d = zstd.ZstdCompressionDict(
83 dict_type=zstd.DICT_TYPE_RAWCONTENT)
84 b"dictcontent" * 64, dict_type=zstd.DICT_TYPE_RAWCONTENT
85 )
84 d.precompute_compress(level=1)
86 d.precompute_compress(level=1)
85
87
86 d = zstd.ZstdCompressionDict(b'dictcontent' * 64,
88 d = zstd.ZstdCompressionDict(
87 dict_type=zstd.DICT_TYPE_FULLDICT)
89 b"dictcontent" * 64, dict_type=zstd.DICT_TYPE_FULLDICT
88 with self.assertRaisesRegexp(zstd.ZstdError, 'unable to precompute dictionary'):
90 )
91 with self.assertRaisesRegex(zstd.ZstdError, "unable to precompute dictionary"):
89 d.precompute_compress(level=1)
92 d.precompute_compress(level=1)
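A compact sketch of the dictionary APIs covered by these tests. The raw-content path shown here is deterministic; zstd.train_dictionary(8192, samples, k=64, d=16) would produce a trained dictionary instead and may raise if the sample corpus is too small or uniform:

    import zstandard as zstd

    # Wrap arbitrary bytes as a raw-content dictionary (no trained tables).
    d = zstd.ZstdCompressionDict(
        b"dictcontent" * 64, dict_type=zstd.DICT_TYPE_RAWCONTENT
    )
    d.precompute_compress(level=1)  # build compression tables once, up front

    cctx = zstd.ZstdCompressor(level=1, dict_data=d)
    dctx = zstd.ZstdDecompressor(dict_data=d)
    frame = cctx.compress(b"dictcontent" * 16)
    assert dctx.decompress(frame) == b"dictcontent" * 16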
@@ -28,38 +28,48 b' import platform'
28 # defining a variable and `setup.py` could write the file with whatever
28 # defining a variable and `setup.py` could write the file with whatever
29 # policy was specified at build time. Until someone needs it, we go with
29 # policy was specified at build time. Until someone needs it, we go with
30 # the hacky but simple environment variable approach.
30 # the hacky but simple environment variable approach.
31 _module_policy = os.environ.get('PYTHON_ZSTANDARD_IMPORT_POLICY', 'default')
31 _module_policy = os.environ.get("PYTHON_ZSTANDARD_IMPORT_POLICY", "default")
32
32
33 if _module_policy == 'default':
33 if _module_policy == "default":
34 if platform.python_implementation() in ('CPython',):
34 if platform.python_implementation() in ("CPython",):
35 from zstd import *
35 from zstd import *
36 backend = 'cext'
36
37 elif platform.python_implementation() in ('PyPy',):
37 backend = "cext"
38 elif platform.python_implementation() in ("PyPy",):
38 from .cffi import *
39 from .cffi import *
39 backend = 'cffi'
40
41 backend = "cffi"
40 else:
42 else:
41 try:
43 try:
42 from zstd import *
44 from zstd import *
43 backend = 'cext'
45
46 backend = "cext"
44 except ImportError:
47 except ImportError:
45 from .cffi import *
48 from .cffi import *
46 backend = 'cffi'
49
47 elif _module_policy == 'cffi_fallback':
50 backend = "cffi"
51 elif _module_policy == "cffi_fallback":
48 try:
52 try:
49 from zstd import *
53 from zstd import *
50 backend = 'cext'
54
55 backend = "cext"
51 except ImportError:
56 except ImportError:
52 from .cffi import *
57 from .cffi import *
53 backend = 'cffi'
58
54 elif _module_policy == 'cext':
59 backend = "cffi"
60 elif _module_policy == "cext":
55 from zstd import *
61 from zstd import *
56 backend = 'cext'
62
57 elif _module_policy == 'cffi':
63 backend = "cext"
64 elif _module_policy == "cffi":
58 from .cffi import *
65 from .cffi import *
59 backend = 'cffi'
66
67 backend = "cffi"
60 else:
68 else:
61 raise ImportError('unknown module import policy: %s; use default, cffi_fallback, '
69 raise ImportError(
62 'cext, or cffi' % _module_policy)
70 "unknown module import policy: %s; use default, cffi_fallback, "
71 "cext, or cffi" % _module_policy
72 )
63
73
64 # Keep this in sync with python-zstandard.h.
74 # Keep this in sync with python-zstandard.h.
65 __version__ = '0.12.0'
75 __version__ = "0.13.0"
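The backend selection shown in this hunk is driven entirely by the PYTHON_ZSTANDARD_IMPORT_POLICY environment variable (default, cffi_fallback, cext, or cffi), and the chosen backend is recorded in the module-level backend attribute. A small sketch, assuming the variable is set before zstandard is first imported:

    import os

    # Must be set before the first import of zstandard anywhere in the process.
    os.environ["PYTHON_ZSTANDARD_IMPORT_POLICY"] = "cffi"

    import zstandard as zstd

    print(zstd.backend)      # "cffi" here; "cext" under the default policy on CPython
    print(zstd.__version__)  # "0.13.0" for the copy vendored by this change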
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
NO CONTENT: file renamed from mercurial/default.d/mergetools.rc to mercurial/defaultrc/mergetools.rc
NO CONTENT: file renamed from mercurial/help/bundlespec.txt to mercurial/helptext/bundlespec.txt
NO CONTENT: file renamed from mercurial/help/color.txt to mercurial/helptext/color.txt
NO CONTENT: file renamed from mercurial/help/common.txt to mercurial/helptext/common.txt
NO CONTENT: file renamed from mercurial/help/config.txt to mercurial/helptext/config.txt
NO CONTENT: file renamed from mercurial/help/dates.txt to mercurial/helptext/dates.txt
NO CONTENT: file renamed from mercurial/help/deprecated.txt to mercurial/helptext/deprecated.txt
NO CONTENT: file renamed from mercurial/help/diffs.txt to mercurial/helptext/diffs.txt
NO CONTENT: file renamed from mercurial/help/environment.txt to mercurial/helptext/environment.txt
NO CONTENT: file renamed from mercurial/help/extensions.txt to mercurial/helptext/extensions.txt
NO CONTENT: file renamed from mercurial/help/filesets.txt to mercurial/helptext/filesets.txt
NO CONTENT: file renamed from mercurial/help/flags.txt to mercurial/helptext/flags.txt
NO CONTENT: file renamed from mercurial/help/glossary.txt to mercurial/helptext/glossary.txt
NO CONTENT: file renamed from mercurial/help/hg-ssh.8.txt to mercurial/helptext/hg-ssh.8.txt
NO CONTENT: file renamed from mercurial/help/hg.1.txt to mercurial/helptext/hg.1.txt
NO CONTENT: file renamed from mercurial/help/hgignore.5.txt to mercurial/helptext/hgignore.5.txt
NO CONTENT: file renamed from mercurial/help/hgignore.txt to mercurial/helptext/hgignore.txt
NO CONTENT: file renamed from mercurial/help/hgrc.5.txt to mercurial/helptext/hgrc.5.txt
NO CONTENT: file renamed from mercurial/help/hgweb.txt to mercurial/helptext/hgweb.txt
NO CONTENT: file renamed from mercurial/help/internals/bundle2.txt to mercurial/helptext/internals/bundle2.txt
NO CONTENT: file renamed from mercurial/help/internals/bundles.txt to mercurial/helptext/internals/bundles.txt
NO CONTENT: file renamed from mercurial/help/internals/cbor.txt to mercurial/helptext/internals/cbor.txt
NO CONTENT: file renamed from mercurial/help/internals/censor.txt to mercurial/helptext/internals/censor.txt
NO CONTENT: file renamed from mercurial/help/internals/changegroups.txt to mercurial/helptext/internals/changegroups.txt
NO CONTENT: file renamed from mercurial/help/internals/config.txt to mercurial/helptext/internals/config.txt
NO CONTENT: file renamed from mercurial/help/internals/extensions.txt to mercurial/helptext/internals/extensions.txt
NO CONTENT: file renamed from mercurial/help/internals/linelog.txt to mercurial/helptext/internals/linelog.txt
NO CONTENT: file renamed from mercurial/help/internals/mergestate.txt to mercurial/helptext/internals/mergestate.txt
NO CONTENT: file renamed from mercurial/help/internals/requirements.txt to mercurial/helptext/internals/requirements.txt
NO CONTENT: file renamed from mercurial/help/internals/revlogs.txt to mercurial/helptext/internals/revlogs.txt
NO CONTENT: file renamed from mercurial/help/internals/wireprotocol.txt to mercurial/helptext/internals/wireprotocol.txt
NO CONTENT: file renamed from mercurial/help/internals/wireprotocolrpc.txt to mercurial/helptext/internals/wireprotocolrpc.txt
NO CONTENT: file renamed from mercurial/help/internals/wireprotocolv2.txt to mercurial/helptext/internals/wireprotocolv2.txt
NO CONTENT: file renamed from mercurial/help/merge-tools.txt to mercurial/helptext/merge-tools.txt
NO CONTENT: file renamed from mercurial/help/pager.txt to mercurial/helptext/pager.txt
NO CONTENT: file renamed from mercurial/help/patterns.txt to mercurial/helptext/patterns.txt
NO CONTENT: file renamed from mercurial/help/phases.txt to mercurial/helptext/phases.txt
NO CONTENT: file renamed from mercurial/help/revisions.txt to mercurial/helptext/revisions.txt
NO CONTENT: file renamed from mercurial/help/scripting.txt to mercurial/helptext/scripting.txt
NO CONTENT: file renamed from mercurial/help/subrepos.txt to mercurial/helptext/subrepos.txt
NO CONTENT: file renamed from mercurial/help/templates.txt to mercurial/helptext/templates.txt
NO CONTENT: file renamed from mercurial/help/urls.txt to mercurial/helptext/urls.txt
NO CONTENT: file copied from mercurial/utils/procutil.py to mercurial/utils/resourceutil.py
NO CONTENT: file copied from rust/README.rst to rust/hgcli/README.rst
NO CONTENT: file copied from tests/test-graft.t to tests/test-graft-interrupted.t
NO CONTENT: file copied from tests/test-graft.t to tests/test-graft-rename.t
NO CONTENT: ten files were removed (file names not shown in the truncated diff)