@@ -201,6 +201,7 @@ def run_tests_linux(
     with aws.temporary_linux_dev_instances(
         c, image, instance_type, ensure_extra_volume=ensure_extra_volume
     ) as insts:
+
         instance = insts[0]

         linux.prepare_exec_environment(
@@ -57,6 +57,7 @@ if sys.version_info[0] > 2:
            return b
        return b.decode('utf8')

+
else:
    mkstr = lambda x: x

@@ -25,6 +25,7 @@ if sys.version_info[0] < 3:
            """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
            return self.__bytes__()

+
else:

    class py2reprhack:
@@ -22,7 +22,7 @@ workflow:
 stages:
   - tests

-image: registry.heptapod.net/mercurial/ci-images/mercurial-core@sha256:dc95edf69e2f9baee0eba74a92514b0d6852b98aba48495831a81a495f58c1e7
+image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG

 variables:
     PYTHON: python
@@ -39,16 +39,15 @@ variables:
     # The runner made a clone as root.
     # We make a new clone owned by user used to run the step.
     before_script:
-        - export PATH="/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/shims:/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/bin:$PATH"
-        - echo "python used, $PYTHON"
-        - $PYTHON --version
-        - black --version
-        - clang-format --version
        - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
        - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
        - cd /tmp/mercurial-ci/
        - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
+        - black --version
+        - clang-format --version
    script:
+        - echo "python used, $PYTHON"
+        - $PYTHON --version
        - echo "$RUNTEST_ARGS"
        - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS

@@ -56,22 +55,25 @@ checks:
     <<: *runtests
     variables:
         RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
-        PYTHON: python
+        PYTHON: python3
         CI_CLEVER_CLOUD_FLAVOR: S

 rust-cargo-test:
     <<: *all
     stage: tests
     script:
+        - echo "python used, $PYTHON"
         - make rust-tests
         - make cargo-clippy
     variables:
+        PYTHON: python3
         CI_CLEVER_CLOUD_FLAVOR: S

 test-c:
     <<: *runtests
     variables:
         RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
+        PYTHON: python3
         TEST_HGMODULEPOLICY: "c"
         TEST_HGTESTS_ALLOW_NETIO: "1"

@@ -79,6 +81,7 @@ test-pure:
     <<: *runtests
     variables:
         RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
+        PYTHON: python3
         TEST_HGMODULEPOLICY: "py"

 test-rust:
@@ -86,7 +89,7 @@ test-rust:
     variables:
         HGWITHRUSTEXT: cpython
         RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
-        PYTHON: python
+        PYTHON: python3
         TEST_HGMODULEPOLICY: "rust+c"

 test-rhg:
@@ -94,27 +97,30 @@ test-rhg:
     variables:
         HGWITHRUSTEXT: cpython
         RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
+        PYTHON: python3
         TEST_HGMODULEPOLICY: "rust+c"

 test-chg:
     <<: *runtests
     variables:
+        PYTHON: python3
         RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
         TEST_HGMODULEPOLICY: "c"

 check-pytype:
     extends: .runtests_template
     before_script:
-        - export PATH="/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/shims:/home/ci-runner/vendor/pyenv/pyenv-2.4.7-adf3c2bccf09cdb81febcfd15b186711a33ac7a8/bin:$PATH"
-        - echo "PATH, $PATH"
        - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
        - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
        - cd /tmp/mercurial-ci/
        - make local PYTHON=$PYTHON
+        - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.11.18
        - ./contrib/setup-pytype.sh
    script:
        - echo "Entering script section"
        - sh contrib/check-pytype.sh
+    variables:
+        PYTHON: python3

 # `sh.exe --login` sets a couple of extra environment variables that are defined
 # in the MinGW shell, but switches CWD to /home/$username. The previous value
@@ -21,6 +21,7 @@ if sys.version_info[0] >= 3:
        pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
        stdout.write(b' '.join(pargs) + b'\n')

+
else:
    import cStringIO

@@ -205,6 +205,7 @@ def process(case, variant):


 if __name__ == '__main__':
+
    argv = sys.argv[:]

    kwargs = {}
@@ -130,6 +130,7 @@ try:
     def revlog(opener, *args, **kwargs):
         return mercurial.revlog.revlog(opener, perf_rl_kind, *args, **kwargs)

+
 except (ImportError, AttributeError):
     perf_rl_kind = None

@@ -260,6 +261,7 @@ elif safehasattr(cmdutil, 'command'):
             commands.norepo += b' %s' % b' '.join(parsealiases(name))
         return _command(name, list(options), synopsis)

+
 else:
     # for "historical portability":
     # define "@command" annotation locally, because cmdutil.command
@@ -1924,7 +1926,7 @@ def perfindex(ui, repo, **opts):

     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
     if opts[b'no_lookup']:
         if opts['rev']:
             raise error.Abort('--no-lookup and --rev are mutually exclusive')
@@ -1983,7 +1985,7 @@ def perfnodemap(ui, repo, **opts):

     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg

     unfi = repo.unfiltered()
     clearcaches = opts[b'clear_caches']
@@ -2387,7 +2389,7 @@ def perfnodelookup(ui, repo, rev, **opts
     timer, fm = gettimer(ui, opts)
     import mercurial.revlog

-    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
     n = scmutil.revsingle(repo, rev).node()

     try:
@@ -3100,7 +3102,7 @@ def perf_unbundle(ui, repo, fname, **opt
     # disable inlining
     old_max_inline = mercurial.revlog._maxinline
     # large enough to never happen
-    mercurial.revlog._maxinline = 2**50
+    mercurial.revlog._maxinline = 2 ** 50

     with repo.lock():
         bundle = [None, None]
@@ -137,6 +137,7 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_read_variance(
         self, original, level, source_read_size, read_sizes
     ):
+
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)

@@ -202,6 +203,7 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto(
         self, original, level, source_read_size, read_size
     ):
+
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)

@@ -271,6 +273,7 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto_variance(
         self, original, level, source_read_size, read_sizes
     ):
+
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)

@@ -407,6 +410,7 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_read1_variance(
         self, original, level, source_read_size, read_sizes
     ):
+
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)

@@ -547,6 +551,7 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto1_variance(
         self, original, level, source_read_size, read_sizes
     ):
+
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)

@@ -189,7 +189,7 @@ class TestDecompressor_decompress(TestCa
         # Will get OverflowError on some Python distributions that can't
         # handle really large integers.
         with self.assertRaises((MemoryError, OverflowError)):
-            dctx.decompress(compressed, max_output_size=2**62)
+            dctx.decompress(compressed, max_output_size=2 ** 62)

     def test_dictionary(self):
         samples = []
@@ -238,7 +238,7 @@ class TestDecompressor_decompress(TestCa
         cctx = zstd.ZstdCompressor(write_content_size=False)
         frame = cctx.compress(source)

-        dctx = zstd.ZstdDecompressor(max_window_size=2**zstd.WINDOWLOG_MIN)
+        dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN)

         with self.assertRaisesRegex(
             zstd.ZstdError,
@@ -353,6 +353,7 @@ class TestDecompressor_stream_reader_fuz
     def test_multiple_frames(
         self, originals, frame_count, level, source_read_size, read_sizes
     ):
+
         cctx = zstd.ZstdCompressor(level=level)
         source = io.BytesIO()
         buffer = io.BytesIO()
@@ -273,6 +273,7 @@ class ZstdCompressionParameters(object):
         ldm_hash_every_log=-1,
         threads=0,
     ):
+
         params = lib.ZSTD_createCCtxParams()
         if params == ffi.NULL:
             raise MemoryError()
@@ -1422,6 +1423,7 @@ class ZstdCompressor(object):
         read_size=COMPRESSION_RECOMMENDED_INPUT_SIZE,
         write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE,
     ):
+
         if not hasattr(ifh, "read"):
             raise ValueError("first argument must have a read() method")
         if not hasattr(ofh, "write"):
@@ -1521,6 +1523,7 @@ class ZstdCompressor(object):
         write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE,
         write_return_read=False,
     ):
+
         if not hasattr(writer, "write"):
             raise ValueError("must pass an object with a write() method")

@@ -191,7 +191,7 @@ def formatfactor(factor):

 def formattiming(value):
     """format a value to strictly 8 char, dropping some precision if needed"""
-    if value < 10**7:
+    if value < 10 ** 7:
         return ('%.6f' % value)[:8]
     else:
         # value is HUGE very unlikely to happen (4+ month run)
@@ -371,6 +371,7 @@ print()
 print()

-for ridx, rset in enumerate(revsets):
+for (ridx, rset) in enumerate(revsets):
+
     print("revset #%i: %s" % (ridx, rset))
     printheader(variants, len(results), verbose=options.verbose, relative=True)
     ref = None
@@ -5,7 +5,7 @@ set -u

 # Find the python3 setup that would run pytype
 PYTYPE=`which pytype`
-PYTHON3=
+PYTHON3=`head -n1 ${PYTYPE} | sed -s 's/#!//'`

 # Existing stubs that pytype processes live here
 TYPESHED=$(${PYTHON3} -c "import pytype; print(pytype.__path__[0])")/typeshed/stubs
@@ -101,7 +101,6 @@ if getattr(sys, 'isapidllhandle', None)
 import isapi_wsgi
 from mercurial.hgweb.hgwebdir_mod import hgwebdir

-
 # Example tweak: Replace isapi_wsgi's handler to provide better error message
 # Other stuff could also be done here, like logging errors etc.
 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
@@ -115,6 +114,7 @@ application = hgwebdir(hgweb_config)


 def handler(environ, start_response):
+
     # Translate IIS's weird URLs
     url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
     paths = url[1:].split('/')[path_strip:]
@@ -95,6 +95,7 @@ level margin: \\n[rst2man-indent\\n[rst2


 class Writer(writers.Writer):
+
     supported = 'manpage'
     """Formats this writer supports."""

@@ -296,7 +297,7 @@ class Translator(nodes.NodeVisitor):
             (u'´', u"\\'"),
             (u'`', u'\\(ga'),
         ]
-        for in_char, out_markup in replace_pairs:
+        for (in_char, out_markup) in replace_pairs:
             text = text.replace(in_char, out_markup)
             # unicode
             text = self.deunicode(text)
@@ -279,6 +279,7 @@ configitem(


 def _getusers(ui, group):
+
     # First, try to use group definition from section [acl.groups]
     hgrcusers = ui.configlist(b'acl.groups', group)
     if hgrcusers:
@@ -293,10 +294,12 @@ def _getusers(ui, group):


 def _usermatch(ui, user, usersorgroups):
+
     if usersorgroups == b'*':
         return True

     for ug in usersorgroups.replace(b',', b' ').split():
+
         if ug.startswith(b'!'):
             # Test for excluded user or group. Format:
             # if ug is a user name: !username
@@ -365,6 +368,7 @@ def ensureenabled(ui):


 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
+
     ensureenabled(ui)

     if hooktype not in [b'pretxnchangegroup', b'pretxncommit', b'prepushkey']:
@@ -76,6 +76,7 @@ def readauthormap(ui: "uimod.ui", author
         authors = {}
     with open(authorfile, b'rb') as afile:
         for line in afile:
+
             line = line.strip()
             if not line or line.startswith(b'#'):
                 continue
272 |
|
273 | |||
273 | class converter: |
|
274 | class converter: | |
274 | def __init__(self, ui: "uimod.ui", source, dest, revmapfile, opts) -> None: |
|
275 | def __init__(self, ui: "uimod.ui", source, dest, revmapfile, opts) -> None: | |
|
276 | ||||
275 | self.source = source |
|
277 | self.source = source | |
276 | self.dest = dest |
|
278 | self.dest = dest | |
277 | self.ui = ui |
|
279 | self.ui = ui |
@@ -639,6 +639,7 @@ def createchangeset(ui, log, fuzz=60, me
     files = set()
     c = None
-    for i, e in enumerate(log):
+    for (i, e) in enumerate(log):
+
         # Check if log entry belongs to the current changeset or not.

         # Since CVS is file-centric, two different file revisions with
982 | branches = {} # latest version number in each branch |
|
983 | branches = {} # latest version number in each branch | |
983 | ancestors = {} # parent branch |
|
984 | ancestors = {} # parent branch | |
984 | for cs in changesets: |
|
985 | for cs in changesets: | |
|
986 | ||||
985 | if opts[b"ancestors"]: |
|
987 | if opts[b"ancestors"]: | |
986 | if cs.branch not in branches and cs.parents and cs.parents[0].id: |
|
988 | if cs.branch not in branches and cs.parents and cs.parents[0].id: | |
987 | ancestors[cs.branch] = ( |
|
989 | ancestors[cs.branch] = ( |
@@ -1425,6 +1425,7 @@ class svn_sink(converter_sink, commandli
         return self.join(b'hg-authormap')

     def __init__(self, ui, repotype, path):
+
         converter_sink.__init__(self, ui, repotype, path)
         commandline.__init__(self, ui, b'svn')
         self.delete = []
@@ -405,6 +405,7 @@ def diffrevs(
     guitool,
     opts,
 ):
+
     subrepos = opts.get(b'subrepos')

     # calculate list of files changed between both revs
@@ -38,7 +38,6 @@ from . import (
     revmap as revmapmod,
 )

-
 # given path, get filelog, cached
 @util.lrucachefunc
 def _getflog(repo, path):
@@ -17,7 +17,6 @@ from mercurial import (
 )
 from mercurial.utils import dateutil

-
 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.
 class defaultformatter:
@@ -893,6 +893,7 @@ def wrapupdate(
     matcher=None,
     **kwargs
 ):
+
     distance = 0
     partial = True
     oldnode = repo[b'.'].node()
@@ -210,6 +210,7 @@ if _debugging:
             )
         )

+
 else:

     def log(fmt, *args):
@@ -46,6 +46,7 @@ if compat.PYTHON3:
         # returns None.
         return sys.getfilesystemencoding()

+
 else:
     # Python 2 doesn't support surrogateescape, so use 'strict' by
     # default. Users can register a custom surrogateescape error handler and use
@@ -43,6 +43,7 @@ SYNTAX_CSS = (


 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
+
     # append a <link ...> to the syntax highlighting css
     tmpl.load(b'header')
     old_header = tmpl.cache[b'header']
@@ -1526,8 +1526,7 @@ pgup/K: move patch up, pgdn/J: move patc

     def move_cursor(self, oldpos, newpos):
         """Change the rule/changeset that the cursor is pointing to, regardless of
-        current mode (you can switch between patches from the view patch window).
-        """
+        current mode (you can switch between patches from the view patch window)."""
         self.pos = newpos

         mode, _ = self.mode
@@ -1606,8 +1605,7 @@ pgup/K: move patch up, pgdn/J: move patc

     def change_view(self, delta, unit):
         """Change the region of whatever is being viewed (a patch or the list of
-        changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.
-        """
+        changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
         mode, _ = self.mode
         if mode != MODE_PATCH:
             return
@@ -64,7 +64,6 @@ sharednamespaces = {
     bookmarktype: hg.sharedbookmarks,
 }

-
 # Journal recording, register hooks and storage object
 def extsetup(ui):
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
@@ -160,8 +160,6 @@ configitem(
     b'svn',
     default=False,
 )
-
-
 # date like in cvs' $Date
 @templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
@@ -897,7 +897,7 @@ def overridecopy(orig, ui, repo, pats, o
     result += orig(ui, repo, listpats, opts, rename)

     lfdirstate = lfutil.openlfdirstate(ui, repo)
-    for src, dest in copiedfiles:
+    for (src, dest) in copiedfiles:
         if lfutil.shortname in src and dest.startswith(
             repo.wjoin(lfutil.shortname)
         ):
@@ -140,6 +140,7 @@ def reposetup(ui, repo):
             wlock = util.nullcontextmanager()
             gotlock = False
             with wlock, self.dirstate.running_status(self):
+
                 # First check if paths or patterns were specified on the
                 # command line. If there were, and they don't match any
                 # largefiles, we should just bail here and let super
@@ -37,7 +37,6 @@ from mercurial import (
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)

-
 # Serve a changegroup for a client with a narrow clone.
 def getbundlechangegrouppart_narrow(
     bundler,
@@ -543,6 +543,7 @@ class notifier:
         )

     def diff(self, ctx, ref=None):
+
         maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
         prev = ctx.p1().node()
         if ref:
@@ -261,6 +261,7 @@ def makepatch(
     numbered,
     patchname=None,
 ):
+
     desc = []
     node = None
     body = b''
@@ -830,6 +830,7 @@ class rebaseruntime:
                     cleanup = False

             if cleanup:
+
                 if rebased:
                     strippoints = [
                         c.node() for c in repo.set(b'roots(%ld)', rebased)
@@ -45,7 +45,7 @@ LARGEFANOUTPREFIX = 2
 # bisect) with (8 step fanout scan + 1 step bisect)
 # 5 step bisect = log(2^16 / 8 / 255) # fanout
 # 10 step fanout scan = 2^16 / (2^16 / 8) # fanout space divided by entries
-SMALLFANOUTCUTOFF = 2**16 // 8
+SMALLFANOUTCUTOFF = 2 ** 16 // 8

 # The amount of time to wait between checking for new packs. This prevents an
 # exception when data is moved to a new pack after the process has already
@@ -275,7 +275,7 @@ class versionmixin:
 class basepack(versionmixin):
     # The maximum amount we should read via mmap before remmaping so the old
     # pages can be released (100MB)
-    MAXPAGEDIN = 100 * 1024**2
+    MAXPAGEDIN = 100 * 1024 ** 2

     SUPPORTED_VERSIONS = [2]

@@ -38,6 +38,7 @@ class connectionpool:
                 pass

         if conn is None:
+
             peer = hg.peer(self._repo.ui, {}, path)
             if hasattr(peer, '_cleanup'):

@@ -414,7 +414,7 @@ class mutabledatapack(basepack.mutableba

     def add(self, name, node, deltabasenode, delta, metadata=None):
         # metadata is a dict, ex. {METAKEYFLAG: flag}
-        if len(name) > 2**16:
+        if len(name) > 2 ** 16:
             raise RuntimeError(_(b"name too long %s") % name)
         if len(node) != 20:
             raise RuntimeError(_(b"node should be 20 bytes %s") % node)
@@ -41,6 +41,7 @@ class remotefilelognodemap:


 class remotefilelog:
+
     _flagserrorclass = error.RevlogError

     def __init__(self, opener, path, repo):
@@ -32,7 +32,6 @@ from . import (
     shallowutil,
 )

-
 # These make*stores functions are global so that other extensions can replace
 # them.
 def makelocalstores(repo):
@@ -259,6 +259,7 @@ def extsetup(ui):


 def reposetup(ui, repo):
+
     # set the config option to store remotenames
     repo.ui.setconfig(b'experimental', b'remotenames', True, b'remotenames-ext')

@@ -649,6 +649,7 @@ class sqlitefilestore:
             deltamode=deltamode,
             sidedata_helpers=sidedata_helpers,
         ):
+
             yield delta

     # End of ifiledata interface.
@@ -154,6 +154,7 @@ def uncommit(ui, repo, *pats, **opts):
     cmdutil.resolve_commit_options(ui, opts)

     with repo.wlock(), repo.lock():
+
         st = repo.status()
         m, a, r, d = st.modified, st.added, st.removed, st.deleted
         isdirtypath = any(set(m + a + r + d) & set(pats))
@@ -263,6 +264,7 @@ def unamend(ui, repo, **opts):

     unfi = repo.unfiltered()
     with repo.wlock(), repo.lock(), repo.transaction(b'unamend'):
+
         # identify the commit from which to unamend
         curctx = repo[b'.']

@@ -1307,7 +1307,6 @@ class ServiceInfo:
         delay = _LISTENER_TIME
         next = now + delay
         last = now + timeout
-        result = False
         try:
             zeroconf.addListener(
                 self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)
@@ -1353,7 +1352,7 @@ class ServiceInfo:

                 zeroconf.wait(min(next, last) - now)
                 now = currentTimeMillis()
-            result = True
+            result = 1
         finally:
             zeroconf.removeListener(self)

@@ -64,6 +64,7 @@ if sys.version_info[:2] < (3, 0):
     def u(s):
         return unicode(s, "unicode_escape")

+
 else:
     PY3 = True
     text_type = str
@@ -1888,6 +1889,7 @@ class TextWrapper(textwrap.TextWrapper):
         chunks.reverse()

         while chunks:
+
             # Start the list of chunks that will make up the current line.
             # cur_len is just the length of all the chunks in cur_line.
             cur_line = []
@@ -88,7 +88,7 @@ def ancestors(pfunc, *orignodes):
     depth = [0] * count
     seen = [0] * count
     mapping = []
-    for i, n in enumerate(sorted(nodes)):
+    for (i, n) in enumerate(sorted(nodes)):
         depth[n] = 1
         b = 1 << i
         seen[n] = b
@@ -685,7 +685,7 @@ def mirroring_remote(ui, repo, remotemar
     remotemarks"""
     changed = []
     localmarks = repo._bookmarks
-    for b, id in remotemarks.items():
+    for (b, id) in remotemarks.items():
         if id != localmarks.get(b, None) and id in repo:
             changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
     for b in localmarks:
@@ -1286,6 +1286,7 @@ class interrupthandler(unpackermixin):
         return None

     def __call__(self):
+
         self.ui.debug(
             b'bundle2-input-stream-interrupt: opening out of band context\n'
         )
@@ -2613,6 +2614,7 @@ def bundle2getvars(op, part):

 @parthandler(b'stream2', (b'requirements', b'filecount', b'bytecount'))
 def handlestreamv2bundle(op, part):
+
     requirements = urlreq.unquote(part.params[b'requirements'])
     requirements = requirements.split(b',') if requirements else []
     filecount = int(part.params[b'filecount'])
@@ -408,7 +408,7 @@ class bundlerepository:
         with os.fdopen(fdtemp, 'wb') as fptemp:
             fptemp.write(header)
             while True:
-                chunk = readfn(2**18)
+                chunk = readfn(2 ** 18)
                 if not chunk:
                     break
                 fptemp.write(chunk)
@@ -407,7 +407,7 @@ class cg1unpacker:
             yield chunkheader(len(chunk))
             pos = 0
             while pos < len(chunk):
-                next = pos + 2**20
+                next = pos + 2 ** 20
                 yield chunk[pos:next]
                 pos = next
         yield closechunk()
@@ -151,7 +151,7 @@ def _getmtimepaths(ui):
     """
     modules = [m for n, m in extensions.extensions(ui)]
     try:
-        from . import __version__
+        from . import __version__

         modules.append(__version__)
     except ImportError:
@@ -3833,6 +3833,7 @@ def _performrevert(
     original_headers = patch.parsepatch(diff)

     try:
+
         chunks, opts = recordfilter(
             repo.ui, original_headers, match, operation=operation
         )
@@ -915,14 +915,11 @@ class branch_copies:
         self.movewithdir = {} if movewithdir is None else movewithdir

     def __repr__(self):
-        return (
-            '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>'
-            % (
-                self.copy,
-                self.renamedelete,
-                self.dirmove,
-                self.movewithdir,
-            )
+        return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % (
+            self.copy,
+            self.renamedelete,
+            self.dirmove,
+            self.movewithdir,
         )


@@ -136,6 +136,7 @@ CHANGE_TYPE_FILES = "files"

 @interfaceutil.implementer(intdirstate.idirstate)
 class dirstate:
+
     # used by largefile to avoid overwritting transaction callback
     _tr_key_suffix = b''

@@ -879,6 +880,7 @@ class dirstate:
         possibly_dirty=False,
         parentfiledata=None,
     ):
+
         # note: I do not think we need to double check name clash here since we
         # are in a update/merge case that should already have taken care of
         # this. The test agrees
@@ -1090,6 +1092,7 @@ class dirstate:

         write_key = self._use_tracked_hint and self._dirty_tracked_set
         if tr:
+
             self._setup_tr_abort(tr)
             self._attached_to_a_transaction = True

@@ -1283,7 +1286,7 @@ class dirstate:
                     badfn(ff, badtype(kind))
                 if nf in dmap:
                     results[nf] = None
-            except OSError as inst:
+            except (OSError) as inst:
                 # nf not found on disk - it is dirstate only
                 if nf in dmap:  # does it exactly match a missing file?
                     results[nf] = None
@@ -331,7 +331,7 @@ class dirstatemap(_dirstatemapcommon):

         `all` is unused when Rust is not enabled
         """
-        for filename, item in self.items():
+        for (filename, item) in self.items():
             yield (filename, item.state, item.mode, item.size, item.mtime)

     def keys(self):
@@ -617,8 +617,7 @@ class dirstatemap(_dirstatemapcommon):

         This should also drop associated copy information

-        The fact we actually need to drop it is the responsability of the caller
-        """
+        The fact we actually need to drop it is the responsability of the caller"""
         self._map.pop(f, None)
         self.copymap.pop(f, None)

@@ -626,6 +625,7 @@ class dirstatemap(_dirstatemapcommon):
 if rustmod is not None:

     class dirstatemap(_dirstatemapcommon):
+
         ### Core data storage and access

         @propertycache
@@ -367,6 +367,7 @@ if pycompat.iswindows:
         cwd = cwd[0:1].upper() + cwd[1:]
         return cwd

+
 else:
     getcwd = os.getcwdb  # re-exports

@@ -290,7 +290,7 @@ def loadall(ui, whitelist=None):
     with util.timedcm('load all extensions') as stats:
         default_sub_options = ui.configsuboptions(b"extensions", b"*")[1]

-        for name, path in result:
+        for (name, path) in result:
             if path:
                 if path[0:1] == b'!':
                     if name not in _disabledextensions:
@@ -175,6 +175,7 @@ class filelog:
         )

         with self._revlog._writing(transaction):
+
             if self._fix_issue6528:
                 deltas = rewrite.filter_delta_issue6528(self._revlog, deltas)

@@ -176,6 +176,7 @@ class _nullconverter:


 class baseformatter:
+
     # set to True if the formater output a strict format that does not support
     # arbitrary output in the stream.
     strict_format = False
@@ -420,6 +421,7 @@ class cborformatter(baseformatter):


 class jsonformatter(baseformatter):
+
     strict_format = True

     def __init__(self, ui, out, topic, opts):
@@ -133,7 +133,8 @@ def colored(dag, repo):
     else:
         getconf = lambda rev: {}

-    for cur, type, data, parents in dag:
+    for (cur, type, data, parents) in dag:
+
         # Compute seen and next
         if cur not in seen:
             seen.append(cur)  # new head
@@ -243,7 +244,7 @@ def asciiedges(type, char, state, rev, p


 def _fixlongrightedges(edges):
-    for i, (start, end) in enumerate(edges):
+    for (i, (start, end)) in enumerate(edges):
         if end > start:
             edges[i] = (start, end + 1)

@@ -264,7 +265,7 @@ def _getnodelineedgestail(echars, idx, p


 def _drawedges(echars, edges, nodeline, interline):
-    for start, end in edges:
+    for (start, end) in edges:
         if start == end + 1:
             interline[2 * end + 1] = b"/"
         elif start == end - 1:
@@ -380,7 +381,7 @@ def outputgraph(ui, graph):
     this function can be monkey-patched by extensions to alter graph display
     without needing to mimic all of the edge-fixup logic in ascii()
     """
-    for ln, logstr in graph:
+    for (ln, logstr) in graph:
         ui.write((ln + logstr).rstrip() + b"\n")


@@ -120,6 +120,7 @@ def rawindexentries(ui, repos, req, subd
     seenrepos = set()
     seendirs = set()
     for name, path in repos:
+
         if not name.startswith(subdir):
             continue
         name = name[len(subdir) :]
@@ -66,6 +66,7 @@ class _error_logger:


 class _httprequesthandler(httpservermod.basehttprequesthandler):
+
     url_scheme = b'http'

     @staticmethod
@@ -357,6 +358,7 @@ def openlog(opt, default):


 class MercurialHTTPServer(_mixin, httpservermod.httpserver, object):
+
     # SO_REUSEADDR has broken semantics on windows
     if pycompat.iswindows:
         allow_reuse_address = 0
@@ -394,6 +396,7 @@ class IPv6HTTPServer(MercurialHTTPServer


 def create_server(ui, app):
+
     if ui.config(b'web', b'certificate'):
         handler = _httprequesthandlerssl
     else:
@@ -601,6 +601,7 @@ def manifest(web):

     def dirlist(context):
         for d in sorted(dirs):
+
             emptydirs = []
             h = dirs[d]
             while isinstance(h, dict) and len(h) == 1:
@@ -1426,7 +1427,7 b' def graph(web):'
         return tree
 
     def jsdata(context):
-        for id, type, ctx, vtx, edges in fulltree():
+        for (id, type, ctx, vtx, edges) in fulltree():
             yield {
                 b'node': pycompat.bytestr(ctx),
                 b'graphnode': webutil.getgraphnode(web.repo, ctx),
@@ -25,7 +25,6 b' from .utils import ('
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-
 # moved here from url.py to avoid a cycle
 class httpsendfile:
     """This is a wrapper around the objects returned by python's "open".
@@ -119,5 +119,6 b' if _plain():'
     def _(message: bytes) -> bytes:
         return message
 
+
 else:
     _ = gettext
@@ -53,6 +53,7 b' class annotateresult:'
 
 
 class _llinstruction:  # pytype: disable=ignored-metaclass
+
     __metaclass__ = abc.ABCMeta
 
     @abc.abstractmethod
@@ -420,11 +420,11 b' def checkpathconflicts(repo, wctx, mctx,'
     # Track the names of all deleted files.
     for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
         deletedfiles.add(f)
-    for f, args, msg in mresult.getactions((mergestatemod.ACTION_MERGE,)):
+    for (f, args, msg) in mresult.getactions((mergestatemod.ACTION_MERGE,)):
         f1, f2, fa, move, anc = args
         if move:
             deletedfiles.add(f1)
-    for f, args, msg in mresult.getactions(
+    for (f, args, msg) in mresult.getactions(
         (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,)
     ):
         f2, flags = args
@@ -495,6 +495,7 b' class _mergestate_base:'
 
 
 class mergestate(_mergestate_base):
+
     statepathv1 = b'merge/state'
     statepathv2 = b'merge/state2'
 
@@ -433,12 +433,14 b' def _process_merge(p1_ctx, p2_ctx, ctx):'
     # Iteration over d1 content will deal with all cases, but the one in the
     # first column of the table.
     for filename, d1 in diff_p1.items():
+
         d2 = diff_p2.pop(filename, None)
 
         if d2 is None:
             # this deal with the first line of the table.
             _process_other_unchanged(md, mas, filename, d1)
         else:
+
             if d1[0][0] is None and d2[0][0] is None:
                 # case 🄼 — both deleted the file.
                 md.mark_added(filename)
@@ -225,6 +225,7 b' def copytoworkingcopy(repo):'
         m = "changing narrow spec outside of a transaction"
         raise error.ProgrammingError(m)
     else:
+
         reporef = weakref.ref(repo)
 
         def clean_pending(tr):
@@ -1038,6 +1038,7 b' def _computecontentdivergentset(repo):'
 
 
 def makefoldid(relation, user):
+
     folddigest = hashutil.sha1(user)
     for p in relation[0] + relation[1]:
         folddigest.update(b'%d' % p.rev())
@@ -961,6 +961,7 b' def _getfilteredreason(repo, changeid, c'
         single_successor = short(successors[0][0])
         return filteredmsgtable[b'superseded'] % (changeid, single_successor)
     elif fate == b'superseded_split':
+
         succs = []
         for node_id in successors[0]:
             succs.append(short(node_id))
@@ -229,6 +229,7 b' def extract(ui, fileobj):'
 
 
 def _extract(ui, fileobj, tmpname, tmpfp):
+
     # attempt to detect the start of a patch
     # (this heuristic is borrowed from quilt)
     diffre = re.compile(
@@ -595,7 +596,7 b' class filestore:'
         self.created = 0
         self.maxsize = maxsize
         if self.maxsize is None:
-            self.maxsize = 4 * (2**20)
+            self.maxsize = 4 * (2 ** 20)
         self.size = 0
         self.data = {}
 
@@ -893,6 +893,7 b' class phasecache:'
 
         this_phase_set = self._phasesets[targetphase]
         for r in range(start, end):
+
             # gather information about the current_rev
             r_phase = phase(repo, r)
             p_phase = None  # phase inherited from parents
@@ -37,7 +37,7 b" policy = b'allow'"
 }
 
 try:
-    from . import __modulepolicy__
+    from . import __modulepolicy__
 
     policy = __modulepolicy__.modulepolicy
 except ImportError:
@@ -911,6 +911,7 b' class IndexChangelogV2(IndexObject2):'
     )
 
     def _pack_entry(self, rev, entry):
+
         base = entry[revlog_constants.ENTRY_DELTA_BASE]
         link_rev = entry[revlog_constants.ENTRY_LINK_REV]
         assert base == rev, (base, rev)
@@ -226,6 +226,7 b" if hasattr(parsers, 'parse_index_devel_n"
         index, cache = parsers.parse_index_devel_nodemap(data, inline)
         return index, cache
 
+
 else:
     parse_index_v1_nodemap = None
 
@@ -428,6 +428,7 b' def _slicechunktodensity(revlog, revs, t'
     # Cut the revs at collected indices
     previdx = 0
     for idx in selected:
+
         chunk = _trimchunk(revlog, revs, previdx, idx)
         if chunk:
             yield chunk
@@ -553,7 +553,7 b' def _walk_trie(block):'
 
     Children blocks are always yield before their parent block.
     """
-    for __, item in sorted(block.items()):
+    for (__, item) in sorted(block.items()):
         if isinstance(item, dict):
             for sub_block in _walk_trie(item):
                 yield sub_block
@@ -258,6 +258,7 b' def _precompute_rewritten_delta('
             # this revision is empty, we can delta against nullrev
             rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
         else:
+
             text = revlog.rawdata(rev)
             info = revlogutils.revisioninfo(
                 node=entry[ENTRY_NODE_ID],
@@ -730,6 +730,7 b' def _filterederror(repo, changeid):'
     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith(b'visible'):
+
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)
@@ -190,6 +190,7 b' class partialdiscovery:'
         return getparents
 
     def _childrengetter(self):
+
         if self._childrenmap is not None:
             # During discovery, the `undecided` set keep shrinking.
             # Therefore, the map computed for an iteration N will be
@@ -453,6 +454,7 b' def findcommonheads('
     full = not initial_head_exchange
     progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
     while not disco.iscomplete():
+
         if full or disco.hasinfo():
             if full:
                 ui.note(_(b"sampling from both directions\n"))
@@ -118,14 +118,14 b' def findrenames(repo, added, removed, th'
 
     # Find exact matches.
     matchedfiles = set()
-    for a, b in _findexactmatches(repo, addedfiles, removedfiles):
+    for (a, b) in _findexactmatches(repo, addedfiles, removedfiles):
         matchedfiles.add(b)
         yield (a.path(), b.path(), 1.0)
 
     # If the user requested similar files to be matched, search for them also.
     if threshold < 1.0:
         addedfiles = [x for x in addedfiles if x not in matchedfiles]
-        for a, b, score in _findsimilarmatches(
+        for (a, b, score) in _findsimilarmatches(
             repo, addedfiles, removedfiles, threshold
         ):
             yield (a.path(), b.path(), score)
@@ -497,6 +497,7 b' def wrapsocket(sock, keyfile, certfile, '
             )
 
         elif e.reason == 'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows:
+
             ui.warn(
                 _(
                     b'(the full certificate chain may not be available '
@@ -37,7 +37,7 b' from .utils import hashutil'
 parsers = policy.importmod('parsers')
 # how much bytes should be read from fncache in one read
 # It is done to prevent loading large fncache files into memory
-fncache_chunksize = 10**6
+fncache_chunksize = 10 ** 6
 
 
 def _match_tracked_entry(entry, matcher):
@@ -547,7 +547,6 b' class streamcloneapplier:'
 _srcstore = b's'  # store (svfs)
 _srccache = b'c'  # cache (cache)
 
-
 # This is it's own function so extensions can override it.
 def _walkstreamfullstorefiles(repo):
     """list snapshot file from the store"""
@@ -810,6 +809,7 b' def generatev2(repo, includes, excludes,'
     """
 
     with repo.lock():
+
         repo.ui.debug(b'scanning\n')
 
         entries = _entries_walk(
@@ -857,6 +857,7 b' def generatev3(repo, includes, excludes,'
     # considering the files to preserve, disabling the gc while we do so helps
     # performance a lot.
     with repo.lock(), util.nogc():
+
         repo.ui.debug(b'scanning\n')
 
         entries = _entries_walk(
@@ -989,6 +990,7 b' def consumev3(repo, fp):'
     with repo.transaction(b'clone'):
         ctxs = (vfs.backgroundclosing(repo.ui) for vfs in vfsmap.values())
         with nested(*ctxs):
+
             for i in range(entrycount):
                 filecount = util.uvarintdecodestream(fp)
                 if filecount == 0:
@@ -1121,6 +1123,7 b' def local_copy(src_repo, dest_repo):'
 
     with dest_repo.lock():
         with src_repo.lock():
+
             # bookmark is not integrated to the streaming as it might use the
             # `repo.vfs` and they are too many sentitive data accessible
             # through `repo.vfs` to expose it to streaming clone.
@@ -63,6 +63,7 b' def strip('
     soft=False,
 ):
     with repo.wlock(), repo.lock():
+
         if update:
             checklocalchanges(repo, force=force)
             urev = _findupdatetarget(repo, revs)
@@ -1227,12 +1227,16 b' class svnsubrepo(abstractsubrepo):'
                 externals.append(path)
             elif item == 'missing':
                 missing.append(path)
-            if item not in (
-                '',
-                'normal',
-                'unversioned',
-                'external',
-            ) or props not in ('', 'none', 'normal'):
+            if (
+                item
+                not in (
+                    '',
+                    'normal',
+                    'unversioned',
+                    'external',
+                )
+                or props not in ('', 'none', 'normal')
+            ):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -601,7 +601,7 b' def _writetagcache(ui, repo, valid, cach'
         # we keep them in UTF-8 throughout this module. If we converted
         # them local encoding on input, we would lose info writing them to
         # the cache.
-        for name, (node, hist) in sorted(cachetags.items()):
+        for (name, (node, hist)) in sorted(cachetags.items()):
             for n in hist:
                 cachefile.write(b"%s %s\n" % (hex(n), name))
             cachefile.write(b"%s %s\n" % (hex(node), name))
@@ -194,6 +194,7 b' def upgraderepo('
             onlydefault.append(d)
 
     if fromconfig or onlydefault:
+
         if fromconfig:
             ui.status(
                 _(
@@ -109,7 +109,6 b' httpserver._registeraliases('
     ),
 )
 
-
 # urllib.parse.quote() accepts both str and bytes, decodes bytes
 # (if necessary), and returns str. This is wonky. We provide a custom
 # implementation that only accepts bytes and emits bytes.
@@ -1152,7 +1152,7 b' def makeloggingsocket('
 def version():
     """Return version information if available."""
     try:
-        from . import __version__
+        from . import __version__
 
         return __version__.version
     except ImportError:
@@ -1328,7 +1328,7 b' class sortdict(collections.OrderedDict):'
             self[k] = f[k]
 
     def insert(self, position, key, value):
-        for i, (k, v) in enumerate(list(self.items())):
+        for (i, (k, v)) in enumerate(list(self.items())):
             if i == position:
                 self[key] = value
             if i >= position:
@@ -2724,10 +2724,10 b' class chunkbuffer:'
 
         def splitbig(chunks):
             for chunk in chunks:
-                if len(chunk) > 2**20:
+                if len(chunk) > 2 ** 20:
                     pos = 0
                     while pos < len(chunk):
-                        end = pos + 2**18
+                        end = pos + 2 ** 18
                         yield chunk[pos:end]
                         pos = end
                 else:
@@ -2751,7 +2751,7 b' class chunkbuffer:'
         while left > 0:
             # refill the queue
             if not queue:
-                target = 2**18
+                target = 2 ** 18
                 for chunk in self.iter:
                     queue.append(chunk)
                     target -= len(chunk)
3081 |
|
3081 | |||
3082 |
|
3082 | |||
3083 | _sizeunits = ( |
|
3083 | _sizeunits = ( | |
3084 | (b'm', 2**20), |
|
3084 | (b'm', 2 ** 20), | |
3085 | (b'k', 2**10), |
|
3085 | (b'k', 2 ** 10), | |
3086 | (b'g', 2**30), |
|
3086 | (b'g', 2 ** 30), | |
3087 | (b'kb', 2**10), |
|
3087 | (b'kb', 2 ** 10), | |
3088 | (b'mb', 2**20), |
|
3088 | (b'mb', 2 ** 20), | |
3089 | (b'gb', 2**30), |
|
3089 | (b'gb', 2 ** 30), | |
3090 | (b'b', 1), |
|
3090 | (b'b', 1), | |
3091 | ) |
|
3091 | ) | |
3092 |
|
3092 |
@@ -511,7 +511,7 b' class _zlibengine(compressionengine):'
         parts = []
         pos = 0
         while pos < insize:
-            pos2 = pos + 2**20
+            pos2 = pos + 2 ** 20
             parts.append(z.compress(data[pos:pos2]))
             pos = pos2
         parts.append(z.flush())
@@ -711,6 +711,7 b' if pycompat.iswindows:'
             if stdin is not None:
                 stdin.close()
 
+
 else:
 
     def runbgcommand(
@@ -52,6 +52,7 b' if mainfrozen() and getattr(sys, "frozen'
         assert dirs[0] == b"mercurial"
         return os.path.join(_rootpath, *dirs[1:])
 
+
 else:
     datapath = os.path.dirname(os.path.dirname(pycompat.fsencode(__file__)))
     _rootpath = os.path.dirname(datapath)
@@ -97,6 +98,7 b' except (ImportError, AttributeError):'
         for p in os.listdir(path):
             yield pycompat.fsencode(p)
 
+
 else:
     from .. import encoding
 
@@ -574,6 +574,7 b' def parsemailmap(mailmapcontent):'
         return mailmap
 
     for line in mailmapcontent.splitlines():
+
         # Don't bother checking the line if it is a comment or
         # is an improperly formed author field
         if line.lstrip().startswith(b'#'):
800 | chunks.reverse() |
|
801 | chunks.reverse() | |
801 |
|
802 | |||
802 | while chunks: |
|
803 | while chunks: | |
|
804 | ||||
803 | # Start the list of chunks that will make up the current line. |
|
805 | # Start the list of chunks that will make up the current line. | |
804 | # cur_len is just the length of all the chunks in cur_line. |
|
806 | # cur_len is just the length of all the chunks in cur_line. | |
805 | cur_line = [] |
|
807 | cur_line = [] |
@@ -172,7 +172,6 b' CERT_TRUST_IS_PARTIAL_CHAIN = 0x10000'
 X509_ASN_ENCODING = 0x00000001
 PKCS_7_ASN_ENCODING = 0x00010000
 
-
 # These structs are only complete enough to achieve what we need.
 class CERT_CHAIN_CONTEXT(ctypes.Structure):
     _fields_ = (
@@ -369,7 +368,7 b' def _raiseoserror(name: bytes) -> NoRetu'
     # See https://bugs.python.org/issue28474
     code = _kernel32.GetLastError()
     if code > 0x7FFFFFFF:
-        code -= 2**32
+        code -= 2 ** 32
     err = ctypes.WinError(code=code)  # pytype: disable=module-attr
     raise OSError(
         err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror)
@@ -546,7 +546,7 b' def createtextoutputframe('
     """
     atomdicts = []
 
-    for formatting, args, labels in atoms:
+    for (formatting, args, labels) in atoms:
         # TODO look for localstr, other types here?
 
         if not isinstance(formatting, bytes):
1198 | b'%s' % stringutil.forcebytestr(e), |
|
1198 | b'%s' % stringutil.forcebytestr(e), | |
1199 | errtype=b'server', |
|
1199 | errtype=b'server', | |
1200 | ): |
|
1200 | ): | |
|
1201 | ||||
1201 | yield frame |
|
1202 | yield frame | |
1202 |
|
1203 | |||
1203 | break |
|
1204 | break | |
@@ -1258,6 +1259,7 b' class serverreactor:' | |||||
1258 | for chunk in cborutil.streamencodebytestringfromiter( |
|
1259 | for chunk in cborutil.streamencodebytestringfromiter( | |
1259 | o.chunks |
|
1260 | o.chunks | |
1260 | ): |
|
1261 | ): | |
|
1262 | ||||
1261 | for frame in emitter.send(chunk): |
|
1263 | for frame in emitter.send(chunk): | |
1262 | yield frame |
|
1264 | yield frame | |
1263 |
|
1265 |
NO CONTENT: file renamed from rust/.cargo/config.toml to rust/.cargo/config
@@ -332,7 +332,9 b' impl Node {'
     ) -> Result<usize, DirstateV2ParseError> {
         let start = self.base_name_start.get();
         if start < self.full_path.len.get() {
-            let start = usize::from(start);
+            let start = usize::try_from(start)
+                // u32 -> usize, could only panic on a 16-bit CPU
+                .expect("dirstate-v2 base_name_start out of bounds");
             Ok(start)
         } else {
             Err(DirstateV2ParseError::new("not enough bytes for base name"))
@@ -591,8 +593,8 b' where'
 {
     // Either `usize::MAX` would result in "out of bounds" error since a single
     // `&[u8]` cannot occupy the entire addess space.
-    let start = start.get().try_into().unwrap_or(usize::MAX);
-    let len = len.try_into().unwrap_or(usize::MAX);
+    let start = start.get().try_into().unwrap_or(std::usize::MAX);
+    let len = len.try_into().unwrap_or(std::usize::MAX);
     let bytes = match on_disk.get(start..) {
         Some(bytes) => bytes,
         None => {
@@ -677,14 +677,14 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
                 // The directory was modified too recently,
                 // don’t cache its `read_dir` results.
                 //
-                // 1. A change to this directory (direct child was
-                //    removed) cause its mtime to be set
-                //    to `directory_mtime`
+                // 1. A change to this directory (direct child was
+                //    added or removed) cause its mtime to be set
+                //    (possibly truncated) to `directory_mtime`
                 // 2. This `status` algorithm calls `read_dir`
-                // 3. An other change is made to the same directory is
-                //    that calling `read_dir` agin would give
-                //    results, but soon enough after 1. that
-                //    the same
+                // 3. An other change is made to the same directory is
+                //    made so that calling `read_dir` agin would give
+                //    different results, but soon enough after 1. that
+                //    the mtime stays the same
                 //
                 // On a system where the time resolution poor, this
                 // scenario is not unlikely if all three steps are caused
@@ -617,11 +617,7 b' impl IntersectionMatcher {'
                 std::mem::swap(&mut m1, &mut m2);
             }
             m1.file_set().map(|m1_files| {
-                m1_files
-                    .iter()
-                    .filter(|&f| m2.matches(f))
-                    .cloned()
-                    .collect()
+                m1_files.iter().cloned().filter(|f| m2.matches(f)).collect()
             })
         } else {
             // without exact input file sets, we can't do an exact
@@ -714,7 +710,7 b' impl DifferenceMatcher {'
         };
         if base_is_exact {
             new.files = base_files.map(|files| {
-                files.iter().filter(|f| new.matches(f)).cloned().collect()
+                files.iter().cloned().filter(|f| new.matches(f)).collect()
             });
         }
         new
@@ -713,7 +713,7 b' message",'
 
     for (extra, msg) in test_cases {
         assert!(
-            decode_extra(extra).is_err(),
+            decode_extra(&extra).is_err(),
             "corrupt extra should have failed to parse: {}",
             msg
         );
@@ -1387,7 +1387,6 b' trait PoisonableBitSet: Sized + PartialE'
     fn vec_of_empty(sets_size: usize, vec_len: usize) -> Vec<Self>;
 
     /// The size of the bit mask in memory
-    #[allow(unused)]
     fn size(&self) -> usize;
 
     /// The number of elements that can be represented in the set.
@@ -1395,14 +1394,12 b' trait PoisonableBitSet: Sized + PartialE'
     /// Another way to put it is that it is the highest integer `C` such that
     /// the set is guaranteed to always be a subset of the integer range
     /// `[0, C)`
-    #[allow(unused)]
     fn capacity(&self) -> usize;
 
     /// Declare `n` to belong to the set
     fn add(&mut self, n: usize);
 
     /// Declare `n` not to belong to the set
-    #[allow(unused)]
     fn discard(&mut self, n: usize);
 
     /// Replace this bit set by its union with other
@@ -1752,9 +1749,6 b" impl<'a> IndexEntry<'a> {"
 }
 
 #[cfg(test)]
-pub use tests::IndexEntryBuilder;
-
-#[cfg(test)]
 mod tests {
     use super::*;
     use crate::node::NULL_NODE;
@@ -2033,3 +2027,6 b' mod tests {'
         assert_eq!(get_version(&bytes), 2)
     }
 }
+
+#[cfg(test)]
+pub use tests::IndexEntryBuilder;
@@ -83,7 +83,7 b" impl<'a> TryFrom<&'a [u8]> for &'a Node "
     #[inline]
     fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> {
         match Node::from_bytes(bytes) {
-            Ok((node,
+            Ok((node, rest)) if rest.is_empty() => Ok(node),
             _ => Err(()),
         }
     }
@@ -323,9 +323,6 b' impl PartialEq<Node> for NodePrefix {'
 }
 
 #[cfg(test)]
-pub use tests::hex_pad_right;
-
-#[cfg(test)]
 mod tests {
     use super::*;
 
@@ -431,3 +428,6 b' mod tests {'
         assert_eq!(prefix.first_different_nybble(&node), None);
     }
 }
+
+#[cfg(test)]
+pub use tests::hex_pad_right;
@@ -19,8 +19,8 b''
 //! `mercurial.ancestor.incrementalmissingancestors`.
 //!
 //! API differences:
-//! + it is instantiated with a C `parsers.index` instance instead of a
-//!   parents function.
+//! + it is instantiated with a C `parsers.index`
+//!   instance instead of a parents function.
 //! + `MissingAncestors.bases` is a method returning a tuple instead of
 //!   a set-valued attribute. We could return a Python set easily if our
 //!   [PySet PR](https://github.com/dgrunwald/rust-cpython/pull/165)
@@ -463,6 +463,7 b' class hgbuild(build):'
 
 
 class hgbuildmo(build):
+
     description = "build translations (.mo files)"
 
     def run(self):
1055 |
|
1056 | |||
1056 |
|
1057 | |||
1057 | class hginstall(install): |
|
1058 | class hginstall(install): | |
|
1059 | ||||
1058 | user_options = install.user_options + [ |
|
1060 | user_options = install.user_options + [ | |
1059 | ( |
|
1061 | ( | |
1060 | 'old-and-unmanageable', |
|
1062 | 'old-and-unmanageable', |
@@ -26,6 +26,7 b" if os.environ.get('HGIPV6', '0') == '1':"
     class simplehttpserver(httpserver.httpserver):
         address_family = socket.AF_INET6
 
+
 else:
     simplehttpserver = httpserver.httpserver
 
@@ -31,15 +31,8 b' def log(msg):'
 def mocksmtpserversession(conn, addr):
     conn.send(b'220 smtp.example.com ESMTP\r\n')
 
-    try:
-        # Newer versions of OpenSSL raise on EOF
-        line = conn.recv(1024)
-    except ssl.SSLError:
-        log('no hello: EOF\n')
-        return
-
+    line = conn.recv(1024)
     if not line.lower().startswith(b'ehlo '):
-        # Older versions of OpenSSl don't raise
         log('no hello: %s\n' % line)
         return
 
@@ -30,6 +30,7 b' if sys.version_info > (3, 5, 0):'
     def _sys2bytes(p):
         return p.encode('utf-8')
 
+
 elif sys.version_info >= (3, 0, 0):
     print(
         '%s is only supported on Python 3.5+ and 2.7, not %s'
@@ -33,7 +33,6 b''
 import os
 import sys
 
-
 # Generates pairs of (filename, contents), where 'contents' is a list
 # describing the file's content at each revision (or in the working copy).
 # At each revision, it is either None or the file's actual content. When not
@@ -1113,13 +1113,13 b' def has_emacs():'
     return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)')
 
 
-@check('black', 'the black formatter for python
+@check('black', 'the black formatter for python (>= 20.8b1)')
 def has_black():
     blackcmd = 'black --version'
     version_regex = b'black, (?:version )?([0-9a-b.]+)'
     version = matchoutput(blackcmd, version_regex)
     sv = distutils.version.StrictVersion
-    return version and sv(_bytes2sys(version.group(1))) >= sv('2
+    return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
 
 
 @check('pytype', 'the pytype type checker')
@@ -75,6 +75,7 b" if os.name == 'nt':"
             raise
         _check(ctypes.windll.kernel32.CloseHandle(handle))
 
+
 else:
 
     def kill(pid, logfn, tryhard=True):
@@ -1,6 +1,5 b''
 from mercurial.utils import procutil
 
-
 # XXX: we should probably offer a devel option to do this in blackbox directly
 def getuser():
     return b'bob'
@@ -223,7 +223,6 b' if WINDOWS:'
 # For Windows support
 wifexited = getattr(os, "WIFEXITED", lambda x: False)
 
-
 # Whether to use IPv6
 def checksocketfamily(name, port=20058):
     """return true if we can listen on localhost using family=name
@@ -3398,6 +3397,7 b' class TestRunner:'
                 os.path.basename(t).startswith(b'test-')
                 and (t.endswith(b'.py') or t.endswith(b'.t'))
             ):
+
                 m = testcasepattern.match(os.path.basename(t))
                 if m is not None:
                     t_basename, casestr = m.groups()
@@ -87,7 +87,6 b' def test_missingancestors(seed, rng):'
     testcount = 10
     inccount = 10
     nerrs = [0]
-
     # the default mu and sigma give us a nice distribution of mostly
     # single-digit counts (including 0) with some higher ones
     def lognormrandom(mu, sigma):
@@ -55,6 +55,7 b' class localthing(thing):'
 
 # usage of "thing" interface
 def use(it):
+
     # Direct call to base method shared between client and server.
     bprint(it.hello())
 
@@ -105,7 +106,6 b' def unescapearg(escaped):'
 
 # server side
 
-
 # equivalent of wireproto's global functions
 class server:
     def __init__(self, local):
@@ -156,7 +156,6 b' myserver = server(mylocal)'
 
 # local side
 
-
 # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling
 # here we just transform the strings a bit to check we're properly en-/decoding
 def mangle(s):
@@ -216,11 +216,11 b' class BytestringTests(TestCase):'
         for size in lens:
             if size < 24:
                 hlen = 1
-            elif size < 2**8:
+            elif size < 2 ** 8:
                 hlen = 2
-            elif size < 2**16:
+            elif size < 2 ** 16:
                 hlen = 3
-            elif size < 2**32:
+            elif size < 2 ** 32:
                 hlen = 5
             else:
                 assert False
@@ -487,7 +487,7 b' class IntTests(TestCase):'
         )
 
     def testdecodepartialushort(self):
-        encoded = b''.join(cborutil.streamencode(2**15))
+        encoded = b''.join(cborutil.streamencode(2 ** 15))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -499,7 +499,7 b' class IntTests(TestCase):'
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:5]),
-            (True, 2**15, 3, cborutil.SPECIAL_NONE),
+            (True, 2 ** 15, 3, cborutil.SPECIAL_NONE),
         )
 
     def testdecodepartialshort(self):
@@ -519,7 +519,7 b' class IntTests(TestCase):'
         )
 
     def testdecodepartialulong(self):
-        encoded = b''.join(cborutil.streamencode(2**28))
+        encoded = b''.join(cborutil.streamencode(2 ** 28))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -539,7 +539,7 b' class IntTests(TestCase):'
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:5]),
-            (True, 2**28, 5, cborutil.SPECIAL_NONE),
+            (True, 2 ** 28, 5, cborutil.SPECIAL_NONE),
         )
 
     def testdecodepartiallong(self):
@@ -567,7 +567,7 b' class IntTests(TestCase):'
         )
 
     def testdecodepartialulonglong(self):
-        encoded = b''.join(cborutil.streamencode(2**32))
+        encoded = b''.join(cborutil.streamencode(2 ** 32))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -603,7 +603,7 b' class IntTests(TestCase):'
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:9]),
-            (True, 2**32, 9, cborutil.SPECIAL_NONE),
+            (True, 2 ** 32, 9, cborutil.SPECIAL_NONE),
         )
 
         with self.assertRaisesRegex(
@@ -15,7 +15,6 b' from mercurial.utils import procutil'
 
 testtmp = encoding.environ[b'TESTTMP']
 
-
 # prepare hgrc files
 def join(name):
     return os.path.join(testtmp, name)
@@ -27,7 +26,6 b" with open(join(b'sysrc'), 'wb') as f:"
 with open(join(b'userrc'), 'wb') as f:
     f.write(b'[ui]\neditor=e1')
 
-
 # replace rcpath functions so they point to the files above
 def systemrcpath():
     return [join(b'sysrc')]
@@ -42,7 +40,6 b" extensions.wrapfunction(rcutil, 'default"
 rcutil.systemrcpath = systemrcpath
 rcutil.userrcpath = userrcpath
 
-
 # utility to print configs
 def printconfigs(env):
     encoding.environ = env
@@ -66,7 +66,6 b' with wrap1:'
     print('context manager', dummy.getstack())
     print('context manager', dummy.getstack())
 
-
 # Wrap callable object which has no __name__
 class callableobj:
     def __call__(self):
@@ -5,6 +5,7 b' from mercurial.utils import urlutil'
 
 class ParseRequestTests(unittest.TestCase):
     def testparse(self):
+
         self.assertEqual(
             urlutil.parseurl(b'http://example.com/no/anchor'),
             (b'http://example.com/no/anchor', (None, [])),
@@ -5,6 +5,7 b' from mercurial import store'
 
 class hybridencodetests(unittest.TestCase):
     def hybridencode(self, input, want):
+
         # Check the C implementation if it's in use
         got = store._pathencode(input)
         self.assertEqual(want, got)
@@ -216,9 +216,6 b" since it's bin on most platforms but Scr"
   Failed building wheel for mercurial (?)
   WARNING: You are using pip version *; however, version * is available. (glob) (?)
   You should consider upgrading via the '$TESTTMP/installenv/bin/python* -m pip install --upgrade pip' command. (glob) (?)
-  (?)
-  [notice] A new release of pip is available: * -> * (glob) (?)
-  [notice] To update, run: python -m pip install --upgrade pip (?)
   $ ./installenv/*/hg debuginstall || cat pip.log
   checking encoding (ascii)...
   checking Python executable (*) (glob)
@@ -26,7 +26,6 b' from mercurial.revlogutils import ('
 
 parsers = policy.importmod('parsers')
 
-
 # original python implementation
 def gettype(q):
     return int(q & 0xFFFF)
@@ -98,7 +98,7 b' Without certificates:'
   $ wait_log "no hello:"
   $ cat ../log
   connection from * (glob)
-  no hello:
+  no hello: b''
   $ : > ../log
 
 With global certificates:
@@ -186,7 +186,7 b' class datapacktestsbase:'
             content = b'put-something-here \n' * i
             node = self.getHash(content)
             meta = {
-                constants.METAKEYFLAG: i**4,
+                constants.METAKEYFLAG: i ** 4,
                 constants.METAKEYSIZE: len(content),
                 b'Z': b'random_string',
                 b'_': b'\0' * i,
@@ -177,7 +177,7 b' class histpacktests(unittest.TestCase):'
         pack = self.createPack(revisions)
 
         # Verify the pack contents
-        for filename, node in allentries:
+        for (filename, node) in allentries:
             ancestors = pack.getancestors(filename, node)
             self.assertEqual(ancestorcounts[(filename, node)], len(ancestors))
             for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
@@ -246,7 +246,7 b' def genbits(n):'
     That is to say, given any x, y where both x, and y are in range(2 ** n),
     there is an x followed immediately by y in the generated sequence.
     """
-    m = 2**n
+    m = 2 ** n
 
     # Gray Code. See https://en.wikipedia.org/wiki/Gray_code
     gray = lambda x: x ^ (x >> 1)
@@ -24,8 +24,6 b' from mercurial import ('
 from mercurial.utils import stringutil
 
 TestCase = unittest.TestCase
-
-
 # bzr compatible interface, for the tests
 class Merge3(simplemerge.Merge3Text):
     """3-way merge of texts.
@@ -30,7 +30,6 b' commands.unbundle(u, repo, pycompat.fsen'
 time.sleep(1)
 commands.status(u, repo)
 
-
 # now disable symlink support -- this is what os.symlink would do on a
 # non-symlink file system
 def symlink_failure(src, dst):
@@ -25,7 +25,6 b" ui_.setconfig(b'ui', b'formatted', b'Tru"
 # we're not interested in the output, so write that to devnull
 ui_.fout = open(os.devnull, 'wb')
 
-
 # call some arbitrary command just so we go through
 # color's wrapped _runcommand twice.
 def runcmd():
@@ -615,8 +615,8 b' settings.register_profile('
     settings(
         timeout=-1,
         stateful_step_count=1000,
-        max_examples=10**8,
-        max_iterations=10**8,
+        max_examples=10 ** 8,
+        max_iterations=10 ** 8,
         database=writeonlydatabase(settings.default.database),
     ),
 )