black: format the codebase with 23.3.0...
Raphaël Gomès
r52583:493034cc default
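This changeset is a pure reformatting pass: black 23.3.0 rewrites the files mechanically and no hunk below changes behavior. For orientation, here is a minimal sketch of the most common rewrite patterns in this diff, assuming black 23.3.0 with its default configuration (all names below are illustrative placeholders, not code from the repository):

    pairs = [(1, 2), (3, 4)]

    # 1. The power operator hugs simple operands:
    #    `size = 2 ** 20` becomes:
    size = 2**20

    # 2. Redundant parentheses around tuple targets are dropped:
    #    `for (a, b) in pairs:` becomes:
    for a, b in pairs:
        size += a * b

    # 3. Blank lines directly after a block opener (`def`, `with`, `for`,
    #    `else:`) are removed, and two blank lines are enforced before
    #    top-level definitions, which is why most hunks only touch blanks.
    def example():
        return size

Once applied, re-running the formatter should be a no-op; something like `black --check .` with black pinned at 23.3.0 would verify that (invocation assumed; the project's own check scripts may differ).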
@@ -201,7 +201,6 @@ def run_tests_linux(
     with aws.temporary_linux_dev_instances(
         c, image, instance_type, ensure_extra_volume=ensure_extra_volume
     ) as insts:
-
         instance = insts[0]
 
         linux.prepare_exec_environment(
@@ -57,7 +57,6 @@ if sys.version_info[0] > 2:
             return b
         return b.decode('utf8')
 
-
 else:
     mkstr = lambda x: x
 
@@ -25,7 +25,6 @@ if sys.version_info[0] < 3:
            """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__"""
            return self.__bytes__()
 
-
 else:
 
     class py2reprhack:
@@ -21,7 +21,6 @@ if sys.version_info[0] >= 3:
         pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
         stdout.write(b' '.join(pargs) + b'\n')
 
-
 else:
     import cStringIO
 
@@ -205,7 +205,6 @@ def process(case, variant):
 
 
 if __name__ == '__main__':
-
     argv = sys.argv[:]
 
     kwargs = {}
@@ -130,7 +130,6 @@ try:
     def revlog(opener, *args, **kwargs):
         return mercurial.revlog.revlog(opener, perf_rl_kind, *args, **kwargs)
 
-
 except (ImportError, AttributeError):
     perf_rl_kind = None
 
@@ -261,7 +260,6 @@ elif safehasattr(cmdutil, 'command'):
             commands.norepo += b' %s' % b' '.join(parsealiases(name))
         return _command(name, list(options), synopsis)
 
-
 else:
     # for "historical portability":
     # define "@command" annotation locally, because cmdutil.command
@@ -1926,7 +1924,7 @@ def perfindex(ui, repo, **opts):
 
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
     if opts[b'no_lookup']:
         if opts['rev']:
             raise error.Abort('--no-lookup and --rev are mutually exclusive')
@@ -1985,7 +1983,7 @@ def perfnodemap(ui, repo, **opts):
 
    opts = _byteskwargs(opts)
    timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
 
    unfi = repo.unfiltered()
    clearcaches = opts[b'clear_caches']
@@ -2389,7 +2387,7 @@ def perfnodelookup(ui, repo, rev, **opts
     timer, fm = gettimer(ui, opts)
     import mercurial.revlog
 
-    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2**24  # disable lazy parser in old hg
     n = scmutil.revsingle(repo, rev).node()
 
     try:
@@ -3102,7 +3100,7 @@ def perf_unbundle(ui, repo, fname, **opt
     # disable inlining
     old_max_inline = mercurial.revlog._maxinline
     # large enough to never happen
-    mercurial.revlog._maxinline = 2 ** 50
+    mercurial.revlog._maxinline = 2**50
 
     with repo.lock():
         bundle = [None, None]
@@ -137,7 +137,6 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_read_variance(
         self, original, level, source_read_size, read_sizes
     ):
-
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)
 
@@ -203,7 +202,6 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto(
         self, original, level, source_read_size, read_size
     ):
-
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)
 
@@ -273,7 +271,6 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto_variance(
         self, original, level, source_read_size, read_sizes
     ):
-
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)
 
@@ -410,7 +407,6 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_read1_variance(
         self, original, level, source_read_size, read_sizes
     ):
-
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)
 
@@ -551,7 +547,6 @@ class TestCompressor_stream_reader_fuzzi
     def test_buffer_source_readinto1_variance(
         self, original, level, source_read_size, read_sizes
     ):
-
         refctx = zstd.ZstdCompressor(level=level)
         ref_frame = refctx.compress(original)
 
@@ -189,7 +189,7 @@ class TestDecompressor_decompress(TestCa
         # Will get OverflowError on some Python distributions that can't
         # handle really large integers.
         with self.assertRaises((MemoryError, OverflowError)):
-            dctx.decompress(compressed, max_output_size=2 ** 62)
+            dctx.decompress(compressed, max_output_size=2**62)
 
     def test_dictionary(self):
         samples = []
@@ -238,7 +238,7 @@ class TestDecompressor_decompress(TestCa
         cctx = zstd.ZstdCompressor(write_content_size=False)
         frame = cctx.compress(source)
 
-        dctx = zstd.ZstdDecompressor(max_window_size=2 ** zstd.WINDOWLOG_MIN)
+        dctx = zstd.ZstdDecompressor(max_window_size=2**zstd.WINDOWLOG_MIN)
 
         with self.assertRaisesRegex(
             zstd.ZstdError,
@@ -353,7 +353,6 @@ class TestDecompressor_stream_reader_fuz
     def test_multiple_frames(
         self, originals, frame_count, level, source_read_size, read_sizes
     ):
-
         cctx = zstd.ZstdCompressor(level=level)
         source = io.BytesIO()
         buffer = io.BytesIO()
@@ -273,7 +273,6 @@ class ZstdCompressionParameters(object):
         ldm_hash_every_log=-1,
         threads=0,
     ):
-
         params = lib.ZSTD_createCCtxParams()
         if params == ffi.NULL:
             raise MemoryError()
@@ -1423,7 +1422,6 @@ class ZstdCompressor(object):
         read_size=COMPRESSION_RECOMMENDED_INPUT_SIZE,
         write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE,
     ):
-
         if not hasattr(ifh, "read"):
             raise ValueError("first argument must have a read() method")
         if not hasattr(ofh, "write"):
@@ -1523,7 +1521,6 @@ class ZstdCompressor(object):
         write_size=COMPRESSION_RECOMMENDED_OUTPUT_SIZE,
         write_return_read=False,
     ):
-
         if not hasattr(writer, "write"):
             raise ValueError("must pass an object with a write() method")
 
@@ -191,7 +191,7 @@ def formatfactor(factor):
 
 def formattiming(value):
     """format a value to strictly 8 char, dropping some precision if needed"""
-    if value < 10 ** 7:
+    if value < 10**7:
         return ('%.6f' % value)[:8]
     else:
         # value is HUGE very unlikely to happen (4+ month run)
@@ -371,7 +371,6 @@ print()
 print()
 
 for ridx, rset in enumerate(revsets):
-
     print("revset #%i: %s" % (ridx, rset))
     printheader(variants, len(results), verbose=options.verbose, relative=True)
     ref = None
@@ -101,6 +101,7 @@ if getattr(sys, 'isapidllhandle', None) 
 import isapi_wsgi
 from mercurial.hgweb.hgwebdir_mod import hgwebdir
 
+
 # Example tweak: Replace isapi_wsgi's handler to provide better error message
 # Other stuff could also be done here, like logging errors etc.
 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
@@ -114,7 +115,6 @@ application = hgwebdir(hgweb_config)
 
 
 def handler(environ, start_response):
-
     # Translate IIS's weird URLs
     url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
     paths = url[1:].split('/')[path_strip:]
@@ -95,7 +95,6 @@ level margin: \\n[rst2man-indent\\n[rst2
 
 
 class Writer(writers.Writer):
-
     supported = 'manpage'
     """Formats this writer supports."""
 
@@ -297,7 +296,7 @@ class Translator(nodes.NodeVisitor):
             (u'´', u"\\'"),
             (u'`', u'\\(ga'),
         ]
-        for (in_char, out_markup) in replace_pairs:
+        for in_char, out_markup in replace_pairs:
             text = text.replace(in_char, out_markup)
         # unicode
         text = self.deunicode(text)
@@ -279,7 +279,6 @@ configitem(
 
 
 def _getusers(ui, group):
-
     # First, try to use group definition from section [acl.groups]
     hgrcusers = ui.configlist(b'acl.groups', group)
     if hgrcusers:
@@ -294,12 +293,10 @@ def _getusers(ui, group):
 
 
 def _usermatch(ui, user, usersorgroups):
-
     if usersorgroups == b'*':
         return True
 
     for ug in usersorgroups.replace(b',', b' ').split():
-
         if ug.startswith(b'!'):
             # Test for excluded user or group. Format:
             # if ug is a user name: !username
@@ -368,7 +365,6 @@ def ensureenabled(ui):
 
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
-
     ensureenabled(ui)
 
     if hooktype not in [b'pretxnchangegroup', b'pretxncommit', b'prepushkey']:
@@ -76,7 +76,6 @@ def readauthormap(ui: "uimod.ui", author
     authors = {}
     with open(authorfile, b'rb') as afile:
         for line in afile:
-
             line = line.strip()
             if not line or line.startswith(b'#'):
                 continue
@@ -273,7 +272,6 @@ class keysorter:
 
 class converter:
     def __init__(self, ui: "uimod.ui", source, dest, revmapfile, opts) -> None:
-
         self.source = source
         self.dest = dest
         self.ui = ui
@@ -639,7 +639,6 @@ def createchangeset(ui, log, fuzz=60, me
     files = set()
     c = None
     for i, e in enumerate(log):
-
         # Check if log entry belongs to the current changeset or not.
 
         # Since CVS is file-centric, two different file revisions with
@@ -983,7 +982,6 @@ def debugcvsps(ui, *args, **opts):
     branches = {}  # latest version number in each branch
     ancestors = {}  # parent branch
     for cs in changesets:
-
         if opts[b"ancestors"]:
             if cs.branch not in branches and cs.parents and cs.parents[0].id:
                 ancestors[cs.branch] = (
@@ -1425,7 +1425,6 @@ class svn_sink(converter_sink, commandli
         return self.join(b'hg-authormap')
 
     def __init__(self, ui, repotype, path):
-
         converter_sink.__init__(self, ui, repotype, path)
         commandline.__init__(self, ui, b'svn')
         self.delete = []
@@ -405,7 +405,6 @@ def diffrevs(
     guitool,
     opts,
 ):
-
     subrepos = opts.get(b'subrepos')
 
     # calculate list of files changed between both revs
@@ -38,6 +38,7 @@ from . import (
     revmap as revmapmod,
 )
 
+
 # given path, get filelog, cached
 @util.lrucachefunc
 def _getflog(repo, path):
@@ -17,6 +17,7 @@ from mercurial import (
 )
 from mercurial.utils import dateutil
 
+
 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.
 class defaultformatter:
@@ -893,7 +893,6 @@ def wrapupdate(
     matcher=None,
     **kwargs
 ):
-
     distance = 0
     partial = True
     oldnode = repo[b'.'].node()
@@ -210,7 +210,6 @@ if _debugging:
             )
         )
 
-
 else:
 
     def log(fmt, *args):
@@ -46,7 +46,6 @@ if compat.PYTHON3:
         # returns None.
         return sys.getfilesystemencoding()
 
-
 else:
     # Python 2 doesn't support surrogateescape, so use 'strict' by
     # default. Users can register a custom surrogateescape error handler and use
@@ -43,7 +43,6 @@ SYNTAX_CSS = (
 
 
 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
-
     # append a <link ...> to the syntax highlighting css
     tmpl.load(b'header')
     old_header = tmpl.cache[b'header']
@@ -1526,7 +1526,8 @@ pgup/K: move patch up, pgdn/J: move patc
 
     def move_cursor(self, oldpos, newpos):
         """Change the rule/changeset that the cursor is pointing to, regardless of
-        current mode (you can switch between patches from the view patch window)."""
+        current mode (you can switch between patches from the view patch window).
+        """
         self.pos = newpos
 
         mode, _ = self.mode
@@ -1605,7 +1606,8 @@ pgup/K: move patch up, pgdn/J: move patc
 
     def change_view(self, delta, unit):
         """Change the region of whatever is being viewed (a patch or the list of
-        changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
+        changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.
+        """
         mode, _ = self.mode
         if mode != MODE_PATCH:
             return
@@ -64,6 +64,7 @@ sharednamespaces = {
     bookmarktype: hg.sharedbookmarks,
 }
 
+
 # Journal recording, register hooks and storage object
 def extsetup(ui):
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
@@ -160,6 +160,8 @@ configitem(
     b'svn',
     default=False,
 )
+
+
 # date like in cvs' $Date
 @templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
@@ -897,7 +897,7 @@ def overridecopy(orig, ui, repo, pats, o
     result += orig(ui, repo, listpats, opts, rename)
 
     lfdirstate = lfutil.openlfdirstate(ui, repo)
-    for (src, dest) in copiedfiles:
+    for src, dest in copiedfiles:
         if lfutil.shortname in src and dest.startswith(
             repo.wjoin(lfutil.shortname)
         ):
@@ -140,7 +140,6 @@ def reposetup(ui, repo):
                wlock = util.nullcontextmanager()
                gotlock = False
            with wlock, self.dirstate.running_status(self):
-
                # First check if paths or patterns were specified on the
                # command line. If there were, and they don't match any
                # largefiles, we should just bail here and let super
@@ -37,6 +37,7 @@ from mercurial import (
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)
 
+
 # Serve a changegroup for a client with a narrow clone.
 def getbundlechangegrouppart_narrow(
     bundler,
@@ -543,7 +543,6 @@ class notifier:
         )
 
     def diff(self, ctx, ref=None):
-
         maxdiff = int(self.ui.config(b'notify', b'maxdiff'))
         prev = ctx.p1().node()
         if ref:
@@ -261,7 +261,6 @@ def makepatch(
     numbered,
     patchname=None,
 ):
-
     desc = []
     node = None
     body = b''
@@ -830,7 +830,6 @@ class rebaseruntime:
                 cleanup = False
 
         if cleanup:
-
             if rebased:
                 strippoints = [
                     c.node() for c in repo.set(b'roots(%ld)', rebased)
@@ -45,7 +45,7 @@ LARGEFANOUTPREFIX = 2
 # bisect) with (8 step fanout scan + 1 step bisect)
 # 5 step bisect = log(2^16 / 8 / 255) # fanout
 # 10 step fanout scan = 2^16 / (2^16 / 8) # fanout space divided by entries
-SMALLFANOUTCUTOFF = 2 ** 16 // 8
+SMALLFANOUTCUTOFF = 2**16 // 8
 
 # The amount of time to wait between checking for new packs. This prevents an
 # exception when data is moved to a new pack after the process has already
@@ -275,7 +275,7 @@ class versionmixin:
 class basepack(versionmixin):
     # The maximum amount we should read via mmap before remmaping so the old
     # pages can be released (100MB)
-    MAXPAGEDIN = 100 * 1024 ** 2
+    MAXPAGEDIN = 100 * 1024**2
 
     SUPPORTED_VERSIONS = [2]
 
@@ -38,7 +38,6 @@ class connectionpool:
                 pass
 
         if conn is None:
-
             peer = hg.peer(self._repo.ui, {}, path)
             if hasattr(peer, '_cleanup'):
 
@@ -414,7 +414,7 @@ class mutabledatapack(basepack.mutableba
 
     def add(self, name, node, deltabasenode, delta, metadata=None):
         # metadata is a dict, ex. {METAKEYFLAG: flag}
-        if len(name) > 2 ** 16:
+        if len(name) > 2**16:
             raise RuntimeError(_(b"name too long %s") % name)
         if len(node) != 20:
             raise RuntimeError(_(b"node should be 20 bytes %s") % node)
@@ -41,7 +41,6 @@ class remotefilelognodemap:
 
 
 class remotefilelog:
-
     _flagserrorclass = error.RevlogError
 
     def __init__(self, opener, path, repo):
@@ -32,6 +32,7 @@ from . import (
     shallowutil,
 )
 
+
 # These make*stores functions are global so that other extensions can replace
 # them.
 def makelocalstores(repo):
@@ -259,7 +259,6 @@ def extsetup(ui):
 
 
 def reposetup(ui, repo):
-
     # set the config option to store remotenames
     repo.ui.setconfig(b'experimental', b'remotenames', True, b'remotenames-ext')
 
@@ -649,7 +649,6 @@ class sqlitefilestore:
             deltamode=deltamode,
             sidedata_helpers=sidedata_helpers,
         ):
-
             yield delta
 
     # End of ifiledata interface.
@@ -154,7 +154,6 @@ def uncommit(ui, repo, *pats, **opts):
     cmdutil.resolve_commit_options(ui, opts)
 
     with repo.wlock(), repo.lock():
-
         st = repo.status()
         m, a, r, d = st.modified, st.added, st.removed, st.deleted
         isdirtypath = any(set(m + a + r + d) & set(pats))
@@ -264,7 +263,6 @@ def unamend(ui, repo, **opts):
 
     unfi = repo.unfiltered()
     with repo.wlock(), repo.lock(), repo.transaction(b'unamend'):
-
         # identify the commit from which to unamend
         curctx = repo[b'.']
 
@@ -64,7 +64,6 @@ if sys.version_info[:2] < (3, 0):
     def u(s):
         return unicode(s, "unicode_escape")
 
-
 else:
     PY3 = True
     text_type = str
@@ -1889,7 +1888,6 @@ class TextWrapper(textwrap.TextWrapper):
         chunks.reverse()
 
         while chunks:
-
             # Start the list of chunks that will make up the current line.
             # cur_len is just the length of all the chunks in cur_line.
             cur_line = []
@@ -88,7 +88,7 @@ def ancestors(pfunc, *orignodes):
     depth = [0] * count
     seen = [0] * count
     mapping = []
-    for (i, n) in enumerate(sorted(nodes)):
+    for i, n in enumerate(sorted(nodes)):
         depth[n] = 1
         b = 1 << i
         seen[n] = b
@@ -685,7 +685,7 @@ def mirroring_remote(ui, repo, remotemar
     remotemarks"""
     changed = []
     localmarks = repo._bookmarks
-    for (b, id) in remotemarks.items():
+    for b, id in remotemarks.items():
         if id != localmarks.get(b, None) and id in repo:
             changed.append((b, id, ui.debug, _(b"updating bookmark %s\n") % b))
     for b in localmarks:
@@ -1286,7 +1286,6 @@ class interrupthandler(unpackermixin):
         return None
 
     def __call__(self):
-
         self.ui.debug(
             b'bundle2-input-stream-interrupt: opening out of band context\n'
         )
@@ -2614,7 +2613,6 @@ def bundle2getvars(op, part):
 
 @parthandler(b'stream2', (b'requirements', b'filecount', b'bytecount'))
 def handlestreamv2bundle(op, part):
-
     requirements = urlreq.unquote(part.params[b'requirements'])
     requirements = requirements.split(b',') if requirements else []
     filecount = int(part.params[b'filecount'])
@@ -408,7 +408,7 @@ class bundlerepository:
         with os.fdopen(fdtemp, 'wb') as fptemp:
             fptemp.write(header)
             while True:
-                chunk = readfn(2 ** 18)
+                chunk = readfn(2**18)
                 if not chunk:
                     break
                 fptemp.write(chunk)
@@ -407,7 +407,7 @@ class cg1unpacker:
             yield chunkheader(len(chunk))
             pos = 0
             while pos < len(chunk):
-                next = pos + 2 ** 20
+                next = pos + 2**20
                 yield chunk[pos:next]
                 pos = next
         yield closechunk()
@@ -3833,7 +3833,6 @@ def _performrevert(
         original_headers = patch.parsepatch(diff)
 
         try:
-
             chunks, opts = recordfilter(
                 repo.ui, original_headers, match, operation=operation
             )
@@ -915,11 +915,14 @@ class branch_copies:
         self.movewithdir = {} if movewithdir is None else movewithdir
 
     def __repr__(self):
-        return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % (
-            self.copy,
-            self.renamedelete,
-            self.dirmove,
-            self.movewithdir,
+        return (
+            '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>'
+            % (
+                self.copy,
+                self.renamedelete,
+                self.dirmove,
+                self.movewithdir,
+            )
         )
 
 
@@ -136,7 +136,6 @@ CHANGE_TYPE_FILES = "files"
 
 @interfaceutil.implementer(intdirstate.idirstate)
 class dirstate:
-
     # used by largefile to avoid overwritting transaction callback
     _tr_key_suffix = b''
 
@@ -880,7 +879,6 @@ class dirstate:
         possibly_dirty=False,
         parentfiledata=None,
     ):
-
        # note: I do not think we need to double check name clash here since we
        # are in a update/merge case that should already have taken care of
        # this. The test agrees
@@ -1092,7 +1090,6 @@ class dirstate:
 
         write_key = self._use_tracked_hint and self._dirty_tracked_set
         if tr:
-
             self._setup_tr_abort(tr)
             self._attached_to_a_transaction = True
 
@@ -1286,7 +1283,7 @@ class dirstate:
                     badfn(ff, badtype(kind))
                 if nf in dmap:
                     results[nf] = None
-            except (OSError) as inst:
+            except OSError as inst:
                 # nf not found on disk - it is dirstate only
                 if nf in dmap:  # does it exactly match a missing file?
                     results[nf] = None
@@ -331,7 +331,7 @@ class dirstatemap(_dirstatemapcommon):
 
         `all` is unused when Rust is not enabled
         """
-        for (filename, item) in self.items():
+        for filename, item in self.items():
             yield (filename, item.state, item.mode, item.size, item.mtime)
 
     def keys(self):
@@ -617,7 +617,8 @@ class dirstatemap(_dirstatemapcommon):
 
         This should also drop associated copy information
 
-        The fact we actually need to drop it is the responsability of the caller"""
+        The fact we actually need to drop it is the responsability of the caller
+        """
         self._map.pop(f, None)
         self.copymap.pop(f, None)
 
@@ -625,7 +626,6 @@ class dirstatemap(_dirstatemapcommon):
 if rustmod is not None:
 
     class dirstatemap(_dirstatemapcommon):
-
         ### Core data storage and access
 
         @propertycache
@@ -367,7 +367,6 @@ if pycompat.iswindows:
         cwd = cwd[0:1].upper() + cwd[1:]
         return cwd
 
-
 else:
     getcwd = os.getcwdb  # re-exports
 
@@ -290,7 +290,7 @@ def loadall(ui, whitelist=None):
     with util.timedcm('load all extensions') as stats:
         default_sub_options = ui.configsuboptions(b"extensions", b"*")[1]
 
-        for (name, path) in result:
+        for name, path in result:
             if path:
                 if path[0:1] == b'!':
                     if name not in _disabledextensions:
@@ -175,7 +175,6 @@ class filelog:
         )
 
         with self._revlog._writing(transaction):
-
             if self._fix_issue6528:
                 deltas = rewrite.filter_delta_issue6528(self._revlog, deltas)
 
@@ -176,7 +176,6 @@ class _nullconverter:
 
 
 class baseformatter:
-
     # set to True if the formater output a strict format that does not support
     # arbitrary output in the stream.
     strict_format = False
@@ -421,7 +420,6 @@ class cborformatter(baseformatter):
 
 
 class jsonformatter(baseformatter):
-
     strict_format = True
 
     def __init__(self, ui, out, topic, opts):
@@ -133,8 +133,7 @@ def colored(dag, repo):
     else:
         getconf = lambda rev: {}
 
-    for (cur, type, data, parents) in dag:
-
+    for cur, type, data, parents in dag:
         # Compute seen and next
         if cur not in seen:
             seen.append(cur)  # new head
@@ -244,7 +243,7 @@ def asciiedges(type, char, state, rev, p
 
 
 def _fixlongrightedges(edges):
-    for (i, (start, end)) in enumerate(edges):
+    for i, (start, end) in enumerate(edges):
         if end > start:
             edges[i] = (start, end + 1)
 
@@ -265,7 +264,7 @@ def _getnodelineedgestail(echars, idx, p
 
 
 def _drawedges(echars, edges, nodeline, interline):
-    for (start, end) in edges:
+    for start, end in edges:
         if start == end + 1:
             interline[2 * end + 1] = b"/"
         elif start == end - 1:
@@ -381,7 +380,7 @@ def outputgraph(ui, graph):
     this function can be monkey-patched by extensions to alter graph display
     without needing to mimic all of the edge-fixup logic in ascii()
     """
-    for (ln, logstr) in graph:
+    for ln, logstr in graph:
         ui.write((ln + logstr).rstrip() + b"\n")
 
 
@@ -120,7 +120,6 @@ def rawindexentries(ui, repos, req, subd
     seenrepos = set()
     seendirs = set()
     for name, path in repos:
-
         if not name.startswith(subdir):
             continue
         name = name[len(subdir) :]
@@ -66,7 +66,6 @@ class _error_logger:
 
 
 class _httprequesthandler(httpservermod.basehttprequesthandler):
-
     url_scheme = b'http'
 
     @staticmethod
@@ -358,7 +357,6 @@ def openlog(opt, default):
 
 
 class MercurialHTTPServer(_mixin, httpservermod.httpserver, object):
-
     # SO_REUSEADDR has broken semantics on windows
     if pycompat.iswindows:
         allow_reuse_address = 0
@@ -396,7 +394,6 @@ class IPv6HTTPServer(MercurialHTTPServer
 
 
 def create_server(ui, app):
-
     if ui.config(b'web', b'certificate'):
         handler = _httprequesthandlerssl
     else:
@@ -601,7 +601,6 @@ def manifest(web):
 
     def dirlist(context):
         for d in sorted(dirs):
-
             emptydirs = []
             h = dirs[d]
             while isinstance(h, dict) and len(h) == 1:
@@ -1427,7 +1426,7 @@ def graph(web):
         return tree
 
     def jsdata(context):
-        for (id, type, ctx, vtx, edges) in fulltree():
+        for id, type, ctx, vtx, edges in fulltree():
             yield {
                 b'node': pycompat.bytestr(ctx),
                 b'graphnode': webutil.getgraphnode(web.repo, ctx),
@@ -25,6 +25,7 @@ from .utils import (
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 # moved here from url.py to avoid a cycle
 class httpsendfile:
     """This is a wrapper around the objects returned by python's "open".
@@ -119,6 +119,5 @@ if _plain():
     def _(message: bytes) -> bytes:
         return message
 
-
 else:
     _ = gettext
@@ -53,7 +53,6 @@ class annotateresult:
 
 
 class _llinstruction:  # pytype: disable=ignored-metaclass
-
     __metaclass__ = abc.ABCMeta
 
     @abc.abstractmethod
@@ -420,11 +420,11 @@ def checkpathconflicts(repo, wctx, mctx,
     # Track the names of all deleted files.
     for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
         deletedfiles.add(f)
-    for (f, args, msg) in mresult.getactions((mergestatemod.ACTION_MERGE,)):
+    for f, args, msg in mresult.getactions((mergestatemod.ACTION_MERGE,)):
         f1, f2, fa, move, anc = args
         if move:
             deletedfiles.add(f1)
-    for (f, args, msg) in mresult.getactions(
+    for f, args, msg in mresult.getactions(
         (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,)
     ):
         f2, flags = args
@@ -495,7 +495,6 @@ class _mergestate_base:
 
 
 class mergestate(_mergestate_base):
-
     statepathv1 = b'merge/state'
     statepathv2 = b'merge/state2'
 
@@ -433,14 +433,12 @@ def _process_merge(p1_ctx, p2_ctx, ctx):
     # Iteration over d1 content will deal with all cases, but the one in the
     # first column of the table.
     for filename, d1 in diff_p1.items():
-
         d2 = diff_p2.pop(filename, None)
 
         if d2 is None:
             # this deal with the first line of the table.
             _process_other_unchanged(md, mas, filename, d1)
         else:
-
             if d1[0][0] is None and d2[0][0] is None:
                 # case 🄼 — both deleted the file.
                 md.mark_added(filename)
@@ -225,7 +225,6 @@ def copytoworkingcopy(repo):
         m = "changing narrow spec outside of a transaction"
         raise error.ProgrammingError(m)
     else:
-
         reporef = weakref.ref(repo)
 
         def clean_pending(tr):
@@ -1038,7 +1038,6 @@ def _computecontentdivergentset(repo):
 
 
 def makefoldid(relation, user):
-
     folddigest = hashutil.sha1(user)
     for p in relation[0] + relation[1]:
         folddigest.update(b'%d' % p.rev())
@@ -961,7 +961,6 @@ def _getfilteredreason(repo, changeid, c
         single_successor = short(successors[0][0])
         return filteredmsgtable[b'superseded'] % (changeid, single_successor)
     elif fate == b'superseded_split':
-
         succs = []
         for node_id in successors[0]:
             succs.append(short(node_id))
@@ -229,7 +229,6 @@ def extract(ui, fileobj):
 
 
 def _extract(ui, fileobj, tmpname, tmpfp):
-
     # attempt to detect the start of a patch
     # (this heuristic is borrowed from quilt)
     diffre = re.compile(
@@ -596,7 +595,7 @@ class filestore:
         self.created = 0
         self.maxsize = maxsize
         if self.maxsize is None:
-            self.maxsize = 4 * (2 ** 20)
+            self.maxsize = 4 * (2**20)
         self.size = 0
         self.data = {}
 
@@ -893,7 +893,6 @@ class phasecache:
 
         this_phase_set = self._phasesets[targetphase]
         for r in range(start, end):
-
             # gather information about the current_rev
             r_phase = phase(repo, r)
             p_phase = None  # phase inherited from parents
@@ -911,7 +911,6 @@ class IndexChangelogV2(IndexObject2):
         )
 
     def _pack_entry(self, rev, entry):
-
         base = entry[revlog_constants.ENTRY_DELTA_BASE]
         link_rev = entry[revlog_constants.ENTRY_LINK_REV]
         assert base == rev, (base, rev)
@@ -226,7 +226,6 @@ if hasattr(parsers, 'parse_index_devel_n
         index, cache = parsers.parse_index_devel_nodemap(data, inline)
         return index, cache
 
-
 else:
     parse_index_v1_nodemap = None
 
@@ -428,7 +428,6 @@ def _slicechunktodensity(revlog, revs, t
     # Cut the revs at collected indices
     previdx = 0
    for idx in selected:
-
        chunk = _trimchunk(revlog, revs, previdx, idx)
        if chunk:
            yield chunk
@@ -553,7 +553,7 @@ def _walk_trie(block):
 
     Children blocks are always yield before their parent block.
     """
-    for (__, item) in sorted(block.items()):
+    for __, item in sorted(block.items()):
         if isinstance(item, dict):
             for sub_block in _walk_trie(item):
                 yield sub_block
@@ -258,7 +258,6 @@ def _precompute_rewritten_delta(
             # this revision is empty, we can delta against nullrev
             rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
         else:
-
             text = revlog.rawdata(rev)
             info = revlogutils.revisioninfo(
                 node=entry[ENTRY_NODE_ID],
@@ -730,7 +730,6 @@ def _filterederror(repo, changeid):
     This is extracted in a function to help extensions (eg: evolve) to
     experiment with various message variants."""
     if repo.filtername.startswith(b'visible'):
-
         # Check if the changeset is obsolete
         unfilteredrepo = repo.unfiltered()
         ctx = revsymbol(unfilteredrepo, changeid)
@@ -190,7 +190,6 @@ class partialdiscovery:
         return getparents
 
     def _childrengetter(self):
-
         if self._childrenmap is not None:
             # During discovery, the `undecided` set keep shrinking.
             # Therefore, the map computed for an iteration N will be
@@ -454,7 +453,6 @@ def findcommonheads(
    full = not initial_head_exchange
    progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
    while not disco.iscomplete():
-
        if full or disco.hasinfo():
            if full:
                ui.note(_(b"sampling from both directions\n"))
@@ -118,14 +118,14 @@ def findrenames(repo, added, removed, th
 
     # Find exact matches.
     matchedfiles = set()
-    for (a, b) in _findexactmatches(repo, addedfiles, removedfiles):
+    for a, b in _findexactmatches(repo, addedfiles, removedfiles):
         matchedfiles.add(b)
         yield (a.path(), b.path(), 1.0)
 
     # If the user requested similar files to be matched, search for them also.
     if threshold < 1.0:
         addedfiles = [x for x in addedfiles if x not in matchedfiles]
-        for (a, b, score) in _findsimilarmatches(
+        for a, b, score in _findsimilarmatches(
             repo, addedfiles, removedfiles, threshold
         ):
             yield (a.path(), b.path(), score)
@@ -497,7 +497,6 @@ def wrapsocket(sock, keyfile, certfile, 
            )
 
        elif e.reason == 'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows:
-
            ui.warn(
                _(
                    b'(the full certificate chain may not be available '
@@ -37,7 +37,7 @@ from .utils import hashutil
 parsers = policy.importmod('parsers')
 # how much bytes should be read from fncache in one read
 # It is done to prevent loading large fncache files into memory
-fncache_chunksize = 10 ** 6
+fncache_chunksize = 10**6
 
 
 def _match_tracked_entry(entry, matcher):
@@ -547,6 +547,7 @@ class streamcloneapplier:
     _srcstore = b's'  # store (svfs)
     _srccache = b'c'  # cache (cache)
 
+
 # This is it's own function so extensions can override it.
 def _walkstreamfullstorefiles(repo):
     """list snapshot file from the store"""
@@ -809,7 +810,6 @@ def generatev2(repo, includes, excludes,
     """
 
     with repo.lock():
-
         repo.ui.debug(b'scanning\n')
 
         entries = _entries_walk(
@@ -857,7 +857,6 @@ def generatev3(repo, includes, excludes,
     # considering the files to preserve, disabling the gc while we do so helps
     # performance a lot.
     with repo.lock(), util.nogc():
-
         repo.ui.debug(b'scanning\n')
 
         entries = _entries_walk(
@@ -990,7 +989,6 @@ def consumev3(repo, fp):
     with repo.transaction(b'clone'):
         ctxs = (vfs.backgroundclosing(repo.ui) for vfs in vfsmap.values())
         with nested(*ctxs):
-
             for i in range(entrycount):
                 filecount = util.uvarintdecodestream(fp)
                 if filecount == 0:
@@ -1123,7 +1121,6 @@ def local_copy(src_repo, dest_repo):
 
     with dest_repo.lock():
         with src_repo.lock():
-
             # bookmark is not integrated to the streaming as it might use the
             # `repo.vfs` and they are too many sentitive data accessible
             # through `repo.vfs` to expose it to streaming clone.
@@ -63,7 +63,6 @@ def strip(
     soft=False,
 ):
     with repo.wlock(), repo.lock():
-
         if update:
             checklocalchanges(repo, force=force)
             urev = _findupdatetarget(repo, revs)
@@ -1227,16 +1227,12 @@ class svnsubrepo(abstractsubrepo):
                 externals.append(path)
             elif item == 'missing':
                 missing.append(path)
-            if (
-                item
-                not in (
-                    '',
-                    'normal',
-                    'unversioned',
-                    'external',
-                )
-                or props not in ('', 'none', 'normal')
-            ):
+            if item not in (
+                '',
+                'normal',
+                'unversioned',
+                'external',
+            ) or props not in ('', 'none', 'normal'):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -601,7 +601,7 @@ def _writetagcache(ui, repo, valid, cach
     # we keep them in UTF-8 throughout this module. If we converted
     # them local encoding on input, we would lose info writing them to
     # the cache.
-    for (name, (node, hist)) in sorted(cachetags.items()):
+    for name, (node, hist) in sorted(cachetags.items()):
         for n in hist:
             cachefile.write(b"%s %s\n" % (hex(n), name))
         cachefile.write(b"%s %s\n" % (hex(node), name))
@@ -194,7 +194,6 @@ def upgraderepo(
             onlydefault.append(d)
 
     if fromconfig or onlydefault:
-
         if fromconfig:
             ui.status(
                 _(
@@ -109,6 +109,7 @@ httpserver._registeraliases(
     ),
 )
 
+
 # urllib.parse.quote() accepts both str and bytes, decodes bytes
 # (if necessary), and returns str. This is wonky. We provide a custom
 # implementation that only accepts bytes and emits bytes.
@@ -1328,7 +1328,7 @@ class sortdict(collections.OrderedDict):
             self[k] = f[k]
 
     def insert(self, position, key, value):
-        for (i, (k, v)) in enumerate(list(self.items())):
+        for i, (k, v) in enumerate(list(self.items())):
             if i == position:
                 self[key] = value
             if i >= position:
@@ -2724,10 +2724,10 @@ class chunkbuffer:
 
         def splitbig(chunks):
             for chunk in chunks:
-                if len(chunk) > 2 ** 20:
+                if len(chunk) > 2**20:
                     pos = 0
                     while pos < len(chunk):
-                        end = pos + 2 ** 18
+                        end = pos + 2**18
                         yield chunk[pos:end]
                         pos = end
                 else:
@@ -2751,7 +2751,7 @@ class chunkbuffer:
         while left > 0:
             # refill the queue
             if not queue:
-                target = 2 ** 18
+                target = 2**18
                 for chunk in self.iter:
                     queue.append(chunk)
                     target -= len(chunk)
@@ -3081,12 +3081,12 @@ def timed(func):
 
 
 _sizeunits = (
-    (b'm', 2 ** 20),
-    (b'k', 2 ** 10),
-    (b'g', 2 ** 30),
-    (b'kb', 2 ** 10),
-    (b'mb', 2 ** 20),
-    (b'gb', 2 ** 30),
+    (b'm', 2**20),
+    (b'k', 2**10),
+    (b'g', 2**30),
+    (b'kb', 2**10),
+    (b'mb', 2**20),
+    (b'gb', 2**30),
     (b'b', 1),
 )
 
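Note: all of the `2 ** N` to `2**N` hunks come from black 22.1.0's power-operator rule: the spaces around `**` are removed when both operands are simple (names, numeric literals, attribute lookups). Compound operands keep the spaces, e.g.:

    MB = 2**20               # both operands simple: hugged
    offset = 2 ** (10 + 10)  # parenthesized operand is not simple: spaces kept
    print(MB, offset)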
@@ -511,7 +511,7 @@ class _zlibengine(compressionengine):
         parts = []
         pos = 0
         while pos < insize:
-            pos2 = pos + 2 ** 20
+            pos2 = pos + 2**20
             parts.append(z.compress(data[pos:pos2]))
             pos = pos2
         parts.append(z.flush())
@@ -711,7 +711,6 @@ if pycompat.iswindows:
         if stdin is not None:
             stdin.close()
 
-
 else:
 
     def runbgcommand(
@@ -52,7 +52,6 @@ if mainfrozen() and getattr(sys, "frozen
         assert dirs[0] == b"mercurial"
         return os.path.join(_rootpath, *dirs[1:])
 
-
 else:
     datapath = os.path.dirname(os.path.dirname(pycompat.fsencode(__file__)))
     _rootpath = os.path.dirname(datapath)
@@ -98,7 +97,6 @@ except (ImportError, AttributeError):
         for p in os.listdir(path):
             yield pycompat.fsencode(p)
 
-
 else:
     from .. import encoding
 
@@ -574,7 +574,6 @@ def parsemailmap(mailmapcontent):
         return mailmap
 
     for line in mailmapcontent.splitlines():
-
         # Don't bother checking the line if it is a comment or
         # is an improperly formed author field
         if line.lstrip().startswith(b'#'):
@@ -801,7 +800,6 @@ def _MBTextWrapper(**kwargs):
             chunks.reverse()
 
             while chunks:
-
                 # Start the list of chunks that will make up the current line.
                 # cur_len is just the length of all the chunks in cur_line.
                 cur_line = []
@@ -172,6 +172,7 @@ CERT_TRUST_IS_PARTIAL_CHAIN = 0x10000
 X509_ASN_ENCODING = 0x00000001
 PKCS_7_ASN_ENCODING = 0x00010000
 
+
 # These structs are only complete enough to achieve what we need.
 class CERT_CHAIN_CONTEXT(ctypes.Structure):
     _fields_ = (
@@ -368,7 +369,7 @@ def _raiseoserror(name: bytes) -> NoRetu
     # See https://bugs.python.org/issue28474
     code = _kernel32.GetLastError()
     if code > 0x7FFFFFFF:
-        code -= 2 ** 32
+        code -= 2**32
     err = ctypes.WinError(code=code)  # pytype: disable=module-attr
     raise OSError(
         err.errno, '%s: %s' % (encoding.strfromlocal(name), err.strerror)
@@ -546,7 +546,7 @@ def createtextoutputframe(
     """
     atomdicts = []
 
-    for (formatting, args, labels) in atoms:
+    for formatting, args, labels in atoms:
         # TODO look for localstr, other types here?
 
         if not isinstance(formatting, bytes):
@@ -1198,7 +1198,6 @@ class serverreactor:
                 b'%s' % stringutil.forcebytestr(e),
                 errtype=b'server',
             ):
-
                 yield frame
 
             break
@@ -1259,7 +1258,6 @@ class serverreactor:
                 for chunk in cborutil.streamencodebytestringfromiter(
                     o.chunks
                 ):
-
                     for frame in emitter.send(chunk):
                         yield frame
 
@@ -463,7 +463,6 @@ class hgbuild(build):
 
 
 class hgbuildmo(build):
-
     description = "build translations (.mo files)"
 
     def run(self):
@@ -1056,7 +1055,6 @@ class hgbuilddoc(Command):
 
 
 class hginstall(install):
-
     user_options = install.user_options + [
         (
             'old-and-unmanageable',
@@ -26,7 +26,6 @@ if os.environ.get('HGIPV6', '0') == '1':
     class simplehttpserver(httpserver.httpserver):
         address_family = socket.AF_INET6
 
-
 else:
     simplehttpserver = httpserver.httpserver
 
@@ -30,7 +30,6 @@ if sys.version_info > (3, 5, 0):
     def _sys2bytes(p):
         return p.encode('utf-8')
 
-
 elif sys.version_info >= (3, 0, 0):
     print(
         '%s is only supported on Python 3.5+ and 2.7, not %s'
@@ -33,6 +33,7 @@
 import os
 import sys
 
+
 # Generates pairs of (filename, contents), where 'contents' is a list
 # describing the file's content at each revision (or in the working copy).
 # At each revision, it is either None or the file's actual content. When not
@@ -75,7 +75,6 @@ if os.name == 'nt':
                 raise
         _check(ctypes.windll.kernel32.CloseHandle(handle))
 
-
 else:
 
     def kill(pid, logfn, tryhard=True):
@@ -1,5 +1,6 @@
 from mercurial.utils import procutil
 
+
 # XXX: we should probably offer a devel option to do this in blackbox directly
 def getuser():
     return b'bob'
@@ -223,6 +223,7 @@ if WINDOWS:
 # For Windows support
 wifexited = getattr(os, "WIFEXITED", lambda x: False)
 
+
 # Whether to use IPv6
 def checksocketfamily(name, port=20058):
     """return true if we can listen on localhost using family=name
@@ -3397,7 +3398,6 @@ class TestRunner:
                 os.path.basename(t).startswith(b'test-')
                 and (t.endswith(b'.py') or t.endswith(b'.t'))
             ):
-
                 m = testcasepattern.match(os.path.basename(t))
                 if m is not None:
                     t_basename, casestr = m.groups()
@@ -87,6 +87,7 @@ def test_missingancestors(seed, rng):
     testcount = 10
     inccount = 10
     nerrs = [0]
+
     # the default mu and sigma give us a nice distribution of mostly
     # single-digit counts (including 0) with some higher ones
     def lognormrandom(mu, sigma):
@@ -55,7 +55,6 @@ class localthing(thing):
 
 # usage of "thing" interface
 def use(it):
-
     # Direct call to base method shared between client and server.
     bprint(it.hello())
 
@@ -106,6 +105,7 @@ def unescapearg(escaped):
 
 # server side
 
+
 # equivalent of wireproto's global functions
 class server:
     def __init__(self, local):
@@ -156,6 +156,7 @@ myserver = server(mylocal)
 
 # local side
 
+
 # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling
 # here we just transform the strings a bit to check we're properly en-/decoding
 def mangle(s):
@@ -216,11 +216,11 @@ class BytestringTests(TestCase):
         for size in lens:
             if size < 24:
                 hlen = 1
-            elif size < 2 ** 8:
+            elif size < 2**8:
                 hlen = 2
-            elif size < 2 ** 16:
+            elif size < 2**16:
                 hlen = 3
-            elif size < 2 ** 32:
+            elif size < 2**32:
                 hlen = 5
             else:
                 assert False
@@ -487,7 +487,7 @@ class IntTests(TestCase):
         )
 
     def testdecodepartialushort(self):
-        encoded = b''.join(cborutil.streamencode(2 ** 15))
+        encoded = b''.join(cborutil.streamencode(2**15))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -499,7 +499,7 @@ class IntTests(TestCase):
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:5]),
-            (True, 2 ** 15, 3, cborutil.SPECIAL_NONE),
+            (True, 2**15, 3, cborutil.SPECIAL_NONE),
         )
 
     def testdecodepartialshort(self):
@@ -519,7 +519,7 @@ class IntTests(TestCase):
         )
 
     def testdecodepartialulong(self):
-        encoded = b''.join(cborutil.streamencode(2 ** 28))
+        encoded = b''.join(cborutil.streamencode(2**28))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -539,7 +539,7 @@ class IntTests(TestCase):
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:5]),
-            (True, 2 ** 28, 5, cborutil.SPECIAL_NONE),
+            (True, 2**28, 5, cborutil.SPECIAL_NONE),
         )
 
     def testdecodepartiallong(self):
@@ -567,7 +567,7 @@ class IntTests(TestCase):
         )
 
     def testdecodepartialulonglong(self):
-        encoded = b''.join(cborutil.streamencode(2 ** 32))
+        encoded = b''.join(cborutil.streamencode(2**32))
 
         self.assertEqual(
             cborutil.decodeitem(encoded[0:1]),
@@ -603,7 +603,7 @@ class IntTests(TestCase):
         )
         self.assertEqual(
             cborutil.decodeitem(encoded[0:9]),
-            (True, 2 ** 32, 9, cborutil.SPECIAL_NONE),
+            (True, 2**32, 9, cborutil.SPECIAL_NONE),
         )
 
         with self.assertRaisesRegex(
@@ -15,6 +15,7 @@ from mercurial.utils import procutil
 
 testtmp = encoding.environ[b'TESTTMP']
 
+
 # prepare hgrc files
 def join(name):
     return os.path.join(testtmp, name)
@@ -26,6 +27,7 @@ with open(join(b'sysrc'), 'wb') as f:
 with open(join(b'userrc'), 'wb') as f:
     f.write(b'[ui]\neditor=e1')
 
+
 # replace rcpath functions so they point to the files above
 def systemrcpath():
     return [join(b'sysrc')]
@@ -40,6 +42,7 @@ extensions.wrapfunction(rcutil, 'default
 rcutil.systemrcpath = systemrcpath
 rcutil.userrcpath = userrcpath
 
+
 # utility to print configs
 def printconfigs(env):
     encoding.environ = env
@@ -66,6 +66,7 @@ with wrap1:
     print('context manager', dummy.getstack())
     print('context manager', dummy.getstack())
 
+
 # Wrap callable object which has no __name__
 class callableobj:
     def __call__(self):
@@ -5,7 +5,6 @@ from mercurial.utils import urlutil
 
 class ParseRequestTests(unittest.TestCase):
     def testparse(self):
-
         self.assertEqual(
             urlutil.parseurl(b'http://example.com/no/anchor'),
             (b'http://example.com/no/anchor', (None, [])),
@@ -5,7 +5,6 @@ from mercurial import store
 
 class hybridencodetests(unittest.TestCase):
     def hybridencode(self, input, want):
-
         # Check the C implementation if it's in use
         got = store._pathencode(input)
         self.assertEqual(want, got)
@@ -26,6 +26,7 @@ from mercurial.revlogutils import (
 
 parsers = policy.importmod('parsers')
 
+
 # original python implementation
 def gettype(q):
     return int(q & 0xFFFF)
@@ -186,7 +186,7 @@ class datapacktestsbase:
             content = b'put-something-here \n' * i
             node = self.getHash(content)
             meta = {
-                constants.METAKEYFLAG: i ** 4,
+                constants.METAKEYFLAG: i**4,
                 constants.METAKEYSIZE: len(content),
                 b'Z': b'random_string',
                 b'_': b'\0' * i,
@@ -177,7 +177,7 @@ class histpacktests(unittest.TestCase):
         pack = self.createPack(revisions)
 
         # Verify the pack contents
-        for (filename, node) in allentries:
+        for filename, node in allentries:
             ancestors = pack.getancestors(filename, node)
             self.assertEqual(ancestorcounts[(filename, node)], len(ancestors))
             for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
@@ -246,7 +246,7 @@ def genbits(n):
     That is to say, given any x, y where both x, and y are in range(2 ** n),
     there is an x followed immediately by y in the generated sequence.
     """
-    m = 2 ** n
+    m = 2**n
 
     # Gray Code. See https://en.wikipedia.org/wiki/Gray_code
     gray = lambda x: x ^ (x >> 1)
@@ -24,6 +24,8 @@ from mercurial import (
 from mercurial.utils import stringutil
 
 TestCase = unittest.TestCase
+
+
 # bzr compatible interface, for the tests
 class Merge3(simplemerge.Merge3Text):
     """3-way merge of texts.
@@ -25,6 +25,7 @@ ui_.setconfig(b'ui', b'formatted', b'Tru
 # we're not interested in the output, so write that to devnull
 ui_.fout = open(os.devnull, 'wb')
 
+
 # call some arbitrary command just so we go through
 # color's wrapped _runcommand twice.
 def runcmd():
@@ -615,8 +615,8 @@ settings.register_profile(
     settings(
         timeout=-1,
         stateful_step_count=1000,
-        max_examples=10 ** 8,
-        max_iterations=10 ** 8,
+        max_examples=10**8,
+        max_iterations=10**8,
         database=writeonlydatabase(settings.default.database),
     ),
 )
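Closing note: to reproduce or verify a sweep like this locally, something along these lines should work (a sketch assuming the standard black entry point, not any repo-specific wrapper script):

    $ pip install black==23.3.0
    $ black .            # rewrite files in place
    $ black --check .    # verify only; exits non-zero if anything would change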