formating: upgrade to black 20.8b1...
Augie Fackler
r46554:89a2afe3 (branch: default)

The requested changes are too big, so the diff shown below is truncated.

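The hunks below are pure formatting: the tree was re-run through the newer black and the output committed, so no hunk changes runtime behavior. As a hedged reproduction sketch (the black version comes from the commit message; the --skip-string-normalization flag is an assumption inferred from the diff, where single-quoted string literals survive untouched):

    # Hypothetical reproduction, not Mercurial's actual tooling invocation.
    import subprocess

    # Pin the formatter version named in the commit message.
    subprocess.run(["pip", "install", "black==20.8b1"], check=True)
    # Reformat the working tree; --skip-string-normalization keeps the
    # codebase's single-quoted strings as-is (assumed from the diff).
    subprocess.run(["black", "--skip-string-normalization", "."], check=True)
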
@@ -152,7 +152,11 b" ASSUME_ROLE_POLICY_DOCUMENT = '''"
152 152
153 153
154 154 IAM_INSTANCE_PROFILES = {
155 'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
155 'ephemeral-ec2-1': {
156 'roles': [
157 'ephemeral-ec2-role-1',
158 ],
159 }
156 160 }
157 161
158 162
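This first hunk is the headline change in the 20.8b1 line of black: the "magic trailing comma". A collection literal, call, or function signature that already ends with a trailing comma is no longer collapsed onto one line; each element is exploded onto its own line instead. Most hunks in this diff are that rule firing. A minimal runnable sketch, with names borrowed from the hunk above (the variable names are illustrative):

    # Older black collapsed this despite the trailing commas.
    profiles_old = {'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}}

    # black 20.8b1 treats each trailing comma as "keep this exploded".
    profiles_new = {
        'ephemeral-ec2-1': {
            'roles': [
                'ephemeral-ec2-role-1',
            ],
        }
    }

    assert profiles_old == profiles_new  # formatting only; no behavior change

The same rule produces the exploded keyword arguments (WaiterConfig={...}), the many configitem(...) calls, and the multi-line def ensure_windows_dev_ami(...) signature later in the diff.
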
@@ -469,10 +473,22 b' def find_image(ec2resource, owner_id, na'
469 473
470 474 images = ec2resource.images.filter(
471 475 Filters=[
472 {'Name': 'owner-id', 'Values': [owner_id],},
473 {'Name': 'state', 'Values': ['available'],},
474 {'Name': 'image-type', 'Values': ['machine'],},
475 {'Name': 'name', 'Values': [name],},
476 {
477 'Name': 'owner-id',
478 'Values': [owner_id],
479 },
480 {
481 'Name': 'state',
482 'Values': ['available'],
483 },
484 {
485 'Name': 'image-type',
486 'Values': ['machine'],
487 },
488 {
489 'Name': 'name',
490 'Values': [name],
491 },
476 492 ]
477 493 )
478 494
@@ -519,10 +535,13 b' def ensure_security_groups(ec2resource, '
519 535 print('adding security group %s' % actual)
520 536
521 537 group_res = ec2resource.create_security_group(
522 Description=group['description'], GroupName=actual,
538 Description=group['description'],
539 GroupName=actual,
523 540 )
524 541
525 group_res.authorize_ingress(IpPermissions=group['ingress'],)
542 group_res.authorize_ingress(
543 IpPermissions=group['ingress'],
544 )
526 545
527 546 security_groups[name] = group_res
528 547
@@ -614,7 +633,10 b' def wait_for_ssm(ssmclient, instances):'
614 633 while True:
615 634 res = ssmclient.describe_instance_information(
616 635 Filters=[
617 {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
636 {
637 'Key': 'InstanceIds',
638 'Values': [i.id for i in instances],
639 },
618 640 ],
619 641 )
620 642
@@ -636,7 +658,9 b' def run_ssm_command(ssmclient, instances'
636 658 InstanceIds=[i.id for i in instances],
637 659 DocumentName=document_name,
638 660 Parameters=parameters,
639 CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
661 CloudWatchOutputConfig={
662 'CloudWatchOutputEnabled': True,
663 },
640 664 )
641 665
642 666 command_id = res['Command']['CommandId']
@@ -645,7 +669,8 b' def run_ssm_command(ssmclient, instances'
645 669 while True:
646 670 try:
647 671 res = ssmclient.get_command_invocation(
648 CommandId=command_id, InstanceId=instance.id,
672 CommandId=command_id,
673 InstanceId=instance.id,
649 674 )
650 675 except botocore.exceptions.ClientError as e:
651 676 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -799,19 +824,32 b' def create_ami_from_instance('
799 824 instance.stop()
800 825
801 826 ec2client.get_waiter('instance_stopped').wait(
802 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
827 InstanceIds=[instance.id],
828 WaiterConfig={
829 'Delay': 5,
830 },
803 831 )
804 832 print('%s is stopped' % instance.id)
805 833
806 image = instance.create_image(Name=name, Description=description,)
834 image = instance.create_image(
835 Name=name,
836 Description=description,
837 )
807 838
808 839 image.create_tags(
809 Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
840 Tags=[
841 {
842 'Key': 'HGIMAGEFINGERPRINT',
843 'Value': fingerprint,
844 },
845 ]
810 846 )
811 847
812 848 print('waiting for image %s' % image.id)
813 849
814 ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
850 ec2client.get_waiter('image_available').wait(
851 ImageIds=[image.id],
852 )
815 853
816 854 print('image %s available as %s' % (image.id, image.name))
817 855
@@ -837,7 +875,9 b' def ensure_linux_dev_ami(c: AWSConnectio'
837 875 ssh_username = 'admin'
838 876 elif distro == 'debian10':
839 877 image = find_image(
840 ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
878 ec2resource,
879 DEBIAN_ACCOUNT_ID_2,
880 'debian-10-amd64-20190909-10',
841 881 )
842 882 ssh_username = 'admin'
843 883 elif distro == 'ubuntu18.04':
@@ -1066,7 +1106,9 b' def temporary_linux_dev_instances('
1066 1106
1067 1107
1068 1108 def ensure_windows_dev_ami(
1069 c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME,
1109 c: AWSConnection,
1110 prefix='hg-',
1111 base_image_name=WINDOWS_BASE_IMAGE_NAME,
1070 1112 ):
1071 1113 """Ensure Windows Development AMI is available and up-to-date.
1072 1114
@@ -1190,7 +1232,9 b' def ensure_windows_dev_ami('
1190 1232 ssmclient,
1191 1233 [instance],
1192 1234 'AWS-RunPowerShellScript',
1193 {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
1235 {
1236 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
1237 },
1194 1238 )
1195 1239
1196 1240 # Reboot so all updates are fully applied.
@@ -1202,7 +1246,10 b' def ensure_windows_dev_ami('
1202 1246 print('rebooting instance %s' % instance.id)
1203 1247 instance.stop()
1204 1248 ec2client.get_waiter('instance_stopped').wait(
1205 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
1249 InstanceIds=[instance.id],
1250 WaiterConfig={
1251 'Delay': 5,
1252 },
1206 1253 )
1207 1254
1208 1255 instance.start()
@@ -282,16 +282,20 b' def get_parser():'
282 282 help='Path for local state files',
283 283 )
284 284 parser.add_argument(
285 '--aws-region', help='AWS region to use', default='us-west-2',
285 '--aws-region',
286 help='AWS region to use',
287 default='us-west-2',
286 288 )
287 289
288 290 subparsers = parser.add_subparsers()
289 291
290 292 sp = subparsers.add_parser(
291 'bootstrap-linux-dev', help='Bootstrap Linux development environments',
293 'bootstrap-linux-dev',
294 help='Bootstrap Linux development environments',
292 295 )
293 296 sp.add_argument(
294 '--distros', help='Comma delimited list of distros to bootstrap',
297 '--distros',
298 help='Comma delimited list of distros to bootstrap',
295 299 )
296 300 sp.add_argument(
297 301 '--parallel',
@@ -312,13 +316,17 b' def get_parser():'
312 316 sp.set_defaults(func=bootstrap_windows_dev)
313 317
314 318 sp = subparsers.add_parser(
315 'build-all-windows-packages', help='Build all Windows packages',
319 'build-all-windows-packages',
320 help='Build all Windows packages',
316 321 )
317 322 sp.add_argument(
318 '--revision', help='Mercurial revision to build', default='.',
323 '--revision',
324 help='Mercurial revision to build',
325 default='.',
319 326 )
320 327 sp.add_argument(
321 '--version', help='Mercurial version string to use',
328 '--version',
329 help='Mercurial version string to use',
322 330 )
323 331 sp.add_argument(
324 332 '--base-image-name',
@@ -328,7 +336,8 b' def get_parser():'
328 336 sp.set_defaults(func=build_all_windows_packages)
329 337
330 338 sp = subparsers.add_parser(
331 'build-inno', help='Build Inno Setup installer(s)',
339 'build-inno',
340 help='Build Inno Setup installer(s)',
332 341 )
333 342 sp.add_argument(
334 343 '--python-version',
@@ -346,10 +355,13 b' def get_parser():'
346 355 default=['x64'],
347 356 )
348 357 sp.add_argument(
349 '--revision', help='Mercurial revision to build', default='.',
358 '--revision',
359 help='Mercurial revision to build',
360 default='.',
350 361 )
351 362 sp.add_argument(
352 '--version', help='Mercurial version string to use in installer',
363 '--version',
364 help='Mercurial version string to use in installer',
353 365 )
354 366 sp.add_argument(
355 367 '--base-image-name',
@@ -359,7 +371,8 b' def get_parser():'
359 371 sp.set_defaults(func=build_inno)
360 372
361 373 sp = subparsers.add_parser(
362 'build-windows-wheel', help='Build Windows wheel(s)',
374 'build-windows-wheel',
375 help='Build Windows wheel(s)',
363 376 )
364 377 sp.add_argument(
365 378 '--python-version',
@@ -376,7 +389,9 b' def get_parser():'
376 389 default=['x64'],
377 390 )
378 391 sp.add_argument(
379 '--revision', help='Mercurial revision to build', default='.',
392 '--revision',
393 help='Mercurial revision to build',
394 default='.',
380 395 )
381 396 sp.add_argument(
382 397 '--base-image-name',
@@ -402,10 +417,13 b' def get_parser():'
402 417 default=['x64'],
403 418 )
404 419 sp.add_argument(
405 '--revision', help='Mercurial revision to build', default='.',
420 '--revision',
421 help='Mercurial revision to build',
422 default='.',
406 423 )
407 424 sp.add_argument(
408 '--version', help='Mercurial version string to use in installer',
425 '--version',
426 help='Mercurial version string to use in installer',
409 427 )
410 428 sp.add_argument(
411 429 '--base-image-name',
@@ -421,11 +439,15 b' def get_parser():'
421 439 sp.set_defaults(func=terminate_ec2_instances)
422 440
423 441 sp = subparsers.add_parser(
424 'purge-ec2-resources', help='Purge all EC2 resources managed by us',
442 'purge-ec2-resources',
443 help='Purge all EC2 resources managed by us',
425 444 )
426 445 sp.set_defaults(func=purge_ec2_resources)
427 446
428 sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
447 sp = subparsers.add_parser(
448 'run-tests-linux',
449 help='Run tests on Linux',
450 )
429 451 sp.add_argument(
430 452 '--distro',
431 453 help='Linux distribution to run tests on',
@@ -468,10 +490,13 b' def get_parser():'
468 490 sp.set_defaults(func=run_tests_linux)
469 491
470 492 sp = subparsers.add_parser(
471 'run-tests-windows', help='Run tests on Windows',
493 'run-tests-windows',
494 help='Run tests on Windows',
472 495 )
473 496 sp.add_argument(
474 '--instance-type', help='EC2 instance type to use', default='t3.medium',
497 '--instance-type',
498 help='EC2 instance type to use',
499 default='t3.medium',
475 500 )
476 501 sp.add_argument(
477 502 '--python-version',
@@ -486,7 +511,8 b' def get_parser():'
486 511 default='x64',
487 512 )
488 513 sp.add_argument(
489 '--test-flags', help='Extra command line flags to pass to run-tests.py',
514 '--test-flags',
515 help='Extra command line flags to pass to run-tests.py',
490 516 )
491 517 sp.add_argument(
492 518 '--base-image-name',
@@ -514,10 +540,12 b' def get_parser():'
514 540 help='Skip uploading to www.mercurial-scm.org',
515 541 )
516 542 sp.add_argument(
517 '--ssh-username', help='SSH username for mercurial-scm.org',
543 '--ssh-username',
544 help='SSH username for mercurial-scm.org',
518 545 )
519 546 sp.add_argument(
520 'version', help='Mercurial version string to locate local packages',
547 'version',
548 help='Mercurial version string to locate local packages',
521 549 )
522 550 sp.set_defaults(func=publish_windows_artifacts)
523 551
@@ -362,7 +362,8 b' def build_inno_installer('
362 362 raise Exception("unhandled arch: %s" % arch)
363 363
364 364 ps = BUILD_INNO_PYTHON3.format(
365 pyoxidizer_target=target_triple, version=version,
365 pyoxidizer_target=target_triple,
366 version=version,
366 367 )
367 368 else:
368 369 extra_args = []
@@ -427,7 +428,8 b' def build_wix_installer('
427 428 raise Exception("unhandled arch: %s" % arch)
428 429
429 430 ps = BUILD_WIX_PYTHON3.format(
430 pyoxidizer_target=target_triple, version=version,
431 pyoxidizer_target=target_triple,
432 version=version,
431 433 )
432 434 else:
433 435 extra_args = []
@@ -460,7 +462,10 b' def run_tests(winrm_client, python_versi'
460 462
461 463 python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
462 464
463 ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
465 ps = RUN_TESTS.format(
466 python_path=python_path,
467 test_flags=test_flags or '',
468 )
464 469
465 470 run_powershell(winrm_client, ps)
466 471
@@ -213,15 +213,19 b' def replacetokens(tokens, opts):'
213 213 fn = t.string
214 214
215 215 # *attr() builtins don't accept byte strings to 2nd argument.
216 if fn in (
217 'getattr',
218 'setattr',
219 'hasattr',
220 'safehasattr',
221 'wrapfunction',
222 'wrapclass',
223 'addattr',
224 ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
216 if (
217 fn
218 in (
219 'getattr',
220 'setattr',
221 'hasattr',
222 'safehasattr',
223 'wrapfunction',
224 'wrapclass',
225 'addattr',
226 )
227 and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
228 ):
225 229 arg1idx = _findargnofcall(1)
226 230 if arg1idx is not None:
227 231 _ensuresysstr(arg1idx)
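This hunk shows a second wrapping change: when a long condition mixes operators, 20.8b1 parenthesizes the whole expression and breaks at the operators, rather than letting the first operand's parentheses carry the line breaks. A shape-only sketch with shortened stand-ins (with names this short black would in fact keep everything on one line; the real hunk splits because the tuple ends in a magic trailing comma):

    def _isop(i, c):
        # Stand-in for the real token predicate used in the hunk above.
        return False

    fn, i = 'getattr', 1
    opts = {'allow-attr-methods': False}

    # Old wrapping: the `in (...)` tuple carried the line breaks.
    if fn in ('getattr', 'setattr') and (
        opts['allow-attr-methods'] or not _isop(i - 1, '.')
    ):
        pass

    # 20.8b1 shape: the condition gets its own parentheses, and each
    # operand of `in` / `and` lands on its own line.
    if (
        fn in ('getattr', 'setattr')
        and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
    ):
        pass
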
@@ -620,13 +620,17 b' cfilters = ['
620 620 ]
621 621
622 622 inutilpats = [
623 [(r'\bui\.', "don't use ui in util"),],
623 [
624 (r'\bui\.', "don't use ui in util"),
625 ],
624 626 # warnings
625 627 [],
626 628 ]
627 629
628 630 inrevlogpats = [
629 [(r'\brepo\.', "don't use repo in revlog"),],
631 [
632 (r'\brepo\.', "don't use repo in revlog"),
633 ],
630 634 # warnings
631 635 [],
632 636 ]
@@ -44,7 +44,11 b' def build_inno(pyoxidizer_target=None, p'
44 44 )
45 45 else:
46 46 inno.build_with_py2exe(
47 SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
47 SOURCE_DIR,
48 build_dir,
49 pathlib.Path(python),
50 iscc,
51 version=version,
48 52 )
49 53
50 54
@@ -198,7 +198,11 b' def build_installer('
198 198 except jinja2.TemplateSyntaxError as e:
199 199 raise Exception(
200 200 'template syntax error at %s:%d: %s'
201 % (e.name, e.lineno, e.message,)
201 % (
202 e.name,
203 e.lineno,
204 e.message,
205 )
202 206 )
203 207
204 208 content = template.render(package_files=package_files)
@@ -517,7 +517,10 b' def run_wix_packaging('
517 517 args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
518 518
519 519 args.extend(
520 [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
520 [
521 str(build_dir / 'stage.wixobj'),
522 str(build_dir / 'mercurial.wixobj'),
523 ]
521 524 )
522 525
523 526 subprocess.run(args, cwd=str(source_dir), check=True)
@@ -291,7 +291,9 b' try:'
291 291 experimental=True,
292 292 )
293 293 configitem(
294 b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
294 b'perf',
295 b'pre-run',
296 default=mercurial.configitems.dynamicdefault,
295 297 )
296 298 configitem(
297 299 b'perf',
@@ -310,19 +312,29 b' except TypeError:'
310 312 # compatibility fix for a11fd395e83f
311 313 # hg version: 5.2
312 314 configitem(
313 b'perf', b'presleep', default=mercurial.configitems.dynamicdefault,
315 b'perf',
316 b'presleep',
317 default=mercurial.configitems.dynamicdefault,
314 318 )
315 319 configitem(
316 b'perf', b'stub', default=mercurial.configitems.dynamicdefault,
320 b'perf',
321 b'stub',
322 default=mercurial.configitems.dynamicdefault,
317 323 )
318 324 configitem(
319 b'perf', b'parentscount', default=mercurial.configitems.dynamicdefault,
325 b'perf',
326 b'parentscount',
327 default=mercurial.configitems.dynamicdefault,
320 328 )
321 329 configitem(
322 b'perf', b'all-timing', default=mercurial.configitems.dynamicdefault,
330 b'perf',
331 b'all-timing',
332 default=mercurial.configitems.dynamicdefault,
323 333 )
324 334 configitem(
325 b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
335 b'perf',
336 b'pre-run',
337 default=mercurial.configitems.dynamicdefault,
326 338 )
327 339 configitem(
328 340 b'perf',
@@ -330,7 +342,9 b' except TypeError:'
330 342 default=mercurial.configitems.dynamicdefault,
331 343 )
332 344 configitem(
333 b'perf', b'run-limits', default=mercurial.configitems.dynamicdefault,
345 b'perf',
346 b'run-limits',
347 default=mercurial.configitems.dynamicdefault,
334 348 )
335 349
336 350
@@ -385,8 +399,7 b' def gettimer(ui, opts=None):'
385 399 from mercurial import node
386 400
387 401 class defaultformatter(object):
388 """Minimized composition of baseformatter and plainformatter
389 """
402 """Minimized composition of baseformatter and plainformatter"""
390 403
391 404 def __init__(self, ui, topic, opts):
392 405 self._ui = ui
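From here on a second family of changes appears: 20.8b1 normalizes docstrings. The simplest case, shown in this hunk and in the getsvfs/getvfs hunks just below, is that a docstring whose closing quotes sit alone on the next line is collapsed onto a single line when it fits. A minimal sketch (hypothetical function names):

    def getsvfs_before(repo):
        """Return appropriate object to access files under .hg/store
        """

    def getsvfs_after(repo):
        """Return appropriate object to access files under .hg/store"""
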
@@ -658,8 +671,7 b' def getbranchmapsubsettable():'
658 671
659 672
660 673 def getsvfs(repo):
661 """Return appropriate object to access files under .hg/store
662 """
674 """Return appropriate object to access files under .hg/store"""
663 675 # for "historical portability":
664 676 # repo.svfs has been available since 2.3 (or 7034365089bf)
665 677 svfs = getattr(repo, 'svfs', None)
@@ -670,8 +682,7 b' def getsvfs(repo):'
670 682
671 683
672 684 def getvfs(repo):
673 """Return appropriate object to access files under .hg
674 """
685 """Return appropriate object to access files under .hg"""
675 686 # for "historical portability":
676 687 # repo.vfs has been available since 2.3 (or 7034365089bf)
677 688 vfs = getattr(repo, 'vfs', None)
@@ -682,8 +693,7 b' def getvfs(repo):'
682 693
683 694
684 695 def repocleartagscachefunc(repo):
685 """Return the function to clear tags cache according to repo internal API
686 """
696 """Return the function to clear tags cache according to repo internal API"""
687 697 if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525)
688 698 # in this case, setattr(repo, '_tagscache', None) or so isn't
689 699 # correct way to clear tags cache, because existing code paths
@@ -847,7 +857,9 b' def perfheads(ui, repo, **opts):'
847 857 @command(
848 858 b'perftags',
849 859 formatteropts
850 + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
860 + [
861 (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
862 ],
851 863 )
852 864 def perftags(ui, repo, **opts):
853 865 opts = _byteskwargs(opts)
@@ -900,8 +912,7 b' def perfancestorset(ui, repo, revset, **'
900 912
901 913 @command(b'perfdiscovery', formatteropts, b'PATH')
902 914 def perfdiscovery(ui, repo, path, **opts):
903 """benchmark discovery between local repo and the peer at given path
904 """
915 """benchmark discovery between local repo and the peer at given path"""
905 916 repos = [repo, None]
906 917 timer, fm = gettimer(ui, opts)
907 918 path = ui.expandpath(path)
@@ -919,7 +930,9 b' def perfdiscovery(ui, repo, path, **opts'
919 930 @command(
920 931 b'perfbookmarks',
921 932 formatteropts
922 + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
933 + [
934 (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
935 ],
923 936 )
924 937 def perfbookmarks(ui, repo, **opts):
925 938 """benchmark parsing bookmarks from disk to memory"""
@@ -1184,8 +1197,7 b' def perfdirstate(ui, repo, **opts):'
1184 1197
1185 1198 @command(b'perfdirstatedirs', formatteropts)
1186 1199 def perfdirstatedirs(ui, repo, **opts):
1187 """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache
1188 """
1200 """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache"""
1189 1201 opts = _byteskwargs(opts)
1190 1202 timer, fm = gettimer(ui, opts)
1191 1203 repo.dirstate.hasdir(b"a")
@@ -1245,8 +1257,7 b' def perfdirfoldmap(ui, repo, **opts):'
1245 1257
1246 1258 @command(b'perfdirstatewrite', formatteropts)
1247 1259 def perfdirstatewrite(ui, repo, **opts):
1248 """benchmap the time it take to write a dirstate on disk
1249 """
1260 """benchmap the time it take to write a dirstate on disk"""
1250 1261 opts = _byteskwargs(opts)
1251 1262 timer, fm = gettimer(ui, opts)
1252 1263 ds = repo.dirstate
@@ -1359,7 +1370,9 b' def perfpathcopies(ui, repo, rev1, rev2,'
1359 1370
1360 1371 @command(
1361 1372 b'perfphases',
1362 [(b'', b'full', False, b'include file reading time too'),],
1373 [
1374 (b'', b'full', False, b'include file reading time too'),
1375 ],
1363 1376 b"",
1364 1377 )
1365 1378 def perfphases(ui, repo, **opts):
@@ -1839,7 +1852,10 b' def perfmoonwalk(ui, repo, **opts):'
1839 1852
1840 1853 @command(
1841 1854 b'perftemplating',
1842 [(b'r', b'rev', [], b'revisions to run the template on'),] + formatteropts,
1855 [
1856 (b'r', b'rev', [], b'revisions to run the template on'),
1857 ]
1858 + formatteropts,
1843 1859 )
1844 1860 def perftemplating(ui, repo, testedtemplate=None, **opts):
1845 1861 """test the rendering time of a given template"""
@@ -2193,10 +2209,18 b' def perfhelperpathcopies(ui, repo, revs='
2193 2209 }
2194 2210 if dostats:
2195 2211 alldata['nbrevs'].append(
2196 (data['nbrevs'], base.hex(), parent.hex(),)
2212 (
2213 data['nbrevs'],
2214 base.hex(),
2215 parent.hex(),
2216 )
2197 2217 )
2198 2218 alldata['nbmissingfiles'].append(
2199 (data['nbmissingfiles'], base.hex(), parent.hex(),)
2219 (
2220 data['nbmissingfiles'],
2221 base.hex(),
2222 parent.hex(),
2223 )
2200 2224 )
2201 2225 if dotiming:
2202 2226 begin = util.timer()
@@ -2207,10 +2231,18 b' def perfhelperpathcopies(ui, repo, revs='
2207 2231 data['nbrenamedfiles'] = len(renames)
2208 2232 if dostats:
2209 2233 alldata['time'].append(
2210 (data['time'], base.hex(), parent.hex(),)
2234 (
2235 data['time'],
2236 base.hex(),
2237 parent.hex(),
2238 )
2211 2239 )
2212 2240 alldata['nbrenames'].append(
2213 (data['nbrenamedfiles'], base.hex(), parent.hex(),)
2241 (
2242 data['nbrenamedfiles'],
2243 base.hex(),
2244 parent.hex(),
2245 )
2214 2246 )
2215 2247 fm.startitem()
2216 2248 fm.data(**data)
@@ -3321,7 +3353,9 b' def perfrevset(ui, repo, expr, clear=Fal'
3321 3353
3322 3354 @command(
3323 3355 b'perfvolatilesets',
3324 [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),]
3356 [
3357 (b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
3358 ]
3325 3359 + formatteropts,
3326 3360 )
3327 3361 def perfvolatilesets(ui, repo, *names, **opts):
@@ -3807,8 +3841,7 b' def perflrucache('
3807 3841 ],
3808 3842 )
3809 3843 def perfwrite(ui, repo, **opts):
3810 """microbenchmark ui.write (and others)
3811 """
3844 """microbenchmark ui.write (and others)"""
3812 3845 opts = _byteskwargs(opts)
3813 3846
3814 3847 write = getattr(ui, _sysstr(opts[b'write_method']))
@@ -9,12 +9,12 b' from mercurial import ('
9 9
10 10
11 11 def diffstat(ui, repo, **kwargs):
12 '''Example usage:
12 """Example usage:
13 13
14 14 [hooks]
15 15 commit.diffstat = python:/path/to/this/file.py:diffstat
16 16 changegroup.diffstat = python:/path/to/this/file.py:diffstat
17 '''
17 """
18 18 if kwargs.get('parent2'):
19 19 return
20 20 node = kwargs['node']
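This diffstat hunk shows the quote half of docstring normalization: '''-quoted docstrings are rewritten to """-quoted ones. Note the contrast with ordinary string literals, which keep their single quotes throughout this diff (Mercurial evidently runs black with string normalization skipped); docstring quotes are normalized regardless. Sketch of the change above:

    def diffstat_before(ui, repo, **kwargs):
        '''Example usage:

        [hooks]
        commit.diffstat = python:/path/to/this/file.py:diffstat
        '''

    def diffstat_after(ui, repo, **kwargs):
        """Example usage:

        [hooks]
        commit.diffstat = python:/path/to/this/file.py:diffstat
        """
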
@@ -53,7 +53,10 b' SOURCES = ['
53 53 # Headers whose preprocessed output will be fed into cdef().
54 54 HEADERS = [
55 55 os.path.join(HERE, "zstd", *p)
56 for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
56 for p in (
57 ("zstd.h",),
58 ("dictBuilder", "zdict.h"),
59 )
57 60 ]
58 61
59 62 INCLUDE_DIRS = [
@@ -80,12 +83,20 b' if hasattr(compiler, "initialize"):'
80 83 if compiler.compiler_type == "unix":
81 84 args = list(compiler.executables["compiler"])
82 85 args.extend(
83 ["-E", "-DZSTD_STATIC_LINKING_ONLY", "-DZDICT_STATIC_LINKING_ONLY",]
86 [
87 "-E",
88 "-DZSTD_STATIC_LINKING_ONLY",
89 "-DZDICT_STATIC_LINKING_ONLY",
90 ]
84 91 )
85 92 elif compiler.compiler_type == "msvc":
86 93 args = [compiler.cc]
87 94 args.extend(
88 ["/EP", "/DZSTD_STATIC_LINKING_ONLY", "/DZDICT_STATIC_LINKING_ONLY",]
95 [
96 "/EP",
97 "/DZSTD_STATIC_LINKING_ONLY",
98 "/DZDICT_STATIC_LINKING_ONLY",
99 ]
89 100 )
90 101 else:
91 102 raise Exception("unsupported compiler type: %s" % compiler.compiler_type)
@@ -26,7 +26,9 b' import sys'
26 26
27 27 _hgenv = dict(os.environ)
28 28 _hgenv.update(
29 {'HGPLAIN': '1',}
29 {
30 'HGPLAIN': '1',
31 }
30 32 )
31 33
32 34 _HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
@@ -122,7 +122,7 b' def parsegitdiff(lines):'
122 122 optionalrepo=True,
123 123 )
124 124 def analyze(ui, repo, *revs, **opts):
125 '''create a simple model of a repository to use for later synthesis
125 """create a simple model of a repository to use for later synthesis
126 126
127 127 This command examines every changeset in the given range (or all
128 128 of history if none are specified) and creates a simple statistical
@@ -133,7 +133,7 b' def analyze(ui, repo, *revs, **opts):'
133 133 :hg:`synthesize` to create or augment a repository with synthetic
134 134 commits that have a structure that is statistically similar to the
135 135 analyzed repository.
136 '''
136 """
137 137 root = repo.root
138 138 if not root.endswith(os.path.sep):
139 139 root += os.path.sep
@@ -281,7 +281,7 b' def analyze(ui, repo, *revs, **opts):'
281 281 _('hg synthesize [OPTION].. DESCFILE'),
282 282 )
283 283 def synthesize(ui, repo, descpath, **opts):
284 '''synthesize commits based on a model of an existing repository
284 """synthesize commits based on a model of an existing repository
285 285
286 286 The model must have been generated by :hg:`analyze`. Commits will
287 287 be generated randomly according to the probabilities described in
@@ -293,7 +293,7 b' def synthesize(ui, repo, descpath, **opt'
293 293 names, words will be chosen randomly from a dictionary that is
294 294 presumed to contain one word per line. Use --dict to specify the
295 295 path to an alternate dictionary to use.
296 '''
296 """
297 297 try:
298 298 fp = hg.openpath(ui, descpath)
299 299 except Exception as err:
@@ -542,12 +542,12 b' def renamedirs(dirs, words):'
542 542 replacements = {'': ''}
543 543
544 544 def rename(dirpath):
545 '''Recursively rename the directory and all path prefixes.
545 """Recursively rename the directory and all path prefixes.
546 546
547 547 The mapping from path to renamed path is stored for all path prefixes
548 548 as in dynamic programming, ensuring linear runtime and consistent
549 549 renaming regardless of iteration order through the model.
550 '''
550 """
551 551 if dirpath in replacements:
552 552 return replacements[dirpath]
553 553 head, _ = os.path.split(dirpath)
@@ -81,8 +81,7 b' def writeerr(data):'
81 81
82 82
83 83 class embeddedmatcher(object): # pytype: disable=ignored-metaclass
84 """Base class to detect embedded code fragments in *.t test script
85 """
84 """Base class to detect embedded code fragments in *.t test script"""
86 85
87 86 __metaclass__ = abc.ABCMeta
88 87
@@ -103,8 +102,7 b' class embeddedmatcher(object): # pytype'
103 102
104 103 @abc.abstractmethod
105 104 def isinside(self, ctx, line):
106 """Examine whether line is inside embedded code, if not yet endsat
107 """
105 """Examine whether line is inside embedded code, if not yet endsat"""
108 106
109 107 @abc.abstractmethod
110 108 def ignores(self, ctx):
@@ -822,7 +822,10 b' class Translator(nodes.NodeVisitor):'
822 822 # man 7 man argues to use ".IP" instead of ".TP"
823 823 self.body.append(
824 824 '.IP %s %d\n'
825 % (next(self._list_char[-1]), self._list_char[-1].get_width(),)
825 % (
826 next(self._list_char[-1]),
827 self._list_char[-1].get_width(),
828 )
826 829 )
827 830
828 831 def depart_list_item(self, node):
@@ -239,25 +239,44 b' configitem = registrar.configitem(config'
239 239
240 240 # deprecated config: acl.config
241 241 configitem(
242 b'acl', b'config', default=None,
242 b'acl',
243 b'config',
244 default=None,
243 245 )
244 246 configitem(
245 b'acl.groups', b'.*', default=None, generic=True,
247 b'acl.groups',
248 b'.*',
249 default=None,
250 generic=True,
246 251 )
247 252 configitem(
248 b'acl.deny.branches', b'.*', default=None, generic=True,
253 b'acl.deny.branches',
254 b'.*',
255 default=None,
256 generic=True,
249 257 )
250 258 configitem(
251 b'acl.allow.branches', b'.*', default=None, generic=True,
259 b'acl.allow.branches',
260 b'.*',
261 default=None,
262 generic=True,
252 263 )
253 264 configitem(
254 b'acl.deny', b'.*', default=None, generic=True,
265 b'acl.deny',
266 b'.*',
267 default=None,
268 generic=True,
255 269 )
256 270 configitem(
257 b'acl.allow', b'.*', default=None, generic=True,
271 b'acl.allow',
272 b'.*',
273 default=None,
274 generic=True,
258 275 )
259 276 configitem(
260 b'acl', b'sources', default=lambda: [b'serve'],
277 b'acl',
278 b'sources',
279 default=lambda: [b'serve'],
261 280 )
262 281
263 282
@@ -42,7 +42,9 b' configtable = {}'
42 42 configitem = registrar.configitem(configtable)
43 43
44 44 configitem(
45 b'automv', b'similarity', default=95,
45 b'automv',
46 b'similarity',
47 default=95,
46 48 )
47 49
48 50
@@ -72,19 +72,29 b' configtable = {}'
72 72 configitem = registrar.configitem(configtable)
73 73
74 74 configitem(
75 b'blackbox', b'dirty', default=False,
75 b'blackbox',
76 b'dirty',
77 default=False,
76 78 )
77 79 configitem(
78 b'blackbox', b'maxsize', default=b'1 MB',
80 b'blackbox',
81 b'maxsize',
82 default=b'1 MB',
79 83 )
80 84 configitem(
81 b'blackbox', b'logsource', default=False,
85 b'blackbox',
86 b'logsource',
87 default=False,
82 88 )
83 89 configitem(
84 b'blackbox', b'maxfiles', default=7,
90 b'blackbox',
91 b'maxfiles',
92 default=7,
85 93 )
86 94 configitem(
87 b'blackbox', b'track', default=lambda: [b'*'],
95 b'blackbox',
96 b'track',
97 default=lambda: [b'*'],
88 98 )
89 99 configitem(
90 100 b'blackbox',
@@ -92,7 +102,9 b' configitem('
92 102 default=lambda: [b'chgserver', b'cmdserver', b'extension'],
93 103 )
94 104 configitem(
95 b'blackbox', b'date-format', default=b'%Y/%m/%d %H:%M:%S',
105 b'blackbox',
106 b'date-format',
107 default=b'%Y/%m/%d %H:%M:%S',
96 108 )
97 109
98 110 _lastlogger = loggingutil.proxylogger()
@@ -189,14 +201,15 b' def reposetup(ui, repo):'
189 201
190 202 @command(
191 203 b'blackbox',
192 [(b'l', b'limit', 10, _(b'the number of events to show')),],
204 [
205 (b'l', b'limit', 10, _(b'the number of events to show')),
206 ],
193 207 _(b'hg blackbox [OPTION]...'),
194 208 helpcategory=command.CATEGORY_MAINTENANCE,
195 209 helpbasic=True,
196 210 )
197 211 def blackbox(ui, repo, *revs, **opts):
198 '''view the recent repository events
199 '''
212 """view the recent repository events"""
200 213
201 214 if not repo.vfs.exists(b'blackbox.log'):
202 215 return
@@ -325,22 +325,34 b' configtable = {}'
325 325 configitem = registrar.configitem(configtable)
326 326
327 327 configitem(
328 b'bugzilla', b'apikey', default=b'',
328 b'bugzilla',
329 b'apikey',
330 default=b'',
329 331 )
330 332 configitem(
331 b'bugzilla', b'bzdir', default=b'/var/www/html/bugzilla',
333 b'bugzilla',
334 b'bzdir',
335 default=b'/var/www/html/bugzilla',
332 336 )
333 337 configitem(
334 b'bugzilla', b'bzemail', default=None,
338 b'bugzilla',
339 b'bzemail',
340 default=None,
335 341 )
336 342 configitem(
337 b'bugzilla', b'bzurl', default=b'http://localhost/bugzilla/',
343 b'bugzilla',
344 b'bzurl',
345 default=b'http://localhost/bugzilla/',
338 346 )
339 347 configitem(
340 b'bugzilla', b'bzuser', default=None,
348 b'bugzilla',
349 b'bzuser',
350 default=None,
341 351 )
342 352 configitem(
343 b'bugzilla', b'db', default=b'bugs',
353 b'bugzilla',
354 b'db',
355 default=b'bugs',
344 356 )
345 357 configitem(
346 358 b'bugzilla',
@@ -353,19 +365,29 b' configitem('
353 365 ),
354 366 )
355 367 configitem(
356 b'bugzilla', b'fixresolution', default=b'FIXED',
368 b'bugzilla',
369 b'fixresolution',
370 default=b'FIXED',
357 371 )
358 372 configitem(
359 b'bugzilla', b'fixstatus', default=b'RESOLVED',
373 b'bugzilla',
374 b'fixstatus',
375 default=b'RESOLVED',
360 376 )
361 377 configitem(
362 b'bugzilla', b'host', default=b'localhost',
378 b'bugzilla',
379 b'host',
380 default=b'localhost',
363 381 )
364 382 configitem(
365 b'bugzilla', b'notify', default=configitem.dynamicdefault,
383 b'bugzilla',
384 b'notify',
385 default=configitem.dynamicdefault,
366 386 )
367 387 configitem(
368 b'bugzilla', b'password', default=None,
388 b'bugzilla',
389 b'password',
390 default=None,
369 391 )
370 392 configitem(
371 393 b'bugzilla',
@@ -377,25 +399,39 b' configitem('
377 399 ),
378 400 )
379 401 configitem(
380 b'bugzilla', b'strip', default=0,
402 b'bugzilla',
403 b'strip',
404 default=0,
381 405 )
382 406 configitem(
383 b'bugzilla', b'style', default=None,
407 b'bugzilla',
408 b'style',
409 default=None,
384 410 )
385 411 configitem(
386 b'bugzilla', b'template', default=None,
412 b'bugzilla',
413 b'template',
414 default=None,
387 415 )
388 416 configitem(
389 b'bugzilla', b'timeout', default=5,
417 b'bugzilla',
418 b'timeout',
419 default=5,
390 420 )
391 421 configitem(
392 b'bugzilla', b'user', default=b'bugs',
422 b'bugzilla',
423 b'user',
424 default=b'bugs',
393 425 )
394 426 configitem(
395 b'bugzilla', b'usermap', default=None,
427 b'bugzilla',
428 b'usermap',
429 default=None,
396 430 )
397 431 configitem(
398 b'bugzilla', b'version', default=None,
432 b'bugzilla',
433 b'version',
434 default=None,
399 435 )
400 436
401 437
@@ -430,29 +466,29 b' class bzaccess(object):'
430 466 '''remove bug IDs where node occurs in comment text from bugs.'''
431 467
432 468 def updatebug(self, bugid, newstate, text, committer):
433 '''update the specified bug. Add comment text and set new states.
469 """update the specified bug. Add comment text and set new states.
434 470
435 471 If possible add the comment as being from the committer of
436 472 the changeset. Otherwise use the default Bugzilla user.
437 '''
473 """
438 474
439 475 def notify(self, bugs, committer):
440 '''Force sending of Bugzilla notification emails.
476 """Force sending of Bugzilla notification emails.
441 477
442 478 Only required if the access method does not trigger notification
443 479 emails automatically.
444 '''
480 """
445 481
446 482
447 483 # Bugzilla via direct access to MySQL database.
448 484 class bzmysql(bzaccess):
449 '''Support for direct MySQL access to Bugzilla.
485 """Support for direct MySQL access to Bugzilla.
450 486
451 487 The earliest Bugzilla version this is tested with is version 2.16.
452 488
453 489 If your Bugzilla is version 3.4 or above, you are strongly
454 490 recommended to use the XMLRPC access method instead.
455 '''
491 """
456 492
457 493 @staticmethod
458 494 def sql_buglist(ids):
@@ -581,9 +617,9 b' class bzmysql(bzaccess):'
581 617 return userid
582 618
583 619 def get_bugzilla_user(self, committer):
584 '''See if committer is a registered bugzilla user. Return
620 """See if committer is a registered bugzilla user. Return
585 621 bugzilla username and userid if so. If not, return default
586 bugzilla username and userid.'''
622 bugzilla username and userid."""
587 623 user = self.map_committer(committer)
588 624 try:
589 625 userid = self.get_user_id(user)
@@ -604,10 +640,10 b' class bzmysql(bzaccess):'
604 640 return (user, userid)
605 641
606 642 def updatebug(self, bugid, newstate, text, committer):
607 '''update bug state with comment text.
643 """update bug state with comment text.
608 644
609 645 Try adding comment as committer of changeset, otherwise as
610 default bugzilla user.'''
646 default bugzilla user."""
611 647 if len(newstate) > 0:
612 648 self.ui.warn(_(b"Bugzilla/MySQL cannot update bug state\n"))
613 649
@@ -869,7 +905,7 b' class bzxmlrpcemail(bzxmlrpc):'
869 905 return b"@%s = %s" % (fieldname, pycompat.bytestr(value))
870 906
871 907 def send_bug_modify_email(self, bugid, commands, comment, committer):
872 '''send modification message to Bugzilla bug via email.
908 """send modification message to Bugzilla bug via email.
873 909
874 910 The message format is documented in the Bugzilla email_in.pl
875 911 specification. commands is a list of command lines, comment is the
@@ -878,7 +914,7 b' class bzxmlrpcemail(bzxmlrpc):'
878 914 To stop users from crafting commit comments with
879 915 Bugzilla commands, specify the bug ID via the message body, rather
880 916 than the subject line, and leave a blank line after it.
881 '''
917 """
882 918 user = self.map_committer(committer)
883 919 matches = self.bzproxy.User.get(
884 920 {b'match': [user], b'token': self.bztoken}
@@ -1016,11 +1052,11 b' class bzrestapi(bzaccess):'
1016 1052 del bugs[bugid]
1017 1053
1018 1054 def updatebug(self, bugid, newstate, text, committer):
1019 '''update the specified bug. Add comment text and set new states.
1055 """update the specified bug. Add comment text and set new states.
1020 1056
1021 1057 If possible add the comment as being from the committer of
1022 1058 the changeset. Otherwise use the default Bugzilla user.
1023 '''
1059 """
1024 1060 bugmod = {}
1025 1061 if b'hours' in newstate:
1026 1062 bugmod[b'work_time'] = newstate[b'hours']
@@ -1050,11 +1086,11 b' class bzrestapi(bzaccess):'
1050 1086 self.ui.debug(b'added comment to bug %s\n' % bugid)
1051 1087
1052 1088 def notify(self, bugs, committer):
1053 '''Force sending of Bugzilla notification emails.
1089 """Force sending of Bugzilla notification emails.
1054 1090
1055 1091 Only required if the access method does not trigger notification
1056 1092 emails automatically.
1057 '''
1093 """
1058 1094 pass
1059 1095
1060 1096
@@ -1092,12 +1128,12 b' class bugzilla(object):'
1092 1128 self.split_re = re.compile(br'\D+')
1093 1129
1094 1130 def find_bugs(self, ctx):
1095 '''return bugs dictionary created from commit comment.
1131 """return bugs dictionary created from commit comment.
1096 1132
1097 1133 Extract bug info from changeset comments. Filter out any that are
1098 1134 not known to Bugzilla, and any that already have a reference to
1099 1135 the given changeset in their comments.
1100 '''
1136 """
1101 1137 start = 0
1102 1138 bugs = {}
1103 1139 bugmatch = self.bug_re.search(ctx.description(), start)
@@ -1152,8 +1188,8 b' class bugzilla(object):'
1152 1188 '''update bugzilla bug with reference to changeset.'''
1153 1189
1154 1190 def webroot(root):
1155 '''strip leading prefix of repo root and turn into
1156 url-safe path.'''
1191 """strip leading prefix of repo root and turn into
1192 url-safe path."""
1157 1193 count = int(self.ui.config(b'bugzilla', b'strip'))
1158 1194 root = util.pconvert(root)
1159 1195 while count > 0:
@@ -1195,9 +1231,9 b' class bugzilla(object):'
1195 1231
1196 1232
1197 1233 def hook(ui, repo, hooktype, node=None, **kwargs):
1198 '''add comment to bugzilla for each changeset that refers to a
1234 """add comment to bugzilla for each changeset that refers to a
1199 1235 bugzilla bug id. only add a comment once per bug, so same change
1200 seen multiple times does not fill bug with duplicate data.'''
1236 seen multiple times does not fill bug with duplicate data."""
1201 1237 if node is None:
1202 1238 raise error.Abort(
1203 1239 _(b'hook type %s does not pass a changeset id') % hooktype
@@ -156,7 +156,7 b' def countrate(ui, repo, amap, *pats, **o'
156 156 inferrepo=True,
157 157 )
158 158 def churn(ui, repo, *pats, **opts):
159 '''histogram of changes to the repository
159 """histogram of changes to the repository
160 160
161 161 This command will display a histogram representing the number
162 162 of changed lines or revisions, grouped according to the given
@@ -193,7 +193,7 b' def churn(ui, repo, *pats, **opts):'
193 193 Such a file may be specified with the --aliases option, otherwise
194 194 a .hgchurn file will be looked for in the working directory root.
195 195 Aliases will be split from the rightmost "=".
196 '''
196 """
197 197
198 198 def pad(s, l):
199 199 return s + b" " * (l - encoding.colwidth(s))
@@ -536,7 +536,7 b' def debugsvnlog(ui, **opts):'
536 536 norepo=True,
537 537 )
538 538 def debugcvsps(ui, *args, **opts):
539 '''create changeset information from CVS
539 """create changeset information from CVS
540 540
541 541 This command is intended as a debugging tool for the CVS to
542 542 Mercurial converter, and can be used as a direct replacement for
@@ -545,7 +545,7 b' def debugcvsps(ui, *args, **opts):'
545 545 Hg debugcvsps reads the CVS rlog for current directory (or any
546 546 named directory) in the CVS repository, and converts the log to a
547 547 series of changesets based on matching commit log entries and
548 dates.'''
548 dates."""
549 549 return cvsps.debugcvsps(ui, *args, **opts)
550 550
551 551
@@ -21,7 +21,11 b' from . import common'
21 21
22 22 # these do not work with demandimport, blacklist
23 23 demandimport.IGNORES.update(
24 [b'bzrlib.transactions', b'bzrlib.urlutils', b'ElementPath',]
24 [
25 b'bzrlib.transactions',
26 b'bzrlib.urlutils',
27 b'ElementPath',
28 ]
25 29 )
26 30
27 31 try:
@@ -172,8 +172,8 b' class converter_source(object):'
172 172 self.encoding = b'utf-8'
173 173
174 174 def checkhexformat(self, revstr, mapname=b'splicemap'):
175 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
176 such format for their revision numbering
175 """fails if revstr is not a 40 byte hex. mercurial and git both uses
176 such format for their revision numbering
177 177 """
178 178 if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
179 179 raise error.Abort(
@@ -283,8 +283,7 b' class converter_source(object):'
283 283 return False
284 284
285 285 def hasnativeclose(self):
286 """Return true if this source has ability to close branch.
287 """
286 """Return true if this source has ability to close branch."""
288 287 return False
289 288
290 289 def lookuprev(self, rev):
@@ -303,8 +302,8 b' class converter_source(object):'
303 302
304 303 def checkrevformat(self, revstr, mapname=b'splicemap'):
305 304 """revstr is a string that describes a revision in the given
306 source control system. Return true if revstr has correct
307 format.
305 source control system. Return true if revstr has correct
306 format.
308 307 """
309 308 return True
310 309
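The convert/common.py hunks above show the third facet of docstring normalization: a space after the opening quotes is stripped (checkhexformat), over-indented continuation lines are re-indented flush with the docstring (checkrevformat here; the logentry and changeset class docstrings below), and a stray space before the closing quotes is removed (the watchman codec docstrings near the end of this diff). A hedged sketch, with the method pulled out as a plain function:

    def checkrevformat_before(revstr, mapname=b'splicemap'):
        """revstr is a string that describes a revision in the given
            source control system. Return true if revstr has correct
            format.
        """
        return True

    def checkrevformat_after(revstr, mapname=b'splicemap'):
        """revstr is a string that describes a revision in the given
        source control system. Return true if revstr has correct
        format.
        """
        return True
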
@@ -96,7 +96,7 b' def recode(s):'
96 96
97 97
98 98 def mapbranch(branch, branchmap):
99 '''
99 """
100 100 >>> bmap = {b'default': b'branch1'}
101 101 >>> for i in [b'', None]:
102 102 ... mapbranch(i, bmap)
@@ -115,7 +115,7 b' def mapbranch(branch, branchmap):'
115 115 'branch4'
116 116 'branch4'
117 117 'branch5'
118 '''
118 """
119 119 # If branch is None or empty, this commit is coming from the source
120 120 # repository's default branch and destined for the default branch in the
121 121 # destination repository. For such commits, using a literal "default"
@@ -228,14 +228,14 b' class converter(object):'
228 228 self.branchmap = mapfile(ui, opts.get(b'branchmap'))
229 229
230 230 def parsesplicemap(self, path):
231 """ check and validate the splicemap format and
232 return a child/parents dictionary.
233 Format checking has two parts.
234 1. generic format which is same across all source types
235 2. specific format checking which may be different for
236 different source type. This logic is implemented in
237 checkrevformat function in source files like
238 hg.py, subversion.py etc.
231 """check and validate the splicemap format and
232 return a child/parents dictionary.
233 Format checking has two parts.
234 1. generic format which is same across all source types
235 2. specific format checking which may be different for
236 different source type. This logic is implemented in
237 checkrevformat function in source files like
238 hg.py, subversion.py etc.
239 239 """
240 240
241 241 if not path:
@@ -275,8 +275,8 b' class converter(object):'
275 275 return m
276 276
277 277 def walktree(self, heads):
278 '''Return a mapping that identifies the uncommitted parents of every
279 uncommitted changeset.'''
278 """Return a mapping that identifies the uncommitted parents of every
279 uncommitted changeset."""
280 280 visit = list(heads)
281 281 known = set()
282 282 parents = {}
@@ -332,8 +332,8 b' class converter(object):'
332 332 parents[c] = pc
333 333
334 334 def toposort(self, parents, sortmode):
335 '''Return an ordering such that every uncommitted changeset is
336 preceded by all its uncommitted ancestors.'''
335 """Return an ordering such that every uncommitted changeset is
336 preceded by all its uncommitted ancestors."""
337 337
338 338 def mapchildren(parents):
339 339 """Return a (children, roots) tuple where 'children' maps parent
@@ -29,25 +29,25 b' pickle = util.pickle'
29 29
30 30
31 31 class logentry(object):
32 '''Class logentry has the following attributes:
33 .author - author name as CVS knows it
34 .branch - name of branch this revision is on
35 .branches - revision tuple of branches starting at this revision
36 .comment - commit message
37 .commitid - CVS commitid or None
38 .date - the commit date as a (time, tz) tuple
39 .dead - true if file revision is dead
40 .file - Name of file
41 .lines - a tuple (+lines, -lines) or None
42 .parent - Previous revision of this entry
43 .rcs - name of file as returned from CVS
44 .revision - revision number as tuple
45 .tags - list of tags on the file
46 .synthetic - is this a synthetic "file ... added on ..." revision?
47 .mergepoint - the branch that has been merged from (if present in
48 rlog output) or None
49 .branchpoints - the branches that start at the current entry or empty
50 '''
32 """Class logentry has the following attributes:
33 .author - author name as CVS knows it
34 .branch - name of branch this revision is on
35 .branches - revision tuple of branches starting at this revision
36 .comment - commit message
37 .commitid - CVS commitid or None
38 .date - the commit date as a (time, tz) tuple
39 .dead - true if file revision is dead
40 .file - Name of file
41 .lines - a tuple (+lines, -lines) or None
42 .parent - Previous revision of this entry
43 .rcs - name of file as returned from CVS
44 .revision - revision number as tuple
45 .tags - list of tags on the file
46 .synthetic - is this a synthetic "file ... added on ..." revision?
47 .mergepoint - the branch that has been merged from (if present in
48 rlog output) or None
49 .branchpoints - the branches that start at the current entry or empty
50 """
51 51
52 52 def __init__(self, **entries):
53 53 self.synthetic = False
@@ -580,20 +580,20 b' def createlog(ui, directory=None, root=b'
580 580
581 581
582 582 class changeset(object):
583 '''Class changeset has the following attributes:
584 .id - integer identifying this changeset (list index)
585 .author - author name as CVS knows it
586 .branch - name of branch this changeset is on, or None
587 .comment - commit message
588 .commitid - CVS commitid or None
589 .date - the commit date as a (time,tz) tuple
590 .entries - list of logentry objects in this changeset
591 .parents - list of one or two parent changesets
592 .tags - list of tags on this changeset
593 .synthetic - from synthetic revision "file ... added on branch ..."
594 .mergepoint- the branch that has been merged from or None
595 .branchpoints- the branches that start at the current entry or empty
596 '''
583 """Class changeset has the following attributes:
584 .id - integer identifying this changeset (list index)
585 .author - author name as CVS knows it
586 .branch - name of branch this changeset is on, or None
587 .comment - commit message
588 .commitid - CVS commitid or None
589 .date - the commit date as a (time,tz) tuple
590 .entries - list of logentry objects in this changeset
591 .parents - list of one or two parent changesets
592 .tags - list of tags on this changeset
593 .synthetic - from synthetic revision "file ... added on branch ..."
594 .mergepoint- the branch that has been merged from or None
595 .branchpoints- the branches that start at the current entry or empty
596 """
597 597
598 598 def __init__(self, **entries):
599 599 self.id = None
@@ -945,10 +945,10 b' def createchangeset(ui, log, fuzz=60, me'
945 945
946 946
947 947 def debugcvsps(ui, *args, **opts):
948 '''Read CVS rlog for current directory or named path in
948 """Read CVS rlog for current directory or named path in
949 949 repository, and convert the log to changesets based on matching
950 950 commit log entries and dates.
951 '''
951 """
952 952 opts = pycompat.byteskwargs(opts)
953 953 if opts[b"new_cache"]:
954 954 cache = b"write"
@@ -19,14 +19,14 b' SKIPREV = common.SKIPREV'
19 19
20 20
21 21 def rpairs(path):
22 '''Yield tuples with path split at '/', starting with the full path.
22 """Yield tuples with path split at '/', starting with the full path.
23 23 No leading, trailing or double '/', please.
24 24 >>> for x in rpairs(b'foo/bar/baz'): print(x)
25 25 ('foo/bar/baz', '')
26 26 ('foo/bar', 'baz')
27 27 ('foo', 'bar/baz')
28 28 ('.', 'foo/bar/baz')
29 '''
29 """
30 30 i = len(path)
31 31 while i != -1:
32 32 yield path[:i], path[i + 1 :]
@@ -35,17 +35,17 b' def rpairs(path):'
35 35
36 36
37 37 def normalize(path):
38 ''' We use posixpath.normpath to support cross-platform path format.
39 However, it doesn't handle None input. So we wrap it up. '''
38 """We use posixpath.normpath to support cross-platform path format.
39 However, it doesn't handle None input. So we wrap it up."""
40 40 if path is None:
41 41 return None
42 42 return posixpath.normpath(path)
43 43
44 44
45 45 class filemapper(object):
46 '''Map and filter filenames when importing.
46 """Map and filter filenames when importing.
47 47 A name can be mapped to itself, a new name, or None (omit from new
48 repository).'''
48 repository)."""
49 49
50 50 def __init__(self, ui, path=None):
51 51 self.ui = ui
@@ -118,13 +118,19 b' configtable = {}'
118 118 configitem = registrar.configitem(configtable)
119 119
120 120 configitem(
121 b'eol', b'fix-trailing-newline', default=False,
121 b'eol',
122 b'fix-trailing-newline',
123 default=False,
122 124 )
123 125 configitem(
124 b'eol', b'native', default=pycompat.oslinesep,
126 b'eol',
127 b'native',
128 default=pycompat.oslinesep,
125 129 )
126 130 configitem(
127 b'eol', b'only-consistent', default=True,
131 b'eol',
132 b'only-consistent',
133 default=True,
128 134 )
129 135
130 136 # Matches a lone LF, i.e., one that is not part of CRLF.
@@ -118,19 +118,29 b' configtable = {}'
118 118 configitem = registrar.configitem(configtable)
119 119
120 120 configitem(
121 b'extdiff', br'opts\..*', default=b'', generic=True,
121 b'extdiff',
122 br'opts\..*',
123 default=b'',
124 generic=True,
122 125 )
123 126
124 127 configitem(
125 b'extdiff', br'gui\..*', generic=True,
128 b'extdiff',
129 br'gui\..*',
130 generic=True,
126 131 )
127 132
128 133 configitem(
129 b'diff-tools', br'.*\.diffargs$', default=None, generic=True,
134 b'diff-tools',
135 br'.*\.diffargs$',
136 default=None,
137 generic=True,
130 138 )
131 139
132 140 configitem(
133 b'diff-tools', br'.*\.gui$', generic=True,
141 b'diff-tools',
142 br'.*\.gui$',
143 generic=True,
134 144 )
135 145
136 146 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -141,9 +151,9 b" testedwith = b'ships-with-hg-core'"
141 151
142 152
143 153 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
144 '''snapshot files as of some revision
154 """snapshot files as of some revision
145 155 if not using snapshot, -I/-X does not work and recursive diff
146 in tools like kdiff3 and meld displays too many files.'''
156 in tools like kdiff3 and meld displays too many files."""
147 157 dirname = os.path.basename(repo.root)
148 158 if dirname == b"":
149 159 dirname = b"root"
@@ -230,9 +240,9 b' def formatcmdline('
230 240
231 241
232 242 def _systembackground(cmd, environ=None, cwd=None):
233 ''' like 'procutil.system', but returns the Popen object directly
234 so we don't have to wait on it.
235 '''
243 """like 'procutil.system', but returns the Popen object directly
244 so we don't have to wait on it.
245 """
236 246 env = procutil.shellenviron(environ)
237 247 proc = subprocess.Popen(
238 248 procutil.tonativestr(cmd),
@@ -530,13 +540,13 b' def diffrevs('
530 540
531 541
532 542 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
533 '''Do the actual diff:
543 """Do the actual diff:
534 544
535 545 - copy to a temp structure if diffing 2 internal revisions
536 546 - copy to a temp structure if diffing working revision with
537 547 another one and more than 1 file is changed
538 548 - just invoke the diff for a single file in the working dir
539 '''
549 """
540 550
541 551 cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
542 552 revs = opts.get(b'rev')
@@ -628,14 +638,16 b' extdiffopts = ('
628 638
629 639 @command(
630 640 b'extdiff',
631 [(b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),]
641 [
642 (b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),
643 ]
632 644 + extdiffopts,
633 645 _(b'hg extdiff [OPT]... [FILE]...'),
634 646 helpcategory=command.CATEGORY_FILE_CONTENTS,
635 647 inferrepo=True,
636 648 )
637 649 def extdiff(ui, repo, *pats, **opts):
638 '''use external program to diff repository (or selected files)
650 """use external program to diff repository (or selected files)
639 651
640 652 Show differences between revisions for the specified files, using
641 653 an external program. The default program used is diff, with
@@ -664,7 +676,7 b' def extdiff(ui, repo, *pats, **opts):'
664 676
665 677 The --confirm option will prompt the user before each invocation of
666 678 the external program. It is ignored if --per-file isn't specified.
667 '''
679 """
668 680 opts = pycompat.byteskwargs(opts)
669 681 program = opts.get(b'program')
670 682 option = opts.get(b'option')
@@ -70,13 +70,19 b' configtable = {}'
70 70 configitem = registrar.configitem(configtable)
71 71
72 72 configitem(
73 b'factotum', b'executable', default=b'/bin/auth/factotum',
73 b'factotum',
74 b'executable',
75 default=b'/bin/auth/factotum',
74 76 )
75 77 configitem(
76 b'factotum', b'mountpoint', default=b'/mnt/factotum',
78 b'factotum',
79 b'mountpoint',
80 default=b'/mnt/factotum',
77 81 )
78 82 configitem(
79 b'factotum', b'service', default=b'hg',
83 b'factotum',
84 b'service',
85 default=b'hg',
80 86 )
81 87
82 88
@@ -54,7 +54,7 b" testedwith = b'ships-with-hg-core'"
54 54 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
55 55 )
56 56 def fetch(ui, repo, source=b'default', **opts):
57 '''pull changes from a remote repository, merge new changes if needed.
57 """pull changes from a remote repository, merge new changes if needed.
58 58
59 59 This finds all changes from the repository at the specified path
60 60 or URL and adds them to the local repository.
@@ -71,7 +71,7 b" def fetch(ui, repo, source=b'default', *"
71 71 See :hg:`help dates` for a list of formats valid for -d/--date.
72 72
73 73 Returns 0 on success.
74 '''
74 """
75 75
76 76 opts = pycompat.byteskwargs(opts)
77 77 date = opts.get(b'date')
@@ -372,7 +372,7 b' def cleanup(repo, replacements, wdirwrit'
372 372
373 373
374 374 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
375 """"Constructs the list of files to be fixed at specific revisions
375 """ "Constructs the list of files to be fixed at specific revisions
376 376
377 377 It is up to the caller how to consume the work items, and the only
378 378 dependence between them is that replacement revisions must be committed in
@@ -154,25 +154,40 b' configtable = {}'
154 154 configitem = registrar.configitem(configtable)
155 155
156 156 configitem(
157 b'fsmonitor', b'mode', default=b'on',
157 b'fsmonitor',
158 b'mode',
159 default=b'on',
158 160 )
159 161 configitem(
160 b'fsmonitor', b'walk_on_invalidate', default=False,
162 b'fsmonitor',
163 b'walk_on_invalidate',
164 default=False,
161 165 )
162 166 configitem(
163 b'fsmonitor', b'timeout', default=b'2',
167 b'fsmonitor',
168 b'timeout',
169 default=b'2',
164 170 )
165 171 configitem(
166 b'fsmonitor', b'blacklistusers', default=list,
172 b'fsmonitor',
173 b'blacklistusers',
174 default=list,
175 )
176 configitem(
177 b'fsmonitor',
178 b'watchman_exe',
179 default=b'watchman',
167 180 )
168 181 configitem(
169 b'fsmonitor', b'watchman_exe', default=b'watchman',
182 b'fsmonitor',
183 b'verbose',
184 default=True,
185 experimental=True,
170 186 )
171 187 configitem(
172 b'fsmonitor', b'verbose', default=True, experimental=True,
173 )
174 configitem(
175 b'experimental', b'fsmonitor.transaction_notify', default=False,
188 b'experimental',
189 b'fsmonitor.transaction_notify',
190 default=False,
176 191 )
177 192
178 193 # This extension is incompatible with the following blacklisted extensions
@@ -271,11 +286,11 b' def _watchmantofsencoding(path):'
271 286
272 287
273 288 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
274 '''Replacement for dirstate.walk, hooking into Watchman.
289 """Replacement for dirstate.walk, hooking into Watchman.
275 290
276 291 Whenever full is False, ignored is False, and the Watchman client is
277 292 available, use Watchman combined with saved state to possibly return only a
278 subset of files.'''
293 subset of files."""
279 294
280 295 def bail(reason):
281 296 self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
@@ -731,8 +746,8 b' def extsetup(ui):'
731 746
732 747
733 748 def wrapsymlink(orig, source, link_name):
734 ''' if we create a dangling symlink, also touch the parent dir
735 to encourage fsevents notifications to work more correctly '''
749 """if we create a dangling symlink, also touch the parent dir
750 to encourage fsevents notifications to work more correctly"""
736 751 try:
737 752 return orig(source, link_name)
738 753 finally:
@@ -743,13 +758,13 b' def wrapsymlink(orig, source, link_name)'
743 758
744 759
745 760 class state_update(object):
746 ''' This context manager is responsible for dispatching the state-enter
747 and state-leave signals to the watchman service. The enter and leave
748 methods can be invoked manually (for scenarios where context manager
749 semantics are not possible). If parameters oldnode and newnode are None,
750 they will be populated based on current working copy in enter and
751 leave, respectively. Similarly, if the distance is none, it will be
752 calculated based on the oldnode and newnode in the leave method.'''
761 """This context manager is responsible for dispatching the state-enter
762 and state-leave signals to the watchman service. The enter and leave
763 methods can be invoked manually (for scenarios where context manager
764 semantics are not possible). If parameters oldnode and newnode are None,
765 they will be populated based on current working copy in enter and
766 leave, respectively. Similarly, if the distance is none, it will be
767 calculated based on the oldnode and newnode in the leave method."""
753 768
754 769 def __init__(
755 770 self,
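A rough usage sketch of the context manager described in the docstring above; the argument names are illustrative, taken from the docstring's description rather than the full constructor signature (which is cut off in this hunk):

# Sketch only: oldnode/newnode may be left as None and, per the
# docstring, are populated from the working copy on enter/leave.
with state_update(repo, name=b'hg.update', oldnode=None, newnode=None):
    ...  # watchman receives state-enter before and state-leave after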
@@ -282,11 +282,11 b' class SocketConnectError(WatchmanError):'
282 282
283 283 class SocketTimeout(WatchmanError):
284 284 """A specialized exception raised for socket timeouts during communication to/from watchman.
285 This makes it easier to implement non-blocking loops as callers can easily distinguish
286 between a routine timeout and an actual error condition.
285 This makes it easier to implement non-blocking loops as callers can easily distinguish
286 between a routine timeout and an actual error condition.
287 287
288 Note that catching WatchmanError will also catch this as it is a super-class, so backwards
289 compatibility in exception handling is preserved.
288 Note that catching WatchmanError will also catch this as it is a super-class, so backwards
289 compatibility in exception handling is preserved.
290 290 """
291 291
292 292
@@ -323,7 +323,7 b' class Transport(object):'
323 323 pass
324 324
325 325 def readLine(self):
326 """ read a line
326 """read a line
327 327 Maintains its own buffer, callers of the transport should not mix
328 328 calls to readBytes and readLine.
329 329 """
@@ -409,7 +409,7 b' class UnixSocketTransport(Transport):'
409 409
410 410
411 411 def _get_overlapped_result_ex_impl(pipe, olap, nbytes, millis, alertable):
412 """ Windows 7 and earlier does not support GetOverlappedResultEx. The
412 """Windows 7 and earlier does not support GetOverlappedResultEx. The
413 413 alternative is to use GetOverlappedResult and wait for read or write
414 414 operation to complete. This is done be using CreateEvent and
415 415 WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
@@ -510,9 +510,9 b' class WindowsNamedPipeTransport(Transpor'
510 510 self.timeout = int(value * 1000)
511 511
512 512 def readBytes(self, size):
513 """ A read can block for an unbounded amount of time, even if the
514 kernel reports that the pipe handle is signalled, so we need to
515 always perform our reads asynchronously
513 """A read can block for an unbounded amount of time, even if the
514 kernel reports that the pipe handle is signalled, so we need to
515 always perform our reads asynchronously
516 516 """
517 517
518 518 # try to satisfy the read from any buffered data
@@ -627,7 +627,7 b' def _default_binpath(binpath=None):'
627 627
628 628
629 629 class CLIProcessTransport(Transport):
630 """ open a pipe to the cli to talk to the service
630 """open a pipe to the cli to talk to the service
631 631 This intended to be used only in the test harness!
632 632
633 633 The CLI is an oddball because we only support JSON input
@@ -739,8 +739,8 b' class BserCodec(Codec):'
739 739
740 740
741 741 class ImmutableBserCodec(BserCodec):
742 """ use the BSER encoding, decoding values using the newer
743 immutable object support """
742 """use the BSER encoding, decoding values using the newer
743 immutable object support"""
744 744
745 745 def _loads(self, response):
746 746 return bser.loads(
@@ -817,8 +817,8 b' class Bser2WithFallbackCodec(BserCodec):'
817 817
818 818
819 819 class ImmutableBser2Codec(Bser2WithFallbackCodec, ImmutableBserCodec):
820 """ use the BSER encoding, decoding values using the newer
821 immutable object support """
820 """use the BSER encoding, decoding values using the newer
821 immutable object support"""
822 822
823 823 pass
824 824
@@ -1050,7 +1050,7 b' class client(object):'
1050 1050 self.sendConn = None
1051 1051
1052 1052 def receive(self):
1053 """ receive the next PDU from the watchman service
1053 """receive the next PDU from the watchman service
1054 1054
1055 1055 If the client has activated subscriptions or logs then
1056 1056 this PDU may be a unilateral PDU sent by the service to
@@ -1098,7 +1098,7 b' class client(object):'
1098 1098 return False
1099 1099
1100 1100 def getLog(self, remove=True):
1101 """ Retrieve buffered log data
1101 """Retrieve buffered log data
1102 1102
1103 1103 If remove is true the data will be removed from the buffer.
1104 1104 Otherwise it will be left in the buffer
@@ -1109,7 +1109,7 b' class client(object):'
1109 1109 return res
1110 1110
1111 1111 def getSubscription(self, name, remove=True, root=None):
1112 """ Retrieve the data associated with a named subscription
1112 """Retrieve the data associated with a named subscription
1113 1113
1114 1114 If remove is True (the default), the subscription data is removed
1115 1115 from the buffer. Otherwise the data is returned but left in
@@ -1144,7 +1144,7 b' class client(object):'
1144 1144 return sub
1145 1145
1146 1146 def query(self, *args):
1147 """ Send a query to the watchman service and return the response
1147 """Send a query to the watchman service and return the response
1148 1148
1149 1149 This call will block until the response is returned.
1150 1150 If any unilateral responses are sent by the service in between
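A hedged usage sketch of the blocking query API documented above, using the public pywatchman entry points:

import pywatchman

# Minimal sketch: open a client and issue blocking queries.
client = pywatchman.client()
try:
    version = client.query('version')    # blocks until the response PDU
    watches = client.query('watch-list')
finally:
    client.close()

Per the docstrings above, any unilateral subscription or log PDUs that arrive while a query is pending are buffered and can be drained later via getLog() or getSubscription().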
@@ -55,8 +55,8 b' def check(version, name):'
55 55
56 56
57 57 def synthesize(vers, opts):
58 """ Synthesize a capability enabled version response
59 This is a very limited emulation for relatively recent feature sets
58 """Synthesize a capability enabled version response
59 This is a very limited emulation for relatively recent feature sets
60 60 """
61 61 parsed_version = parse_version(vers["version"])
62 62 vers["capabilities"] = {}
@@ -33,7 +33,9 b' configtable = {}'
33 33 configitem = registrar.configitem(configtable)
34 34 # git.log-index-cache-miss: internal knob for testing
35 35 configitem(
36 b"git", b"log-index-cache-miss", default=False,
36 b"git",
37 b"log-index-cache-miss",
38 default=False,
37 39 )
38 40
39 41 # TODO: extract an interface for this in core
@@ -224,8 +226,7 b' class gitbmstore(object):'
224 226 return bname
225 227
226 228 def applychanges(self, repo, tr, changes):
227 """Apply a list of changes to bookmarks
228 """
229 """Apply a list of changes to bookmarks"""
229 230 # TODO: this should respect transactions, but that's going to
230 231 # require enlarging the gitbmstore to know how to do in-memory
231 232 # temporary writes and read those back prior to transaction
@@ -127,7 +127,7 b' class gittreemanifest(object):'
127 127 return dir in self._dirs
128 128
129 129 def diff(self, other, match=lambda x: True, clean=False):
130 '''Finds changes between the current manifest and m2.
130 """Finds changes between the current manifest and m2.
131 131
132 132 The result is returned as a dict with filename as key and
133 133 values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
@@ -135,7 +135,7 b' class gittreemanifest(object):'
135 135 in the current/other manifest. Where the file does not exist,
136 136 the nodeid will be None and the flags will be the empty
137 137 string.
138 '''
138 """
139 139 result = {}
140 140
141 141 def _iterativediff(t1, t2, subdir):
@@ -59,10 +59,10 b' def convert(s):'
59 59 helpbasic=True,
60 60 )
61 61 def githelp(ui, repo, *args, **kwargs):
62 '''suggests the Mercurial equivalent of the given git command
62 """suggests the Mercurial equivalent of the given git command
63 63
64 64 Usage: hg githelp -- <git command>
65 '''
65 """
66 66
67 67 if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
68 68 raise error.Abort(
@@ -37,13 +37,20 b' configtable = {}'
37 37 configitem = registrar.configitem(configtable)
38 38
39 39 configitem(
40 b'gpg', b'cmd', default=b'gpg',
40 b'gpg',
41 b'cmd',
42 default=b'gpg',
41 43 )
42 44 configitem(
43 b'gpg', b'key', default=None,
45 b'gpg',
46 b'key',
47 default=None,
44 48 )
45 49 configitem(
46 b'gpg', b'.*', default=None, generic=True,
50 b'gpg',
51 b'.*',
52 default=None,
53 generic=True,
47 54 )
48 55
49 56 # Custom help category
@@ -78,7 +85,11 b' class gpg(object):'
78 85 fp.close()
79 86 gpgcmd = (
80 87 b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
81 % (self.path, sigfile, datafile,)
88 % (
89 self.path,
90 sigfile,
91 datafile,
92 )
82 93 )
83 94 ret = procutil.filter(b"", gpgcmd)
84 95 finally:
@@ -65,7 +65,9 b' configtable = {}'
65 65 configitem = registrar.configitem(configtable)
66 66
67 67 configitem(
68 b'hgk', b'path', default=b'hgk',
68 b'hgk',
69 b'path',
70 default=b'hgk',
69 71 )
70 72
71 73
@@ -247,22 +247,34 b' command = registrar.command(cmdtable)'
247 247 configtable = {}
248 248 configitem = registrar.configitem(configtable)
249 249 configitem(
250 b'experimental', b'histedit.autoverb', default=False,
250 b'experimental',
251 b'histedit.autoverb',
252 default=False,
251 253 )
252 254 configitem(
253 b'histedit', b'defaultrev', default=None,
255 b'histedit',
256 b'defaultrev',
257 default=None,
254 258 )
255 259 configitem(
256 b'histedit', b'dropmissing', default=False,
260 b'histedit',
261 b'dropmissing',
262 default=False,
257 263 )
258 264 configitem(
259 b'histedit', b'linelen', default=80,
265 b'histedit',
266 b'linelen',
267 default=80,
260 268 )
261 269 configitem(
262 b'histedit', b'singletransaction', default=False,
270 b'histedit',
271 b'singletransaction',
272 default=False,
263 273 )
264 274 configitem(
265 b'ui', b'interface.histedit', default=None,
275 b'ui',
276 b'interface.histedit',
277 default=None,
266 278 )
267 279 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
268 280
@@ -280,7 +292,7 b' internalactions = set()'
280 292
281 293
282 294 def geteditcomment(ui, first, last):
283 """ construct the editor comment
295 """construct the editor comment
284 296 The comment includes::
285 297 - an intro
286 298 - sorted primary commands
@@ -477,8 +489,7 b' class histeditaction(object):'
477 489
478 490 @classmethod
479 491 def fromrule(cls, state, rule):
480 """Parses the given rule, returning an instance of the histeditaction.
481 """
492 """Parses the given rule, returning an instance of the histeditaction."""
482 493 ruleid = rule.strip().split(b' ', 1)[0]
483 494 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
484 495 # Check for validation of rule ids and get the rulehash
@@ -544,7 +555,7 b' class histeditaction(object):'
544 555
545 556 def tostate(self):
546 557 """Print an action in format used by histedit state files
547 (the first line is a verb, the remainder is the second)
558 (the first line is a verb, the remainder is the second)
548 559 """
549 560 return b"%s\n%s" % (self.verb, node.hex(self.node))
550 561
@@ -1178,8 +1189,8 b' class histeditrule(object):'
1178 1189
1179 1190 # ============ EVENTS ===============
1180 1191 def movecursor(state, oldpos, newpos):
1181 '''Change the rule/changeset that the cursor is pointing to, regardless of
1182 current mode (you can switch between patches from the view patch window).'''
1192 """Change the rule/changeset that the cursor is pointing to, regardless of
1193 current mode (you can switch between patches from the view patch window)."""
1183 1194 state[b'pos'] = newpos
1184 1195
1185 1196 mode, _ = state[b'mode']
@@ -1256,8 +1267,8 b' def cycleaction(state, pos, next=False):'
1256 1267
1257 1268
1258 1269 def changeview(state, delta, unit):
1259 '''Change the region of whatever is being viewed (a patch or the list of
1260 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
1270 """Change the region of whatever is being viewed (a patch or the list of
1271 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
1261 1272 mode, _ = state[b'mode']
1262 1273 if mode != MODE_PATCH:
1263 1274 return
@@ -1582,8 +1593,12 b' pgup/K: move patch up, pgdn/J: move patc'
1582 1593 b'mode': (MODE_INIT, MODE_INIT),
1583 1594 b'page_height': None,
1584 1595 b'modes': {
1585 MODE_RULES: {b'line_offset': 0,},
1586 MODE_PATCH: {b'line_offset': 0,},
1596 MODE_RULES: {
1597 b'line_offset': 0,
1598 },
1599 MODE_PATCH: {
1600 b'line_offset': 0,
1601 },
1587 1602 },
1588 1603 b'repo': repo,
1589 1604 }
@@ -40,10 +40,14 b' configtable = {}'
40 40 configitem = registrar.configitem(configtable)
41 41
42 42 configitem(
43 b'notify_obsoleted', b'domain', default=None,
43 b'notify_obsoleted',
44 b'domain',
45 default=None,
44 46 )
45 47 configitem(
46 b'notify_obsoleted', b'messageidseed', default=None,
48 b'notify_obsoleted',
49 b'messageidseed',
50 default=None,
47 51 )
48 52 configitem(
49 53 b'notify_obsoleted',
@@ -39,10 +39,14 b' configtable = {}'
39 39 configitem = registrar.configitem(configtable)
40 40
41 41 configitem(
42 b'notify_published', b'domain', default=None,
42 b'notify_published',
43 b'domain',
44 default=None,
43 45 )
44 46 configitem(
45 b'notify_published', b'messageidseed', default=None,
47 b'notify_published',
48 b'messageidseed',
49 default=None,
46 50 )
47 51 configitem(
48 52 b'notify_published',
@@ -154,37 +154,59 b' configtable = {}'
154 154 configitem = registrar.configitem(configtable)
155 155
156 156 configitem(
157 b'infinitepush', b'server', default=False,
157 b'infinitepush',
158 b'server',
159 default=False,
158 160 )
159 161 configitem(
160 b'infinitepush', b'storetype', default=b'',
162 b'infinitepush',
163 b'storetype',
164 default=b'',
161 165 )
162 166 configitem(
163 b'infinitepush', b'indextype', default=b'',
167 b'infinitepush',
168 b'indextype',
169 default=b'',
164 170 )
165 171 configitem(
166 b'infinitepush', b'indexpath', default=b'',
172 b'infinitepush',
173 b'indexpath',
174 default=b'',
167 175 )
168 176 configitem(
169 b'infinitepush', b'storeallparts', default=False,
177 b'infinitepush',
178 b'storeallparts',
179 default=False,
170 180 )
171 181 configitem(
172 b'infinitepush', b'reponame', default=b'',
182 b'infinitepush',
183 b'reponame',
184 default=b'',
173 185 )
174 186 configitem(
175 b'scratchbranch', b'storepath', default=b'',
187 b'scratchbranch',
188 b'storepath',
189 default=b'',
176 190 )
177 191 configitem(
178 b'infinitepush', b'branchpattern', default=b'',
192 b'infinitepush',
193 b'branchpattern',
194 default=b'',
179 195 )
180 196 configitem(
181 b'infinitepush', b'pushtobundlestore', default=False,
197 b'infinitepush',
198 b'pushtobundlestore',
199 default=False,
182 200 )
183 201 configitem(
184 b'experimental', b'server-bundlestore-bookmark', default=b'',
202 b'experimental',
203 b'server-bundlestore-bookmark',
204 default=b'',
185 205 )
186 206 configitem(
187 b'experimental', b'infinitepush-scratchpush', default=False,
207 b'experimental',
208 b'infinitepush-scratchpush',
209 default=False,
188 210 )
189 211
190 212 experimental = b'experimental'
@@ -249,13 +271,13 b' def _getloglevel(ui):'
249 271
250 272
251 273 def _tryhoist(ui, remotebookmark):
252 '''returns a bookmarks with hoisted part removed
274 """returns a bookmarks with hoisted part removed
253 275
254 276 Remotenames extension has a 'hoist' config that allows to use remote
255 277 bookmarks without specifying remote path. For example, 'hg update master'
256 278 works as well as 'hg update remote/master'. We want to allow the same in
257 279 infinitepush.
258 '''
280 """
259 281
260 282 if common.isremotebooksenabled(ui):
261 283 hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
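A minimal sketch of the hoisting behavior the docstring describes; the helper below is hypothetical and only mirrors the documented string handling:

def hoist_example(bookmark, hoistedpeer=b'remote'):
    # Hypothetical illustration: strip the configured 'hoist' prefix
    # so b'remote/master' can be used as plain b'master'.
    prefix = hoistedpeer + b'/'
    if bookmark.startswith(prefix):
        return bookmark[len(prefix):]
    return bookmark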
@@ -427,11 +449,11 b' def _readbundlerevs(bundlerepo):'
427 449
428 450
429 451 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
430 '''Tells remotefilelog to include all changed files to the changegroup
452 """Tells remotefilelog to include all changed files to the changegroup
431 453
432 454 By default remotefilelog doesn't include file content to the changegroup.
433 455 But we need to include it if we are fetching from bundlestore.
434 '''
456 """
435 457 changedfiles = set()
436 458 cl = bundlerepo.changelog
437 459 for r in bundlerevs:
@@ -457,11 +479,11 b' def _includefilelogstobundle(bundlecaps,'
457 479
458 480
459 481 def _rebundle(bundlerepo, bundleroots, unknownhead):
460 '''
482 """
461 483 Bundle may include more revision then user requested. For example,
462 484 if user asks for revision but bundle also consists its descendants.
463 485 This function will filter out all revision that user is not requested.
464 '''
486 """
465 487 parts = []
466 488
467 489 version = b'02'
@@ -499,10 +521,10 b' def _needsrebundling(head, bundlerepo):'
499 521
500 522
501 523 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
502 '''generates bundle that will be send to the user
524 """generates bundle that will be send to the user
503 525
504 526 returns tuple with raw bundle string and bundle type
505 '''
527 """
506 528 parts = []
507 529 if not _needsrebundling(head, bundlerepo):
508 530 with util.posixfile(bundlefile, b"rb") as f:
@@ -1022,7 +1044,12 b' def storetobundlestore(orig, repo, op, u'
1022 1044 )
1023 1045 rpart.addparam(b'return', b'1', mandatory=False)
1024 1046
1025 op.records.add(part.type, {b'return': 1,})
1047 op.records.add(
1048 part.type,
1049 {
1050 b'return': 1,
1051 },
1052 )
1026 1053 if bundlepart:
1027 1054 bundler.addpart(bundlepart)
1028 1055
@@ -1112,7 +1139,12 b' def processparts(orig, repo, op, unbundl'
1112 1139 bundle2._processpart(op, part)
1113 1140
1114 1141 if handleallparts:
1115 op.records.add(part.type, {b'return': 1,})
1142 op.records.add(
1143 part.type,
1144 {
1145 b'return': 1,
1146 },
1147 )
1116 1148 if bundlepart:
1117 1149 bundler.addpart(bundlepart)
1118 1150
@@ -1284,11 +1316,11 b' def _maybeaddpushbackpart(op, bookmark, '
1284 1316
1285 1317
1286 1318 def bundle2pushkey(orig, op, part):
1287 '''Wrapper of bundle2.handlepushkey()
1319 """Wrapper of bundle2.handlepushkey()
1288 1320
1289 1321 The only goal is to skip calling the original function if flag is set.
1290 1322 It's set if infinitepush push is happening.
1291 '''
1323 """
1292 1324 if op.records[scratchbranchparttype + b'_skippushkey']:
1293 1325 if op.reply is not None:
1294 1326 rpart = op.reply.newpart(b'reply:pushkey')
@@ -1300,11 +1332,11 b' def bundle2pushkey(orig, op, part):'
1300 1332
1301 1333
1302 1334 def bundle2handlephases(orig, op, part):
1303 '''Wrapper of bundle2.handlephases()
1335 """Wrapper of bundle2.handlephases()
1304 1336
1305 1337 The only goal is to skip calling the original function if flag is set.
1306 1338 It's set if infinitepush push is happening.
1307 '''
1339 """
1308 1340
1309 1341 if op.records[scratchbranchparttype + b'_skipphaseheads']:
1310 1342 return
@@ -1313,11 +1345,11 b' def bundle2handlephases(orig, op, part):'
1313 1345
1314 1346
1315 1347 def _asyncsavemetadata(root, nodes):
1316 '''starts a separate process that fills metadata for the nodes
1348 """starts a separate process that fills metadata for the nodes
1317 1349
1318 1350 This function creates a separate process and doesn't wait for it's
1319 1351 completion. This was done to avoid slowing down pushes
1320 '''
1352 """
1321 1353
1322 1354 maxnodes = 50
1323 1355 if len(nodes) > maxnodes:
@@ -90,11 +90,11 b' def _validaterevset(repo, revset, bookma'
90 90
91 91
92 92 def _handlelfs(repo, missing):
93 '''Special case if lfs is enabled
93 """Special case if lfs is enabled
94 94
95 95 If lfs is enabled then we need to call prepush hook
96 96 to make sure large files are uploaded to lfs
97 '''
97 """
98 98 try:
99 99 lfsmod = extensions.find(b'lfs')
100 100 lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
@@ -47,8 +47,7 b' class indexapi(object):'
47 47 raise NotImplementedError()
48 48
49 49 def deletebookmarks(self, patterns):
50 """Accepts list of bookmarks and deletes them.
51 """
50 """Accepts list of bookmarks and deletes them."""
52 51 raise NotImplementedError()
53 52
54 53 def getbundle(self, node):
@@ -28,9 +28,9 b' def _convertbookmarkpattern(pattern):'
28 28
29 29
30 30 class sqlindexapi(indexapi.indexapi):
31 '''
31 """
32 32 Sql backend for infinitepush index. See schema.sql
33 '''
33 """
34 34
35 35 def __init__(
36 36 self,
@@ -158,13 +158,14 b' configtable = {}'
158 158 configitem = registrar.configitem(configtable)
159 159
160 160 configitem(
161 b'keywordset', b'svn', default=False,
161 b'keywordset',
162 b'svn',
163 default=False,
162 164 )
163 165 # date like in cvs' $Date
164 166 @templatefilter(b'utcdate', intype=templateutil.date)
165 167 def utcdate(date):
166 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
167 '''
168 """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
168 169 dateformat = b'%Y/%m/%d %H:%M:%S'
169 170 return dateutil.datestr((date[0], 0), dateformat)
170 171
@@ -172,18 +173,18 b' def utcdate(date):'
172 173 # date like in svn's $Date
173 174 @templatefilter(b'svnisodate', intype=templateutil.date)
174 175 def svnisodate(date):
175 '''Date. Returns a date in this format: "2009-08-18 13:00:13
176 """Date. Returns a date in this format: "2009-08-18 13:00:13
176 177 +0200 (Tue, 18 Aug 2009)".
177 '''
178 """
178 179 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
179 180
180 181
181 182 # date like in svn's $Id
182 183 @templatefilter(b'svnutcdate', intype=templateutil.date)
183 184 def svnutcdate(date):
184 '''Date. Returns a UTC-date in this format: "2009-08-18
185 """Date. Returns a UTC-date in this format: "2009-08-18
185 186 11:00:13Z".
186 '''
187 """
187 188 dateformat = b'%Y-%m-%d %H:%M:%SZ'
188 189 return dateutil.datestr((date[0], 0), dateformat)
189 190
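The filters in this and the preceding hunk differ only in the format string handed to dateutil.datestr. A sketch with an illustrative (timestamp, offset) pair, in the tuple form Mercurial uses for dates; the format strings are copied from the hunks above:

from mercurial.utils import dateutil

date = (1250593213, 0)   # illustrative (unix timestamp, utc offset)
dateutil.datestr((date[0], 0), b'%Y/%m/%d %H:%M:%S')               # utcdate
dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')   # svnisodate
dateutil.datestr((date[0], 0), b'%Y-%m-%d %H:%M:%SZ')              # svnutcdate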
@@ -221,25 +222,25 b' def _defaultkwmaps(ui):'
221 222
222 223
223 224 def _shrinktext(text, subfunc):
224 '''Helper for keyword expansion removal in text.
225 Depending on subfunc also returns number of substitutions.'''
225 """Helper for keyword expansion removal in text.
226 Depending on subfunc also returns number of substitutions."""
226 227 return subfunc(br'$\1$', text)
227 228
228 229
229 230 def _preselect(wstatus, changed):
230 '''Retrieves modified and added files from a working directory state
231 """Retrieves modified and added files from a working directory state
231 232 and returns the subset of each contained in given changed files
232 retrieved from a change context.'''
233 retrieved from a change context."""
233 234 modified = [f for f in wstatus.modified if f in changed]
234 235 added = [f for f in wstatus.added if f in changed]
235 236 return modified, added
236 237
237 238
238 239 class kwtemplater(object):
239 '''
240 """
240 241 Sets up keyword templates, corresponding keyword regex, and
241 242 provides keyword substitution functions.
242 '''
243 """
243 244
244 245 def __init__(self, ui, repo, inc, exc):
245 246 self.ui = ui
@@ -304,8 +305,8 b' class kwtemplater(object):'
304 305 return data
305 306
306 307 def iskwfile(self, cand, ctx):
307 '''Returns subset of candidates which are configured for keyword
308 expansion but are not symbolic links.'''
308 """Returns subset of candidates which are configured for keyword
309 expansion but are not symbolic links."""
309 310 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
310 311
311 312 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
@@ -374,18 +375,18 b' class kwtemplater(object):'
374 375 return lines
375 376
376 377 def wread(self, fname, data):
377 '''If in restricted mode returns data read from wdir with
378 keyword substitutions removed.'''
378 """If in restricted mode returns data read from wdir with
379 keyword substitutions removed."""
379 380 if self.restrict:
380 381 return self.shrink(fname, data)
381 382 return data
382 383
383 384
384 385 class kwfilelog(filelog.filelog):
385 '''
386 """
386 387 Subclass of filelog to hook into its read, add, cmp methods.
387 388 Keywords are "stored" unexpanded, and processed on reading.
388 '''
389 """
389 390
390 391 def __init__(self, opener, kwt, path):
391 392 super(kwfilelog, self).__init__(opener, path)
@@ -411,8 +412,8 b' class kwfilelog(filelog.filelog):'
411 412
412 413
413 414 def _status(ui, repo, wctx, kwt, *pats, **opts):
414 '''Bails out if [keyword] configuration is not active.
415 Returns status of working directory.'''
415 """Bails out if [keyword] configuration is not active.
416 Returns status of working directory."""
416 417 if kwt:
417 418 opts = pycompat.byteskwargs(opts)
418 419 return repo.status(
@@ -448,7 +449,7 b' def _kwfwrite(ui, repo, expand, *pats, *'
448 449 optionalrepo=True,
449 450 )
450 451 def demo(ui, repo, *args, **opts):
451 '''print [keywordmaps] configuration and an expansion example
452 """print [keywordmaps] configuration and an expansion example
452 453
453 454 Show current, custom, or default keyword template maps and their
454 455 expansions.
@@ -459,7 +460,7 b' def demo(ui, repo, *args, **opts):'
459 460 Use -d/--default to disable current configuration.
460 461
461 462 See :hg:`help templates` for information on templates and filters.
462 '''
463 """
463 464
464 465 def demoitems(section, items):
465 466 ui.write(b'[%s]\n' % section)
@@ -547,12 +548,12 b' def demo(ui, repo, *args, **opts):'
547 548 inferrepo=True,
548 549 )
549 550 def expand(ui, repo, *pats, **opts):
550 '''expand keywords in the working directory
551 """expand keywords in the working directory
551 552
552 553 Run after (re)enabling keyword expansion.
553 554
554 555 kwexpand refuses to run if given files contain local changes.
555 '''
556 """
556 557 # 3rd argument sets expansion to True
557 558 _kwfwrite(ui, repo, True, *pats, **opts)
558 559
@@ -569,7 +570,7 b' def expand(ui, repo, *pats, **opts):'
569 570 inferrepo=True,
570 571 )
571 572 def files(ui, repo, *pats, **opts):
572 '''show files configured for keyword expansion
573 """show files configured for keyword expansion
573 574
574 575 List which files in the working directory are matched by the
575 576 [keyword] configuration patterns.
@@ -588,7 +589,7 b' def files(ui, repo, *pats, **opts):'
588 589 k = keyword expansion candidate (not tracked)
589 590 I = ignored
590 591 i = ignored (not tracked)
591 '''
592 """
592 593 kwt = getattr(repo, '_keywordkwt', None)
593 594 wctx = repo[None]
594 595 status = _status(ui, repo, wctx, kwt, *pats, **opts)
@@ -634,12 +635,12 b' def files(ui, repo, *pats, **opts):'
634 635 inferrepo=True,
635 636 )
636 637 def shrink(ui, repo, *pats, **opts):
637 '''revert expanded keywords in the working directory
638 """revert expanded keywords in the working directory
638 639
639 640 Must be run before changing/disabling active keywords.
640 641
641 642 kwshrink refuses to run if given files contain local changes.
642 '''
643 """
643 644 # 3rd argument sets expansion to False
644 645 _kwfwrite(ui, repo, False, *pats, **opts)
645 646
@@ -648,8 +649,8 b' def shrink(ui, repo, *pats, **opts):'
648 649
649 650
650 651 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
651 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
652 rejects or conflicts due to expanded keywords in working dir.'''
652 """Monkeypatch/wrap patch.patchfile.__init__ to avoid
653 rejects or conflicts due to expanded keywords in working dir."""
653 654 orig(self, ui, gp, backend, store, eolmode)
654 655 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
655 656 if kwt:
@@ -702,7 +703,7 b' def kw_amend(orig, ui, repo, old, extra,'
702 703
703 704
704 705 def kw_copy(orig, ui, repo, pats, opts, rename=False):
705 '''Wraps cmdutil.copy so that copy/rename destinations do not
706 """Wraps cmdutil.copy so that copy/rename destinations do not
706 707 contain expanded keywords.
707 708 Note that the source of a regular file destination may also be a
708 709 symlink:
@@ -710,7 +711,7 b' def kw_copy(orig, ui, repo, pats, opts, '
710 711 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
711 712 For the latter we have to follow the symlink to find out whether its
712 713 target is configured for expansion and we therefore must unexpand the
713 keywords in the destination.'''
714 keywords in the destination."""
714 715 kwt = getattr(repo, '_keywordkwt', None)
715 716 if kwt is None:
716 717 return orig(ui, repo, pats, opts, rename)
@@ -722,9 +723,9 b' def kw_copy(orig, ui, repo, pats, opts, '
722 723 cwd = repo.getcwd()
723 724
724 725 def haskwsource(dest):
725 '''Returns true if dest is a regular file and configured for
726 """Returns true if dest is a regular file and configured for
726 727 expansion or a symlink which points to a file configured for
727 expansion. '''
728 expansion."""
728 729 source = repo.dirstate.copied(dest)
729 730 if b'l' in wctx.flags(source):
730 731 source = pathutil.canonpath(
@@ -785,12 +786,12 b' def kwfilectx_cmp(orig, self, fctx):'
785 786
786 787
787 788 def uisetup(ui):
788 ''' Monkeypatches dispatch._parse to retrieve user command.
789 """Monkeypatches dispatch._parse to retrieve user command.
789 790 Overrides file method to return kwfilelog instead of filelog
790 791 if file matches user configuration.
791 792 Wraps commit to overwrite configured files with updated
792 793 keyword substitutions.
793 Monkeypatches patch and webcommands.'''
794 Monkeypatches patch and webcommands."""
794 795
795 796 def kwdispatch_parse(orig, ui, args):
796 797 '''Monkeypatch dispatch._parse to obtain running hg command.'''
@@ -136,13 +136,19 b' eh.merge(overrides.eh)'
136 136 eh.merge(proto.eh)
137 137
138 138 eh.configitem(
139 b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
139 b'largefiles',
140 b'minsize',
141 default=eh.configitem.dynamicdefault,
140 142 )
141 143 eh.configitem(
142 b'largefiles', b'patterns', default=list,
144 b'largefiles',
145 b'patterns',
146 default=list,
143 147 )
144 148 eh.configitem(
145 b'largefiles', b'usercache', default=None,
149 b'largefiles',
150 b'usercache',
151 default=None,
146 152 )
147 153
148 154 cmdtable = eh.cmdtable
@@ -17,8 +17,8 b' from . import lfutil'
17 17
18 18
19 19 class StoreError(Exception):
20 '''Raised when there is a problem getting files from or putting
21 files to a central store.'''
20 """Raised when there is a problem getting files from or putting
21 files to a central store."""
22 22
23 23 def __init__(self, filename, hash, url, detail):
24 24 self.filename = filename
@@ -49,19 +49,19 b' class basestore(object):'
49 49 raise NotImplementedError(b'abstract method')
50 50
51 51 def exists(self, hashes):
52 '''Check to see if the store contains the given hashes. Given an
53 iterable of hashes it returns a mapping from hash to bool.'''
52 """Check to see if the store contains the given hashes. Given an
53 iterable of hashes it returns a mapping from hash to bool."""
54 54 raise NotImplementedError(b'abstract method')
55 55
56 56 def get(self, files):
57 '''Get the specified largefiles from the store and write to local
57 """Get the specified largefiles from the store and write to local
58 58 files under repo.root. files is a list of (filename, hash)
59 59 tuples. Return (success, missing), lists of files successfully
60 60 downloaded and those not found in the store. success is a list
61 61 of (filename, hash) tuples; missing is a list of filenames that
62 62 we could not get. (The detailed error message will already have
63 63 been presented to the user, so missing is just supplied as a
64 summary.)'''
64 summary.)"""
65 65 success = []
66 66 missing = []
67 67 ui = self.ui
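A hedged sketch of consuming the (success, missing) contract documented above; 'store' stands for any concrete basestore subclass and the file list is made up:

wanted = [(b'sub/large.bin', b'deadbeef' * 5)]   # (filename, hash) pairs
success, missing = store.get(wanted)
# success: [(filename, hash), ...] written to local files under repo.root
# missing: [filename, ...] the store could not provide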
@@ -123,9 +123,9 b' class basestore(object):'
123 123 return True
124 124
125 125 def verify(self, revs, contents=False):
126 '''Verify the existence (and, optionally, contents) of every big
126 """Verify the existence (and, optionally, contents) of every big
127 127 file revision referenced by every changeset in revs.
128 Return 0 if all is well, non-zero on any errors.'''
128 Return 0 if all is well, non-zero on any errors."""
129 129
130 130 self.ui.status(
131 131 _(b'searching %d changesets for largefiles\n') % len(revs)
@@ -163,17 +163,17 b' class basestore(object):'
163 163 return int(failed)
164 164
165 165 def _getfile(self, tmpfile, filename, hash):
166 '''Fetch one revision of one file from the store and write it
166 """Fetch one revision of one file from the store and write it
167 167 to tmpfile. Compute the hash of the file on-the-fly as it
168 168 downloads and return the hash. Close tmpfile. Raise
169 169 StoreError if unable to download the file (e.g. it does not
170 exist in the store).'''
170 exist in the store)."""
171 171 raise NotImplementedError(b'abstract method')
172 172
173 173 def _verifyfiles(self, contents, filestocheck):
174 '''Perform the actual verification of files in the store.
174 """Perform the actual verification of files in the store.
175 175 'contents' controls verification of content hash.
176 176 'filestocheck' is list of files to check.
177 177 Returns _true_ if any problems are found!
178 '''
178 """
179 179 raise NotImplementedError(b'abstract method')
@@ -66,7 +66,7 b' eh = exthelper.exthelper()'
66 66 inferrepo=True,
67 67 )
68 68 def lfconvert(ui, src, dest, *pats, **opts):
69 '''convert a normal repository to a largefiles repository
69 """convert a normal repository to a largefiles repository
70 70
71 71 Convert repository SOURCE to a new repository DEST, identical to
72 72 SOURCE except that certain files will be converted as largefiles:
@@ -82,7 +82,7 b' def lfconvert(ui, src, dest, *pats, **op'
82 82 repository.
83 83
84 84 Use --to-normal to convert largefiles back to normal files; after
85 this, the DEST repository can be used without largefiles at all.'''
85 this, the DEST repository can be used without largefiles at all."""
86 86
87 87 opts = pycompat.byteskwargs(opts)
88 88 if opts[b'to_normal']:
@@ -393,8 +393,8 b' def _converttags(ui, revmap, data):'
393 393
394 394
395 395 def _islfile(file, ctx, matcher, size):
396 '''Return true if file should be considered a largefile, i.e.
397 matcher matches it or it is larger than size.'''
396 """Return true if file should be considered a largefile, i.e.
397 matcher matches it or it is larger than size."""
398 398 # never store special .hg* files as largefiles
399 399 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
400 400 return False
@@ -440,11 +440,11 b' def uploadlfiles(ui, rsrc, rdst, files):'
440 440
441 441
442 442 def verifylfiles(ui, repo, all=False, contents=False):
443 '''Verify that every largefile revision in the current changeset
443 """Verify that every largefile revision in the current changeset
444 444 exists in the central store. With --contents, also verify that
445 445 the contents of each local largefile file revision are correct (SHA-1 hash
446 446 matches the revision ID). With --all, check every changeset in
447 this repository.'''
447 this repository."""
448 448 if all:
449 449 revs = repo.revs(b'all()')
450 450 else:
@@ -455,12 +455,12 b' def verifylfiles(ui, repo, all=False, co'
455 455
456 456
457 457 def cachelfiles(ui, repo, node, filelist=None):
458 '''cachelfiles ensures that all largefiles needed by the specified revision
458 """cachelfiles ensures that all largefiles needed by the specified revision
459 459 are present in the repository's largefile cache.
460 460
461 461 returns a tuple (cached, missing). cached is the list of files downloaded
462 462 by this operation; missing is the list of files that were needed but could
463 not be found.'''
463 not be found."""
464 464 lfiles = lfutil.listlfiles(repo, node)
465 465 if filelist:
466 466 lfiles = set(lfiles) & set(filelist)
@@ -502,11 +502,11 b' def downloadlfiles(ui, repo):'
502 502 def updatelfiles(
503 503 ui, repo, filelist=None, printmessage=None, normallookup=False
504 504 ):
505 '''Update largefiles according to standins in the working directory
505 """Update largefiles according to standins in the working directory
506 506
507 507 If ``printmessage`` is other than ``None``, it means "print (or
508 508 ignore, for false) message forcibly".
509 '''
509 """
510 510 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
511 511 with repo.wlock():
512 512 lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -80,10 +80,10 b' def link(src, dest):'
80 80
81 81
82 82 def usercachepath(ui, hash):
83 '''Return the correct location in the "global" largefiles cache for a file
83 """Return the correct location in the "global" largefiles cache for a file
84 84 with the given hash.
85 85 This cache is used for sharing of largefiles across repositories - both
86 to preserve download bandwidth and storage space.'''
86 to preserve download bandwidth and storage space."""
87 87 return os.path.join(_usercachedir(ui), hash)
88 88
89 89
@@ -143,9 +143,9 b' def inusercache(ui, hash):'
143 143
144 144
145 145 def findfile(repo, hash):
146 '''Return store path of the largefile with the specified hash.
146 """Return store path of the largefile with the specified hash.
147 147 As a side effect, the file might be linked from user cache.
148 Return None if the file can't be found locally.'''
148 Return None if the file can't be found locally."""
149 149 path, exists = findstorepath(repo, hash)
150 150 if exists:
151 151 repo.ui.note(_(b'found %s in store\n') % hash)
@@ -191,10 +191,10 b' class largefilesdirstate(dirstate.dirsta'
191 191
192 192
193 193 def openlfdirstate(ui, repo, create=True):
194 '''
194 """
195 195 Return a dirstate object that tracks largefiles: i.e. its root is
196 196 the repo root, but it is saved in .hg/largefiles/dirstate.
197 '''
197 """
198 198 vfs = repo.vfs
199 199 lfstoredir = longname
200 200 opener = vfsmod.vfs(vfs.join(lfstoredir))
@@ -245,8 +245,8 b' def lfdirstatestatus(lfdirstate, repo):'
245 245
246 246
247 247 def listlfiles(repo, rev=None, matcher=None):
248 '''return a list of largefiles in the working copy or the
249 specified changeset'''
248 """return a list of largefiles in the working copy or the
249 specified changeset"""
250 250
251 251 if matcher is None:
252 252 matcher = getstandinmatcher(repo)
@@ -265,18 +265,18 b' def instore(repo, hash, forcelocal=False'
265 265
266 266
267 267 def storepath(repo, hash, forcelocal=False):
268 '''Return the correct location in the repository largefiles store for a
269 file with the given hash.'''
268 """Return the correct location in the repository largefiles store for a
269 file with the given hash."""
270 270 if not forcelocal and repo.shared():
271 271 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
272 272 return repo.vfs.join(longname, hash)
273 273
274 274
275 275 def findstorepath(repo, hash):
276 '''Search through the local store path(s) to find the file for the given
276 """Search through the local store path(s) to find the file for the given
277 277 hash. If the file is not found, its path in the primary store is returned.
278 278 The return value is a tuple of (path, exists(path)).
279 '''
279 """
280 280 # For shared repos, the primary store is in the share source. But for
281 281 # backward compatibility, force a lookup in the local store if it wasn't
282 282 # found in the share source.
@@ -291,11 +291,11 b' def findstorepath(repo, hash):'
291 291
292 292
293 293 def copyfromcache(repo, hash, filename):
294 '''Copy the specified largefile from the repo or system cache to
294 """Copy the specified largefile from the repo or system cache to
295 295 filename in the repository. Return true on success or false if the
296 296 file was not found in either cache (which should not happened:
297 297 this is meant to be called only after ensuring that the needed
298 largefile exists in the cache).'''
298 largefile exists in the cache)."""
299 299 wvfs = repo.wvfs
300 300 path = findfile(repo, hash)
301 301 if path is None:
@@ -354,8 +354,8 b' def copytostoreabsolute(repo, file, hash'
354 354
355 355
356 356 def linktousercache(repo, hash):
357 '''Link / copy the largefile with the specified hash from the store
358 to the cache.'''
357 """Link / copy the largefile with the specified hash from the store
358 to the cache."""
359 359 path = usercachepath(repo.ui, hash)
360 360 link(storepath(repo, hash), path)
361 361
@@ -380,9 +380,9 b' def getstandinmatcher(repo, rmatcher=Non'
380 380
381 381
382 382 def composestandinmatcher(repo, rmatcher):
383 '''Return a matcher that accepts standins corresponding to the
383 """Return a matcher that accepts standins corresponding to the
384 384 files accepted by rmatcher. Pass the list of files in the matcher
385 as the paths specified by the user.'''
385 as the paths specified by the user."""
386 386 smatcher = getstandinmatcher(repo, rmatcher)
387 387 isstandin = smatcher.matchfn
388 388
@@ -395,8 +395,8 b' def composestandinmatcher(repo, rmatcher'
395 395
396 396
397 397 def standin(filename):
398 '''Return the repo-relative path to the standin for the specified big
399 file.'''
398 """Return the repo-relative path to the standin for the specified big
399 file."""
400 400 # Notes:
401 401 # 1) Some callers want an absolute path, but for instance addlargefiles
402 402 # needs it repo-relative so it can be passed to repo[None].add(). So
@@ -408,8 +408,8 b' def standin(filename):'
408 408
409 409
410 410 def isstandin(filename):
411 '''Return true if filename is a big file standin. filename must be
412 in Mercurial's internal form (slash-separated).'''
411 """Return true if filename is a big file standin. filename must be
412 in Mercurial's internal form (slash-separated)."""
413 413 return filename.startswith(shortnameslash)
414 414
415 415
@@ -439,9 +439,9 b' def updatestandin(repo, lfile, standin):'
439 439
440 440
441 441 def readasstandin(fctx):
442 '''read hex hash from given filectx of standin file
442 """read hex hash from given filectx of standin file
443 443
444 This encapsulates how "standin" data is stored into storage layer.'''
444 This encapsulates how "standin" data is stored into storage layer."""
445 445 return fctx.data().strip()
446 446
447 447
@@ -451,8 +451,8 b' def writestandin(repo, standin, hash, ex'
451 451
452 452
453 453 def copyandhash(instream, outfile):
454 '''Read bytes from instream (iterable) and write them to outfile,
455 computing the SHA-1 hash of the data along the way. Return the hash.'''
454 """Read bytes from instream (iterable) and write them to outfile,
455 computing the SHA-1 hash of the data along the way. Return the hash."""
456 456 hasher = hashutil.sha1(b'')
457 457 for data in instream:
458 458 hasher.update(data)
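The copyandhash helper above is a stream-and-hash pattern; reduced to a self-contained standard-library sketch (not the source's implementation, which uses Mercurial's hashutil wrapper):

import hashlib

def copy_and_hash(instream, outfile):
    # Write each chunk while folding it into the SHA-1 state.
    hasher = hashlib.sha1()
    for chunk in instream:
        hasher.update(chunk)
        outfile.write(chunk)
    return hasher.hexdigest()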
@@ -635,11 +635,11 b' def getlfilestoupload(repo, missing, add'
635 635
636 636
637 637 def updatestandinsbymatch(repo, match):
638 '''Update standins in the working directory according to specified match
638 """Update standins in the working directory according to specified match
639 639
640 640 This returns (possibly modified) ``match`` object to be used for
641 641 subsequent commit process.
642 '''
642 """
643 643
644 644 ui = repo.ui
645 645
@@ -741,7 +741,7 b' def updatestandinsbymatch(repo, match):'
741 741
742 742
743 743 class automatedcommithook(object):
744 '''Stateful hook to update standins at the 1st commit of resuming
744 """Stateful hook to update standins at the 1st commit of resuming
745 745
746 746 For efficiency, updating standins in the working directory should
747 747 be avoided while automated committing (like rebase, transplant and
@@ -750,7 +750,7 b' class automatedcommithook(object):'
750 750 But the 1st commit of resuming automated committing (e.g. ``rebase
751 751 --continue``) should update them, because largefiles may be
752 752 modified manually.
753 '''
753 """
754 754
755 755 def __init__(self, resuming):
756 756 self.resuming = resuming
@@ -764,14 +764,14 b' class automatedcommithook(object):'
764 764
765 765
766 766 def getstatuswriter(ui, repo, forcibly=None):
767 '''Return the function to write largefiles specific status out
767 """Return the function to write largefiles specific status out
768 768
769 769 If ``forcibly`` is ``None``, this returns the last element of
770 770 ``repo._lfstatuswriters`` as "default" writer function.
771 771
772 772 Otherwise, this returns the function to always write out (or
773 773 ignore if ``not forcibly``) status.
774 '''
774 """
775 775 if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
776 776 return repo._lfstatuswriters[-1]
777 777 else:
@@ -20,9 +20,9 b' from . import ('
20 20
21 21
22 22 class localstore(basestore.basestore):
23 '''localstore first attempts to grab files out of the store in the remote
23 """localstore first attempts to grab files out of the store in the remote
24 24 Mercurial repository. Failing that, it attempts to grab the files from
25 the user cache.'''
25 the user cache."""
26 26
27 27 def __init__(self, ui, repo, remote):
28 28 self.remote = remote.local()
@@ -58,8 +58,8 b" MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'"
58 58
59 59
60 60 def composelargefilematcher(match, manifest):
61 '''create a matcher that matches only the largefiles in the original
62 matcher'''
61 """create a matcher that matches only the largefiles in the original
62 matcher"""
63 63 m = copy.copy(match)
64 64 lfile = lambda f: lfutil.standin(f) in manifest
65 65 m._files = [lf for lf in m._files if lfile(lf)]
@@ -586,11 +586,17 b' def overridecalculateupdates('
586 586 mresult.addfile(lfile, b'k', None, b'replaces standin')
587 587 if branchmerge:
588 588 mresult.addfile(
589 standin, b'k', None, b'replaced by non-standin',
589 standin,
590 b'k',
591 None,
592 b'replaced by non-standin',
590 593 )
591 594 else:
592 595 mresult.addfile(
593 standin, b'r', None, b'replaced by non-standin',
596 standin,
597 b'r',
598 None,
599 b'replaced by non-standin',
594 600 )
595 601 elif lm in (b'g', b'dc') and sm != b'r':
596 602 if lm == b'dc':
@@ -610,7 +616,10 b' def overridecalculateupdates('
610 616 if branchmerge:
611 617 # largefile can be restored from standin safely
612 618 mresult.addfile(
613 lfile, b'k', None, b'replaced by standin',
619 lfile,
620 b'k',
621 None,
622 b'replaced by standin',
614 623 )
615 624 mresult.addfile(standin, b'k', None, b'replaces standin')
616 625 else:
@@ -628,7 +637,10 b' def overridecalculateupdates('
628 637 else: # pick remote normal file
629 638 mresult.addfile(lfile, b'g', largs, b'replaces standin')
630 639 mresult.addfile(
631 standin, b'r', None, b'replaced by non-standin',
640 standin,
641 b'r',
642 None,
643 b'replaced by non-standin',
632 644 )
633 645
634 646 return mresult
@@ -39,8 +39,8 b' httpoldcallstream = None'
39 39
40 40
41 41 def putlfile(repo, proto, sha):
42 '''Server command for putting a largefile into a repository's local store
43 and into the user cache.'''
42 """Server command for putting a largefile into a repository's local store
43 and into the user cache."""
44 44 with proto.mayberedirectstdio() as output:
45 45 path = lfutil.storepath(repo, sha)
46 46 util.makedirs(os.path.dirname(path))
@@ -69,8 +69,8 b' def putlfile(repo, proto, sha):'
69 69
70 70
71 71 def getlfile(repo, proto, sha):
72 '''Server command for retrieving a largefile from the repository-local
73 cache or user cache.'''
72 """Server command for retrieving a largefile from the repository-local
73 cache or user cache."""
74 74 filename = lfutil.findfile(repo, sha)
75 75 if not filename:
76 76 raise error.Abort(
@@ -93,12 +93,12 b' def getlfile(repo, proto, sha):'
93 93
94 94
95 95 def statlfile(repo, proto, sha):
96 '''Server command for checking if a largefile is present - returns '2\n' if
96 """Server command for checking if a largefile is present - returns '2\n' if
97 97 the largefile is missing, '0\n' if it seems to be in good condition.
98 98
99 99 The value 1 is reserved for mismatched checksum, but that is too expensive
100 100 to be verified on every stat and must be caught be running 'hg verify'
101 server side.'''
101 server side."""
102 102 filename = lfutil.findfile(repo, sha)
103 103 if not filename:
104 104 return wireprototypes.bytesresponse(b'2\n')
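The stat codes in the docstring above form a tiny wire protocol. A hypothetical client-side interpretation (the helper and constant names are not from the source; the meanings are from the docstring):

STAT_OK, STAT_CORRUPT, STAT_MISSING = 0, 1, 2

def interpret_statlfile(response):
    # response is b'0\n' (present) or b'2\n' (missing); 1 is reserved
    # for checksum mismatches, which only 'hg verify' detects.
    code = int(response.strip())
    return {STAT_OK: 'present',
            STAT_CORRUPT: 'checksum mismatch (reserved)',
            STAT_MISSING: 'missing'}[code]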
@@ -194,8 +194,8 b' def _capabilities(orig, repo, proto):'
194 194
195 195
196 196 def heads(orig, repo, proto):
197 '''Wrap server command - largefile capable clients will know to call
198 lheads instead'''
197 """Wrap server command - largefile capable clients will know to call
198 lheads instead"""
199 199 if lfutil.islfilesrepo(repo):
200 200 return wireprototypes.ooberror(LARGEFILES_REQUIRED_MSG)
201 201
@@ -146,8 +146,8 b' class remotestore(basestore.basestore):'
146 146 raise NotImplementedError(b'abstract method')
147 147
148 148 def _stat(self, hashes):
149 '''Get information about availability of files specified by
149 """Get information about availability of files specified by
150 150 hashes in the remote store. Return dictionary mapping hashes
151 151 to return code where 0 means that file is available, other
152 values if not.'''
152 values if not."""
153 153 raise NotImplementedError(b'abstract method')
@@ -360,7 +360,7 b' def reposetup(ui, repo):'
360 360 # TODO: _subdirlfs should be moved into "lfutil.py", because
361 361 # it is referred only from "lfutil.updatestandinsbymatch"
362 362 def _subdirlfs(self, files, lfiles):
363 '''
363 """
364 364 Adjust matched file list
365 365 If we pass a directory to commit whose only committable files
366 366 are largefiles, the core commit code aborts before finding
@@ -370,7 +370,7 b' def reposetup(ui, repo):'
370 370 we explicitly add the largefiles to the match list and remove
371 371 the directory.
372 372 In other cases, we leave the match list unmodified.
373 '''
373 """
374 374 actualfiles = []
375 375 dirs = []
376 376 regulars = []
@@ -30,13 +30,23 b' class wirestore(remotestore.remotestore)'
30 30 return self.remote.getlfile(hash)
31 31
32 32 def _stat(self, hashes):
33 '''For each hash, return 0 if it is available, other values if not.
33 """For each hash, return 0 if it is available, other values if not.
34 34 It is usually 2 if the largefile is missing, but might be 1 the server
35 has a corrupted copy.'''
35 has a corrupted copy."""
36 36
37 37 with self.remote.commandexecutor() as e:
38 38 fs = []
39 39 for hash in hashes:
40 fs.append((hash, e.callcommand(b'statlfile', {b'sha': hash,})))
40 fs.append(
41 (
42 hash,
43 e.callcommand(
44 b'statlfile',
45 {
46 b'sha': hash,
47 },
48 ),
49 )
50 )
41 51
42 52 return {hash: f.result() for hash, f in fs}
@@ -172,33 +172,51 b' reposetup = eh.finalreposetup'
172 172 templatekeyword = eh.templatekeyword
173 173
174 174 eh.configitem(
175 b'experimental', b'lfs.serve', default=True,
175 b'experimental',
176 b'lfs.serve',
177 default=True,
176 178 )
177 179 eh.configitem(
178 b'experimental', b'lfs.user-agent', default=None,
180 b'experimental',
181 b'lfs.user-agent',
182 default=None,
179 183 )
180 184 eh.configitem(
181 b'experimental', b'lfs.disableusercache', default=False,
185 b'experimental',
186 b'lfs.disableusercache',
187 default=False,
182 188 )
183 189 eh.configitem(
184 b'experimental', b'lfs.worker-enable', default=True,
190 b'experimental',
191 b'lfs.worker-enable',
192 default=True,
185 193 )
186 194
187 195 eh.configitem(
188 b'lfs', b'url', default=None,
196 b'lfs',
197 b'url',
198 default=None,
189 199 )
190 200 eh.configitem(
191 b'lfs', b'usercache', default=None,
201 b'lfs',
202 b'usercache',
203 default=None,
192 204 )
193 205 # Deprecated
194 206 eh.configitem(
195 b'lfs', b'threshold', default=None,
207 b'lfs',
208 b'threshold',
209 default=None,
196 210 )
197 211 eh.configitem(
198 b'lfs', b'track', default=b'none()',
212 b'lfs',
213 b'track',
214 default=b'none()',
199 215 )
200 216 eh.configitem(
201 b'lfs', b'retry', default=5,
217 b'lfs',
218 b'retry',
219 default=5,
202 220 )
203 221
204 222 lfsprocessor = (
@@ -96,8 +96,7 b' class nullvfs(lfsvfs):'
96 96
97 97
98 98 class lfsuploadfile(httpconnectionmod.httpsendfile):
99 """a file-like object that supports keepalive.
100 """
99 """a file-like object that supports keepalive."""
101 100
102 101 def __init__(self, ui, filename):
103 102 super(lfsuploadfile, self).__init__(ui, filename, b'rb')
@@ -258,9 +257,9 b' class local(object):'
258 257
259 258
260 259 def _urlerrorreason(urlerror):
261 '''Create a friendly message for the given URLError to be used in an
260 """Create a friendly message for the given URLError to be used in an
262 261 LfsRemoteError message.
263 '''
262 """
264 263 inst = urlerror
265 264
266 265 if isinstance(urlerror.reason, Exception):
@@ -338,7 +337,10 b' class _gitlfsremote(object):'
338 337 ]
339 338 requestdata = pycompat.bytesurl(
340 339 json.dumps(
341 {'objects': objects, 'operation': pycompat.strurl(action),}
340 {
341 'objects': objects,
342 'operation': pycompat.strurl(action),
343 }
342 344 )
343 345 )
344 346 url = b'%s/objects/batch' % self.baseurl
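The request assembled above follows the git-lfs batch API: a JSON document POSTed to <baseurl>/objects/batch. An illustrative payload sketch (the per-object oid/size fields come from the public LFS protocol, not from this diff, and the values are made up):

import json

payload = json.dumps({
    'operation': 'download',   # or 'upload'
    'objects': [
        {'oid': '31d97...', 'size': 12345},   # illustrative values
    ],
})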
@@ -381,10 +381,10 b' def candownload(repo):'
381 381
382 382
383 383 def uploadblobsfromrevs(repo, revs):
384 '''upload lfs blobs introduced by revs
384 """upload lfs blobs introduced by revs
385 385
386 386 Note: also used by other extensions e. g. infinitepush. avoid renaming.
387 '''
387 """
388 388 if _canskipupload(repo):
389 389 return
390 390 pointers = extractpointers(repo, revs)
@@ -125,16 +125,24 b' configtable = {}'
125 125 configitem = registrar.configitem(configtable)
126 126
127 127 configitem(
128 b'mq', b'git', default=b'auto',
128 b'mq',
129 b'git',
130 default=b'auto',
129 131 )
130 132 configitem(
131 b'mq', b'keepchanges', default=False,
133 b'mq',
134 b'keepchanges',
135 default=False,
132 136 )
133 137 configitem(
134 b'mq', b'plain', default=False,
138 b'mq',
139 b'plain',
140 default=False,
135 141 )
136 142 configitem(
137 b'mq', b'secret', default=False,
143 b'mq',
144 b'secret',
145 default=False,
138 146 )
139 147
140 148 # force load strip extension formerly included in mq and import some utility
@@ -156,8 +164,8 b' strip = strip.strip'
156 164
157 165
158 166 def checksubstate(repo, baserev=None):
159 '''return list of subrepos at a different revision than substate.
160 Abort if any subrepos have uncommitted changes.'''
167 """return list of subrepos at a different revision than substate.
168 Abort if any subrepos have uncommitted changes."""
161 169 inclsubs = []
162 170 wctx = repo[None]
163 171 if baserev:
@@ -449,9 +457,9 b' class patchheader(object):'
449 457 __str__ = encoding.strmethod(__bytes__)
450 458
451 459 def _delmsg(self):
452 '''Remove existing message, keeping the rest of the comments fields.
460 """Remove existing message, keeping the rest of the comments fields.
453 461 If comments contains 'subject: ', message will prepend
454 the field and a blank line.'''
462 the field and a blank line."""
455 463 if self.message:
456 464 subj = b'subject: ' + self.message[0].lower()
457 465 for i in pycompat.xrange(len(self.comments)):
@@ -949,8 +957,8 b' class queue(object):'
949 957 return (0, head)
950 958
951 959 def patch(self, repo, patchfile):
952 '''Apply patchfile to the working directory.
953 patchfile: name of patch file'''
960 """Apply patchfile to the working directory.
961 patchfile: name of patch file"""
954 962 files = set()
955 963 try:
956 964 fuzz = patchmod.patch(
@@ -1363,7 +1371,7 b' class queue(object):'
1363 1371
1364 1372 def new(self, repo, patchfn, *pats, **opts):
1365 1373 """options:
1366 msg: a string or a no-argument function returning a string
1374 msg: a string or a no-argument function returning a string
1367 1375 """
1368 1376 opts = pycompat.byteskwargs(opts)
1369 1377 msg = opts.get(b'msg')
@@ -1718,7 +1726,10 b' class queue(object):'
1718 1726 except: # re-raises
1719 1727 self.ui.warn(_(b'cleaning up working directory...\n'))
1720 1728 cmdutil.revert(
1721 self.ui, repo, repo[b'.'], no_backup=True,
1729 self.ui,
1730 repo,
1731 repo[b'.'],
1732 no_backup=True,
1722 1733 )
1723 1734 # only remove unknown files that we know we touched or
1724 1735 # created while patching
@@ -2823,7 +2834,7 b' def init(ui, repo, **opts):'
2823 2834 norepo=True,
2824 2835 )
2825 2836 def clone(ui, source, dest=None, **opts):
2826 '''clone main and patch repository at same time
2837 """clone main and patch repository at same time
2827 2838
2828 2839 If source is local, destination will have no patches applied. If
2829 2840 source is remote, this command can not check if patches are
@@ -2838,7 +2849,7 b' def clone(ui, source, dest=None, **opts)'
2838 2849 would be created by :hg:`init --mq`.
2839 2850
2840 2851 Return 0 on success.
2841 '''
2852 """
2842 2853 opts = pycompat.byteskwargs(opts)
2843 2854
2844 2855 def patchdir(repo):
@@ -2937,7 +2948,10 b' def commit(ui, repo, *pats, **opts):'
2937 2948
2938 2949 @command(
2939 2950 b"qseries",
2940 [(b'm', b'missing', None, _(b'print patches not in series')),] + seriesopts,
2951 [
2952 (b'm', b'missing', None, _(b'print patches not in series')),
2953 ]
2954 + seriesopts,
2941 2955 _(b'hg qseries [-ms]'),
2942 2956 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
2943 2957 )
@@ -3282,9 +3296,9 b' def fold(ui, repo, *files, **opts):'
3282 3296 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3283 3297 )
3284 3298 def goto(ui, repo, patch, **opts):
3285 '''push or pop patches until named patch is at top of stack
3286
3287 Returns 0 on success.'''
3299 """push or pop patches until named patch is at top of stack
3300
3301 Returns 0 on success."""
3288 3302 opts = pycompat.byteskwargs(opts)
3289 3303 opts = fixkeepchangesopts(ui, opts)
3290 3304 q = repo.mq
@@ -3321,7 +3335,7 b' def goto(ui, repo, patch, **opts):'
3321 3335 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3322 3336 )
3323 3337 def guard(ui, repo, *args, **opts):
3324 '''set or print guards for a patch
3338 """set or print guards for a patch
3325 3339
3326 3340 Guards control whether a patch can be pushed. A patch with no
3327 3341 guards is always pushed. A patch with a positive guard ("+foo") is
@@ -3341,7 +3355,7 b' def guard(ui, repo, *args, **opts):'
3341 3355 hg qguard other.patch -- +2.6.17 -stable
3342 3356
3343 3357 Returns 0 on success.
3344 '''
3358 """
3345 3359
3346 3360 def status(idx):
3347 3361 guards = q.seriesguards[idx] or [b'unguarded']
@@ -3712,7 +3726,7 b' def save(ui, repo, **opts):'
3712 3726 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3713 3727 )
3714 3728 def select(ui, repo, *args, **opts):
3715 '''set or print guarded patches to push
3729 """set or print guarded patches to push
3716 3730
3717 3731 Use the :hg:`qguard` command to set or print guards on patch, then use
3718 3732 qselect to tell mq which guards to use. A patch will be pushed if
@@ -3744,7 +3758,7 b' def select(ui, repo, *args, **opts):'
3744 3758 Use -s/--series to print a list of all guards in the series file
3745 3759 (no other arguments needed). Use -v for more information.
3746 3760
3747 Returns 0 on success.'''
3761 Returns 0 on success."""
3748 3762
3749 3763 q = repo.mq
3750 3764 opts = pycompat.byteskwargs(opts)
@@ -3888,7 +3902,7 b' def finish(ui, repo, *revrange, **opts):'
3888 3902 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3889 3903 )
3890 3904 def qqueue(ui, repo, name=None, **opts):
3891 '''manage multiple patch queues
3905 """manage multiple patch queues
3892 3906
3893 3907 Supports switching between different patch queues, as well as creating
3894 3908 new patch queues and deleting existing ones.
@@ -3907,7 +3921,7 b' def qqueue(ui, repo, name=None, **opts):'
3907 3921 active queue.
3908 3922
3909 3923 Returns 0 on success.
3910 '''
3924 """
3911 3925 q = repo.mq
3912 3926 _defaultqueue = b'patches'
3913 3927 _allqueues = b'patches.queues'
@@ -4250,8 +4264,7 b' revsetpredicate = registrar.revsetpredic'
4250 4264
4251 4265 @revsetpredicate(b'mq()')
4252 4266 def revsetmq(repo, subset, x):
4253 """Changesets managed by MQ.
4254 """
4267 """Changesets managed by MQ."""
4255 4268 revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
4256 4269 applied = {repo[r.node].rev() for r in repo.mq.applied}
4257 4270 return smartset.baseset([r for r in subset if r in applied])
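The docstring hunks above follow another 20.8b1 change: docstrings are rewritten toward PEP 257 form. Triple-single quotes become triple-double quotes, trailing whitespace is stripped, and a docstring that fits on one line has its closing quotes pulled up, while ordinary literals such as b'mq' keep their single quotes because Mercurial disables string normalization. A hypothetical before/after sketch (not from this changeset):

# Before, as black 20.8b1 receives it (single-quoted, closing quotes
# on their own line):
def revsetexample():
    '''Changesets managed by an example extension.
    '''

# After: PEP 257 double quotes, collapsed onto one line.
def revsetexample():
    """Changesets managed by an example extension."""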
@@ -78,7 +78,14 b' def getbundlechangegrouppart_narrow('
78 78
79 79
80 80 def generateellipsesbundle2(
81 bundler, repo, include, exclude, version, common, heads, depth,
81 bundler,
82 repo,
83 include,
84 exclude,
85 version,
86 common,
87 heads,
88 depth,
82 89 ):
83 90 match = narrowspec.match(repo.root, include=include, exclude=exclude)
84 91 if depth is not None:
@@ -113,7 +120,13 b' def generateellipsesbundle2('
113 120
114 121
115 122 def generate_ellipses_bundle2_for_widening(
116 bundler, repo, oldmatch, newmatch, version, common, known,
123 bundler,
124 repo,
125 oldmatch,
126 newmatch,
127 version,
128 common,
129 known,
117 130 ):
118 131 common = set(common or [nullid])
119 132 # Steps:
@@ -120,7 +120,13 b' def narrow_widen('
120 120 )
121 121 else:
122 122 narrowbundle2.generate_ellipses_bundle2_for_widening(
123 bundler, repo, oldmatch, newmatch, cgversion, common, known,
123 bundler,
124 repo,
125 oldmatch,
126 newmatch,
127 cgversion,
128 common,
129 known,
124 130 )
125 131 except error.Abort as exc:
126 132 bundler = bundle2.bundle20(repo.ui)
@@ -190,64 +190,104 b' configtable = {}'
190 190 configitem = registrar.configitem(configtable)
191 191
192 192 configitem(
193 b'notify', b'changegroup', default=None,
193 b'notify',
194 b'changegroup',
195 default=None,
194 196 )
195 197 configitem(
196 b'notify', b'config', default=None,
198 b'notify',
199 b'config',
200 default=None,
197 201 )
198 202 configitem(
199 b'notify', b'diffstat', default=True,
203 b'notify',
204 b'diffstat',
205 default=True,
200 206 )
201 207 configitem(
202 b'notify', b'domain', default=None,
208 b'notify',
209 b'domain',
210 default=None,
203 211 )
204 212 configitem(
205 b'notify', b'messageidseed', default=None,
213 b'notify',
214 b'messageidseed',
215 default=None,
206 216 )
207 217 configitem(
208 b'notify', b'fromauthor', default=None,
218 b'notify',
219 b'fromauthor',
220 default=None,
209 221 )
210 222 configitem(
211 b'notify', b'incoming', default=None,
223 b'notify',
224 b'incoming',
225 default=None,
212 226 )
213 227 configitem(
214 b'notify', b'maxdiff', default=300,
228 b'notify',
229 b'maxdiff',
230 default=300,
215 231 )
216 232 configitem(
217 b'notify', b'maxdiffstat', default=-1,
233 b'notify',
234 b'maxdiffstat',
235 default=-1,
218 236 )
219 237 configitem(
220 b'notify', b'maxsubject', default=67,
238 b'notify',
239 b'maxsubject',
240 default=67,
221 241 )
222 242 configitem(
223 b'notify', b'mbox', default=None,
243 b'notify',
244 b'mbox',
245 default=None,
224 246 )
225 247 configitem(
226 b'notify', b'merge', default=True,
248 b'notify',
249 b'merge',
250 default=True,
227 251 )
228 252 configitem(
229 b'notify', b'outgoing', default=None,
253 b'notify',
254 b'outgoing',
255 default=None,
230 256 )
231 257 configitem(
232 b'notify', b'reply-to-predecessor', default=False,
258 b'notify',
259 b'reply-to-predecessor',
260 default=False,
233 261 )
234 262 configitem(
235 b'notify', b'sources', default=b'serve',
263 b'notify',
264 b'sources',
265 default=b'serve',
236 266 )
237 267 configitem(
238 b'notify', b'showfunc', default=None,
268 b'notify',
269 b'showfunc',
270 default=None,
239 271 )
240 272 configitem(
241 b'notify', b'strip', default=0,
273 b'notify',
274 b'strip',
275 default=0,
242 276 )
243 277 configitem(
244 b'notify', b'style', default=None,
278 b'notify',
279 b'style',
280 default=None,
245 281 )
246 282 configitem(
247 b'notify', b'template', default=None,
283 b'notify',
284 b'template',
285 default=None,
248 286 )
249 287 configitem(
250 b'notify', b'test', default=True,
288 b'notify',
289 b'test',
290 default=True,
251 291 )
252 292
253 293 # template for single changeset can include email headers.
@@ -539,10 +579,10 b' class notifier(object):'
539 579
540 580
541 581 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
542 '''send email notifications to interested subscribers.
582 """send email notifications to interested subscribers.
543 583
544 584 if used as changegroup hook, send one email for all changesets in
545 changegroup. else send one email per changeset.'''
585 changegroup. else send one email per changeset."""
546 586
547 587 n = notifier(ui, repo, hooktype)
548 588 ctx = repo.unfiltered()[node]
@@ -41,7 +41,9 b' configtable = {}'
41 41 configitem = registrar.configitem(configtable)
42 42
43 43 configitem(
44 b'pager', b'attend', default=lambda: attended,
44 b'pager',
45 b'attend',
46 default=lambda: attended,
45 47 )
46 48
47 49
@@ -110,34 +110,54 b' configtable = {}'
110 110 configitem = registrar.configitem(configtable)
111 111
112 112 configitem(
113 b'patchbomb', b'bundletype', default=None,
113 b'patchbomb',
114 b'bundletype',
115 default=None,
114 116 )
115 117 configitem(
116 b'patchbomb', b'bcc', default=None,
118 b'patchbomb',
119 b'bcc',
120 default=None,
117 121 )
118 122 configitem(
119 b'patchbomb', b'cc', default=None,
123 b'patchbomb',
124 b'cc',
125 default=None,
120 126 )
121 127 configitem(
122 b'patchbomb', b'confirm', default=False,
128 b'patchbomb',
129 b'confirm',
130 default=False,
123 131 )
124 132 configitem(
125 b'patchbomb', b'flagtemplate', default=None,
133 b'patchbomb',
134 b'flagtemplate',
135 default=None,
126 136 )
127 137 configitem(
128 b'patchbomb', b'from', default=None,
138 b'patchbomb',
139 b'from',
140 default=None,
129 141 )
130 142 configitem(
131 b'patchbomb', b'intro', default=b'auto',
143 b'patchbomb',
144 b'intro',
145 default=b'auto',
132 146 )
133 147 configitem(
134 b'patchbomb', b'publicurl', default=None,
148 b'patchbomb',
149 b'publicurl',
150 default=None,
135 151 )
136 152 configitem(
137 b'patchbomb', b'reply-to', default=None,
153 b'patchbomb',
154 b'reply-to',
155 default=None,
138 156 )
139 157 configitem(
140 b'patchbomb', b'to', default=None,
158 b'patchbomb',
159 b'to',
160 default=None,
141 161 )
142 162
143 163 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -148,8 +168,7 b" testedwith = b'ships-with-hg-core'"
148 168
149 169
150 170 def _addpullheader(seq, ctx):
151 """Add a header pointing to a public URL where the changeset is available
152 """
171 """Add a header pointing to a public URL where the changeset is available"""
153 172 repo = ctx.repo()
154 173 # experimental config: patchbomb.publicurl
155 174 # waiting for some logic that check that the changeset are available on the
@@ -656,7 +675,7 b' emailopts = ['
656 675 helpcategory=command.CATEGORY_IMPORT_EXPORT,
657 676 )
658 677 def email(ui, repo, *revs, **opts):
659 '''send changesets by email
678 """send changesets by email
660 679
661 680 By default, diffs are sent in the format generated by
662 681 :hg:`export`, one per message. The series starts with a "[PATCH 0
@@ -739,7 +758,7 b' def email(ui, repo, *revs, **opts):'
739 758
740 759 Before using this command, you will need to enable email in your
741 760 hgrc. See the [email] section in hgrc(5) for details.
742 '''
761 """
743 762 opts = pycompat.byteskwargs(opts)
744 763
745 764 _charsets = mail._charsets(ui)
@@ -108,33 +108,51 b' uisetup = eh.finaluisetup'
108 108
109 109 # developer config: phabricator.batchsize
110 110 eh.configitem(
111 b'phabricator', b'batchsize', default=12,
111 b'phabricator',
112 b'batchsize',
113 default=12,
112 114 )
113 115 eh.configitem(
114 b'phabricator', b'callsign', default=None,
116 b'phabricator',
117 b'callsign',
118 default=None,
115 119 )
116 120 eh.configitem(
117 b'phabricator', b'curlcmd', default=None,
121 b'phabricator',
122 b'curlcmd',
123 default=None,
118 124 )
119 125 # developer config: phabricator.debug
120 126 eh.configitem(
121 b'phabricator', b'debug', default=False,
127 b'phabricator',
128 b'debug',
129 default=False,
122 130 )
123 131 # developer config: phabricator.repophid
124 132 eh.configitem(
125 b'phabricator', b'repophid', default=None,
133 b'phabricator',
134 b'repophid',
135 default=None,
126 136 )
127 137 eh.configitem(
128 b'phabricator', b'url', default=None,
138 b'phabricator',
139 b'url',
140 default=None,
129 141 )
130 142 eh.configitem(
131 b'phabsend', b'confirm', default=False,
143 b'phabsend',
144 b'confirm',
145 default=False,
132 146 )
133 147 eh.configitem(
134 b'phabimport', b'secret', default=False,
148 b'phabimport',
149 b'secret',
150 default=False,
135 151 )
136 152 eh.configitem(
137 b'phabimport', b'obsolete', default=False,
153 b'phabimport',
154 b'obsolete',
155 default=False,
138 156 )
139 157
140 158 colortable = {
@@ -166,8 +184,7 b' colortable = {'
166 184
167 185 @eh.wrapfunction(localrepo, "loadhgrc")
168 186 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
169 """Load ``.arcconfig`` content into a ui instance on repository open.
170 """
187 """Load ``.arcconfig`` content into a ui instance on repository open."""
171 188 result = False
172 189 arcconfig = {}
173 190
@@ -633,8 +650,7 b' class DiffFileType(object):'
633 650
634 651 @attr.s
635 652 class phabhunk(dict):
636 """Represents a Differential hunk, which is owned by a Differential change
637 """
653 """Represents a Differential hunk, which is owned by a Differential change"""
638 654
639 655 oldOffset = attr.ib(default=0) # camelcase-required
640 656 oldLength = attr.ib(default=0) # camelcase-required
@@ -1512,7 +1528,9 b' def phabsend(ui, repo, *revs, **opts):'
1512 1528 mapping.get(old.p2().node(), (old.p2(),))[0],
1513 1529 ]
1514 1530 newdesc = rewriteutil.update_hash_refs(
1515 repo, newdesc, mapping,
1531 repo,
1532 newdesc,
1533 mapping,
1516 1534 )
1517 1535 new = context.metadataonlyctx(
1518 1536 repo,
@@ -2227,7 +2245,10 b' def template_review(context, mapping):'
2227 2245 m = _differentialrevisiondescre.search(ctx.description())
2228 2246 if m:
2229 2247 return templateutil.hybriddict(
2230 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2248 {
2249 b'url': m.group('url'),
2250 b'id': b"D%s" % m.group('id'),
2251 }
2231 2252 )
2232 2253 else:
2233 2254 tags = ctx.repo().nodetags(ctx.node())
@@ -2238,14 +2259,18 b' def template_review(context, mapping):'
2238 2259 url += b'/'
2239 2260 url += t
2240 2261
2241 return templateutil.hybriddict({b'url': url, b'id': t,})
2262 return templateutil.hybriddict(
2263 {
2264 b'url': url,
2265 b'id': t,
2266 }
2267 )
2242 2268 return None
2243 2269
2244 2270
2245 2271 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2246 2272 def template_status(context, mapping):
2247 """:phabstatus: String. Status of Phabricator differential.
2248 """
2273 """:phabstatus: String. Status of Phabricator differential."""
2249 2274 ctx = context.resource(mapping, b'ctx')
2250 2275 repo = context.resource(mapping, b'repo')
2251 2276 ui = context.resource(mapping, b'ui')
@@ -2259,7 +2284,10 b' def template_status(context, mapping):'
2259 2284 for drev in drevs:
2260 2285 if int(drev[b'id']) == drevid:
2261 2286 return templateutil.hybriddict(
2262 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2287 {
2288 b'url': drev[b'uri'],
2289 b'status': drev[b'statusName'],
2290 }
2263 2291 )
2264 2292 return None
2265 2293
@@ -67,7 +67,7 b" testedwith = b'ships-with-hg-core'"
67 67 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
68 68 )
69 69 def purge(ui, repo, *dirs, **opts):
70 '''removes files not tracked by Mercurial
70 """removes files not tracked by Mercurial
71 71
72 72 Delete files not known to Mercurial. This is useful to test local
73 73 and uncommitted changes in an otherwise-clean source tree.
@@ -95,7 +95,7 b' def purge(ui, repo, *dirs, **opts):'
95 95 you forgot to add to the repository. If you only want to print the
96 96 list of files that this program would delete, use the --print
97 97 option.
98 '''
98 """
99 99 opts = pycompat.byteskwargs(opts)
100 100 cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
101 101
@@ -507,10 +507,10 b' class rebaseruntime(object):'
507 507 ui.note(_(b'rebase merging completed\n'))
508 508
509 509 def _concludenode(self, rev, editor, commitmsg=None):
510 '''Commit the wd changes with parents p1 and p2.
510 """Commit the wd changes with parents p1 and p2.
511 511
512 512 Reuse commit info from rev but also store useful information in extra.
513 Return node of committed revision.'''
513 Return node of committed revision."""
514 514 repo = self.repo
515 515 ctx = repo[rev]
516 516 if commitmsg is None:
@@ -1135,7 +1135,11 b' def _dryrunrebase(ui, repo, action, opts'
1135 1135 overrides = {(b'rebase', b'singletransaction'): True}
1136 1136 with ui.configoverride(overrides, b'rebase'):
1137 1137 _origrebase(
1138 ui, repo, action, opts, rbsrt,
1138 ui,
1139 repo,
1140 action,
1141 opts,
1142 rbsrt,
1139 1143 )
1140 1144 except error.ConflictResolutionRequired:
1141 1145 ui.status(_(b'hit a merge conflict\n'))
@@ -1447,8 +1451,8 b' def externalparent(repo, state, destance'
1447 1451
1448 1452
1449 1453 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1450 '''Commit the memory changes with parents p1 and p2.
1451 Return node of committed revision.'''
1454 """Commit the memory changes with parents p1 and p2.
1455 Return node of committed revision."""
1452 1456 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1453 1457 # ``branch`` (used when passing ``--keepbranches``).
1454 1458 branch = None
@@ -1475,8 +1479,8 b' def commitmemorynode(repo, wctx, editor,'
1475 1479
1476 1480
1477 1481 def commitnode(repo, editor, extra, user, date, commitmsg):
1478 '''Commit the wd changes with parents p1 and p2.
1479 Return node of committed revision.'''
1482 """Commit the wd changes with parents p1 and p2.
1483 Return node of committed revision."""
1480 1484 dsguard = util.nullcontextmanager()
1481 1485 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1482 1486 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
@@ -1965,11 +1969,11 b' def sortsource(destmap):'
1965 1969
1966 1970
1967 1971 def buildstate(repo, destmap, collapse):
1968 '''Define which revisions are going to be rebased and where
1972 """Define which revisions are going to be rebased and where
1969 1973
1970 1974 repo: repo
1971 1975 destmap: {srcrev: destrev}
1972 '''
1976 """
1973 1977 rebaseset = destmap.keys()
1974 1978 originalwd = repo[b'.'].rev()
1975 1979
@@ -39,7 +39,7 b" testedwith = b'ships-with-hg-core'"
39 39 helpcategory=command.CATEGORY_COMMITTING,
40 40 )
41 41 def record(ui, repo, *pats, **opts):
42 '''interactively select changes to commit
42 """interactively select changes to commit
43 43
44 44 If a list of files is omitted, all changes reported by :hg:`status`
45 45 will be candidates for recording.
@@ -65,7 +65,7 b' def record(ui, repo, *pats, **opts):'
65 65
66 66 ? - display help
67 67
68 This command is not available when committing a merge.'''
68 This command is not available when committing a merge."""
69 69
70 70 if not ui.interactive():
71 71 raise error.Abort(
@@ -106,11 +106,11 b' def qrefresh(origfn, ui, repo, *pats, **'
106 106 inferrepo=True,
107 107 )
108 108 def qrecord(ui, repo, patch, *pats, **opts):
109 '''interactively record a new patch
109 """interactively record a new patch
110 110
111 111 See :hg:`help qnew` & :hg:`help record` for more information and
112 112 usage.
113 '''
113 """
114 114 return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
115 115
116 116
@@ -758,10 +758,10 b' def getrenamedfn(orig, repo, endrev=None'
758 758 rcache = {}
759 759
760 760 def getrenamed(fn, rev):
761 '''looks up all renames for a file (up to endrev) the first
761 """looks up all renames for a file (up to endrev) the first
762 762 time the file is given. It indexes on the changerev and only
763 763 parses the manifest if linkrev != changerev.
764 Returns rename info for fn at changerev rev.'''
764 Returns rename info for fn at changerev rev."""
765 765 if rev in rcache.setdefault(fn, {}):
766 766 return rcache[fn][rev]
767 767
@@ -822,8 +822,7 b' def filelogrevset(orig, repo, subset, x)'
822 822
823 823 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
824 824 def gc(ui, *args, **opts):
825 '''garbage collect the client and server filelog caches
826 '''
825 """garbage collect the client and server filelog caches"""
827 826 cachepaths = set()
828 827
829 828 # get the system client cache
@@ -1105,7 +1104,9 b' def _fileprefetchhook(repo, revmatches):'
1105 1104
1106 1105 @command(
1107 1106 b'debugremotefilelog',
1108 [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
1107 [
1108 (b'd', b'decompress', None, _(b'decompress the filelog first')),
1109 ],
1109 1110 _(b'hg debugremotefilelog <path>'),
1110 1111 norepo=True,
1111 1112 )
@@ -1115,7 +1116,9 b' def debugremotefilelog(ui, path, **opts)'
1115 1116
1116 1117 @command(
1117 1118 b'verifyremotefilelog',
1118 [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
1119 [
1120 (b'd', b'decompress', None, _(b'decompress the filelogs first')),
1121 ],
1119 1122 _(b'hg verifyremotefilelogs <directory>'),
1120 1123 norepo=True,
1121 1124 )
@@ -103,7 +103,7 b' class basestore(object):'
103 103 def _cleanupdirectory(self, rootdir):
104 104 """Removes the empty directories and unnecessary files within the root
105 105 directory recursively. Note that this method does not remove the root
106 directory itself. """
106 directory itself."""
107 107
108 108 oldfiles = set()
109 109 otherfiles = set()
@@ -17,8 +17,7 b' from . import ('
17 17
18 18
19 19 class ChainIndicies(object):
20 """A static class for easy reference to the delta chain indicies.
21 """
20 """A static class for easy reference to the delta chain indicies."""
22 21
23 22 # The filename of this revision delta
24 23 NAME = 0
@@ -73,8 +72,7 b' class unioncontentstore(basestore.baseun'
73 72
74 73 @basestore.baseunionstore.retriable
75 74 def getdelta(self, name, node):
76 """Return the single delta entry for the given name/node pair.
77 """
75 """Return the single delta entry for the given name/node pair."""
78 76 for store in self.stores:
79 77 try:
80 78 return store.getdelta(name, node)
@@ -302,8 +302,7 b' def _getfiles_threaded('
302 302
303 303
304 304 class fileserverclient(object):
305 """A client for requesting files from the remote file server.
306 """
305 """A client for requesting files from the remote file server."""
307 306
308 307 def __init__(self, repo):
309 308 ui = repo.ui
@@ -568,8 +567,7 b' class fileserverclient(object):'
568 567 def prefetch(
569 568 self, fileids, force=False, fetchdata=True, fetchhistory=False
570 569 ):
571 """downloads the given file versions to the cache
572 """
570 """downloads the given file versions to the cache"""
573 571 repo = self.repo
574 572 idstocheck = []
575 573 for file, id in fileids:
@@ -63,8 +63,8 b' class remotefilectx(context.filectx):'
63 63 return self.linkrev()
64 64
65 65 def filectx(self, fileid, changeid=None):
66 '''opens an arbitrary revision of the file without
67 opening a new filelog'''
66 """opens an arbitrary revision of the file without
67 opening a new filelog"""
68 68 return remotefilectx(
69 69 self._repo,
70 70 self._path,
@@ -40,8 +40,7 b' from . import ('
40 40
41 41
42 42 def setupserver(ui, repo):
43 """Sets up a normal Mercurial repo so it can serve files to shallow repos.
44 """
43 """Sets up a normal Mercurial repo so it can serve files to shallow repos."""
45 44 onetimesetup(ui)
46 45
47 46 # don't send files to shallow clients during pulls
@@ -79,8 +78,7 b' onetime = False'
79 78
80 79
81 80 def onetimesetup(ui):
82 """Configures the wireprotocol for both clients and servers.
83 """
81 """Configures the wireprotocol for both clients and servers."""
84 82 global onetime
85 83 if onetime:
86 84 return
@@ -281,8 +279,7 b' def _loadfileblob(repo, cachepath, path,'
281 279
282 280
283 281 def getflogheads(repo, proto, path):
284 """A server api for requesting a filelog's heads
285 """
282 """A server api for requesting a filelog's heads"""
286 283 flog = repo.file(path)
287 284 heads = flog.heads()
288 285 return b'\n'.join((hex(head) for head in heads if head != nullid))
@@ -309,8 +306,7 b' def getfile(repo, proto, file, node):'
309 306
310 307
311 308 def getfiles(repo, proto):
312 """A server api for requesting particular versions of particular files.
313 """
309 """A server api for requesting particular versions of particular files."""
314 310 if shallowutil.isenabled(repo):
315 311 raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
316 312 if not isinstance(proto, _sshv1server):
@@ -54,8 +54,7 b' def backgroundrepack(repo, incremental=T'
54 54
55 55
56 56 def fullrepack(repo, options=None):
57 """If ``packsonly`` is True, stores creating only loose objects are skipped.
58 """
57 """If ``packsonly`` is True, stores creating only loose objects are skipped."""
59 58 if util.safehasattr(repo, 'shareddatastores'):
60 59 datasource = contentstore.unioncontentstore(*repo.shareddatastores)
61 60 historysource = metadatastore.unionmetadatastore(
@@ -874,8 +873,7 b' class repackledger(object):'
874 873
875 874
876 875 class repackentry(object):
877 """Simple class representing a single revision entry in the repackledger.
878 """
876 """Simple class representing a single revision entry in the repackledger."""
879 877
880 878 __slots__ = (
881 879 'filename',
@@ -161,11 +161,11 b' def wraprepo(repo):'
161 161 return path
162 162
163 163 def maybesparsematch(self, *revs, **kwargs):
164 '''
164 """
165 165 A wrapper that allows the remotefilelog to invoke sparsematch() if
166 166 this is a sparse repository, or returns None if this is not a
167 167 sparse repository.
168 '''
168 """
169 169 if revs:
170 170 ret = sparse.matcher(repo, revs=revs)
171 171 else:
@@ -217,8 +217,7 b' def wraprepo(repo):'
217 217 def backgroundprefetch(
218 218 self, revs, base=None, repack=False, pats=None, opts=None
219 219 ):
220 """Runs prefetch in background with optional repack
221 """
220 """Runs prefetch in background with optional repack"""
222 221 cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
223 222 if repack:
224 223 cmd.append(b'--repack')
@@ -66,13 +66,19 b' templatekeyword = registrar.templatekeyw'
66 66 revsetpredicate = registrar.revsetpredicate()
67 67
68 68 configitem(
69 b'remotenames', b'bookmarks', default=True,
69 b'remotenames',
70 b'bookmarks',
71 default=True,
70 72 )
71 73 configitem(
72 b'remotenames', b'branches', default=True,
74 b'remotenames',
75 b'branches',
76 default=True,
73 77 )
74 78 configitem(
75 b'remotenames', b'hoistedpeer', default=b'default',
79 b'remotenames',
80 b'hoistedpeer',
81 default=b'default',
76 82 )
77 83
78 84
@@ -142,8 +142,7 b' def extsetup(ui):'
142 142
143 143 @command(b'debugexpandscheme', norepo=True)
144 144 def expandscheme(ui, url, **opts):
145 """given a repo path, provide the scheme-expanded path
146 """
145 """given a repo path, provide the scheme-expanded path"""
147 146 repo = hg._peerlookup(url)
148 147 if isinstance(repo, ShortRepository):
149 148 url = repo.resolve(url)
@@ -75,7 +75,12 b" testedwith = b'ships-with-hg-core'"
75 75 [
76 76 (b'U', b'noupdate', None, _(b'do not create a working directory')),
77 77 (b'B', b'bookmarks', None, _(b'also share bookmarks')),
78 (b'', b'relative', None, _(b'point to source using a relative path'),),
78 (
79 b'',
80 b'relative',
81 None,
82 _(b'point to source using a relative path'),
83 ),
79 84 ],
80 85 _(b'[-U] [-B] SOURCE [DEST]'),
81 86 helpcategory=command.CATEGORY_REPO_CREATION,
@@ -62,10 +62,14 b' configtable = {}'
62 62 configitem = registrar.configitem(configtable)
63 63
64 64 configitem(
65 b'transplant', b'filter', default=None,
65 b'transplant',
66 b'filter',
67 default=None,
66 68 )
67 69 configitem(
68 b'transplant', b'log', default=None,
70 b'transplant',
71 b'log',
72 default=None,
69 73 )
70 74
71 75
@@ -140,8 +144,8 b' class transplanter(object):'
140 144 self.getcommiteditor = getcommiteditor
141 145
142 146 def applied(self, repo, node, parent):
143 '''returns True if a node is already an ancestor of parent
144 or is parent or has already been transplanted'''
147 """returns True if a node is already an ancestor of parent
148 or is parent or has already been transplanted"""
145 149 if hasnode(repo, parent):
146 150 parentrev = repo.changelog.rev(parent)
147 151 if hasnode(repo, node):
@@ -682,7 +686,7 b' def browserevs(ui, repo, nodes, opts):'
682 686 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
683 687 )
684 688 def transplant(ui, repo, *revs, **opts):
685 '''transplant changesets from another branch
689 """transplant changesets from another branch
686 690
687 691 Selected changesets will be applied on top of the current working
688 692 directory with the log of the original changeset. The changesets
@@ -731,7 +735,7 b' def transplant(ui, repo, *revs, **opts):'
731 735 If a changeset application fails, you can fix the merge by hand
732 736 and then resume where you left off by calling :hg:`transplant
733 737 --continue/-c`.
734 '''
738 """
735 739 with repo.wlock():
736 740 return _dotransplant(ui, repo, *revs, **opts)
737 741
@@ -743,9 +747,9 b' def _dotransplant(ui, repo, *revs, **opt'
743 747 yield node
744 748
745 749 def transplantwalk(repo, dest, heads, match=util.always):
746 '''Yield all nodes that are ancestors of a head but not ancestors
750 """Yield all nodes that are ancestors of a head but not ancestors
747 751 of dest.
748 If no heads are specified, the heads of repo will be used.'''
752 If no heads are specified, the heads of repo will be used."""
749 753 if not heads:
750 754 heads = repo.heads()
751 755 ancestors = []
@@ -886,8 +890,7 b' revsetpredicate = registrar.revsetpredic'
886 890
887 891 @revsetpredicate(b'transplanted([set])')
888 892 def revsettransplanted(repo, subset, x):
889 """Transplanted changesets in set, or all transplanted changesets.
890 """
893 """Transplanted changesets in set, or all transplanted changesets."""
891 894 if x:
892 895 s = revset.getset(repo, subset, x)
893 896 else:
@@ -43,10 +43,14 b' configtable = {}'
43 43 configitem = registrar.configitem(configtable)
44 44
45 45 configitem(
46 b'experimental', b'uncommitondirtywdir', default=False,
46 b'experimental',
47 b'uncommitondirtywdir',
48 default=False,
47 49 )
48 50 configitem(
49 b'experimental', b'uncommit.keep', default=False,
51 b'experimental',
52 b'uncommit.keep',
53 default=False,
50 54 )
51 55
52 56 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -70,7 +70,9 b' configitem = registrar.configitem(config'
70 70 # Encoding.encoding may be updated by --encoding option.
71 71 # Use a lambda to delay the resolution.
72 72 configitem(
73 b'win32mbcs', b'encoding', default=lambda: encoding.encoding,
73 b'win32mbcs',
74 b'encoding',
75 default=lambda: encoding.encoding,
74 76 )
75 77
76 78 _encoding = None # see extsetup
@@ -62,7 +62,9 b' configtable = {}'
62 62 configitem = registrar.configitem(configtable)
63 63
64 64 configitem(
65 b'win32text', b'warn', default=True,
65 b'win32text',
66 b'warn',
67 default=True,
66 68 )
67 69
68 70 # regexp for single LF without CR preceding.
@@ -33,8 +33,7 b' def levelchecker(level, msgidpat):'
33 33
34 34
35 35 def match(checker, pe):
36 """Examine whether POEntry "pe" is target of specified checker or not
37 """
36 """Examine whether POEntry "pe" is target of specified checker or not"""
38 37 if not checker.match(pe.msgid):
39 38 return
40 39 # examine suppression by translator comment
@@ -148,11 +148,11 b' def ancestors(pfunc, *orignodes):'
148 148
149 149
150 150 class incrementalmissingancestors(object):
151 '''persistent state used to calculate missing ancestors incrementally
151 """persistent state used to calculate missing ancestors incrementally
152 152
153 153 Although similar in spirit to lazyancestors below, this is a separate class
154 154 because trying to support contains and missingancestors operations with the
155 same internal data structures adds needless complexity.'''
155 same internal data structures adds needless complexity."""
156 156
157 157 def __init__(self, pfunc, bases):
158 158 self.bases = set(bases)
@@ -198,12 +198,12 b' class incrementalmissingancestors(object'
198 198 break
199 199
200 200 def missingancestors(self, revs):
201 '''return all the ancestors of revs that are not ancestors of self.bases
201 """return all the ancestors of revs that are not ancestors of self.bases
202 202
203 203 This may include elements from revs.
204 204
205 205 Equivalent to the revset (::revs - ::self.bases). Revs are returned in
206 revision number order, which is a topological order.'''
206 revision number order, which is a topological order."""
207 207 revsvisit = set(revs)
208 208 basesvisit = self.bases
209 209 pfunc = self.pfunc
@@ -37,8 +37,8 b' stringio = util.stringio'
37 37
38 38
39 39 def tidyprefix(dest, kind, prefix):
40 '''choose prefix to use for names in archive. make sure prefix is
41 safe for consumers.'''
40 """choose prefix to use for names in archive. make sure prefix is
41 safe for consumers."""
42 42
43 43 if prefix:
44 44 prefix = util.normpath(prefix)
@@ -132,8 +132,8 b' def buildmetadata(ctx):'
132 132
133 133
134 134 class tarit(object):
135 '''write archive to tar file or stream. can write uncompressed,
136 or compress with gzip or bzip2.'''
135 """write archive to tar file or stream. can write uncompressed,
136 or compress with gzip or bzip2."""
137 137
138 138 if pycompat.ispy3:
139 139 GzipFileWithTime = gzip.GzipFile # camelcase-required
@@ -185,8 +185,10 b' class tarit(object):'
185 185 mtime=mtime,
186 186 )
187 187 self.fileobj = gzfileobj
188 return tarfile.TarFile.taropen( # pytype: disable=attribute-error
189 name, pycompat.sysstr(mode), gzfileobj
188 return (
189 tarfile.TarFile.taropen( # pytype: disable=attribute-error
190 name, pycompat.sysstr(mode), gzfileobj
191 )
190 192 )
191 193 else:
192 194 try:
@@ -224,8 +226,8 b' class tarit(object):'
224 226
225 227
226 228 class zipit(object):
227 '''write archive to zip file or stream. can write uncompressed,
228 or compressed with deflate.'''
229 """write archive to zip file or stream. can write uncompressed,
230 or compressed with deflate."""
229 231
230 232 def __init__(self, dest, mtime, compress=True):
231 233 if isinstance(dest, bytes):
@@ -316,7 +318,7 b' def archive('
316 318 mtime=None,
317 319 subrepos=False,
318 320 ):
319 '''create archive of repo as it was at node.
321 """create archive of repo as it was at node.
320 322
321 323 dest can be name of directory, name of archive file, or file
322 324 object to write archive to.
@@ -333,7 +335,7 b' def archive('
333 335 mtime is the modified time, in seconds, or None to use the changeset time.
334 336
335 337 subrepos tells whether to include subrepos.
336 '''
338 """
337 339
338 340 if kind == b'txz' and not pycompat.ispy3:
339 341 raise error.Abort(_(b'xz compression is only available in Python 3'))
@@ -189,8 +189,7 b' class bmstore(object):'
189 189 return self._nodemap.get(node, [])
190 190
191 191 def applychanges(self, repo, tr, changes):
192 """Apply a list of changes to bookmarks
193 """
192 """Apply a list of changes to bookmarks"""
194 193 bmchanges = tr.changes.get(b'bookmarks')
195 194 for name, node in changes:
196 195 old = self._refmap.get(name)
@@ -422,8 +421,8 b' def headsforactive(repo):'
422 421
423 422
424 423 def calculateupdate(ui, repo):
425 '''Return a tuple (activemark, movemarkfrom) indicating the active bookmark
426 and where to move the active bookmark from, if needed.'''
424 """Return a tuple (activemark, movemarkfrom) indicating the active bookmark
425 and where to move the active bookmark from, if needed."""
427 426 checkout, movemarkfrom = None, None
428 427 activemark = repo._activebookmark
429 428 if isactivewdirparent(repo):
@@ -509,7 +508,7 b' def pushbookmark(repo, key, old, new):'
509 508
510 509
511 510 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
512 '''Compare bookmarks between srcmarks and dstmarks
511 """Compare bookmarks between srcmarks and dstmarks
513 512
514 513 This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
515 514 differ, invalid)", each are list of bookmarks below:
@@ -532,7 +531,7 b' def comparebookmarks(repo, srcmarks, dst'
532 531
533 532 If "targets" is specified, only bookmarks listed in it are
534 533 examined.
535 '''
534 """
536 535
537 536 if targets:
538 537 bset = set(targets)
@@ -585,14 +584,14 b' def comparebookmarks(repo, srcmarks, dst'
585 584
586 585
587 586 def _diverge(ui, b, path, localmarks, remotenode):
588 '''Return appropriate diverged bookmark for specified ``path``
587 """Return appropriate diverged bookmark for specified ``path``
589 588
590 589 This returns None if it fails to assign any divergent
591 590 bookmark name.
592 591
593 592 This reuses an already existing one with an "@number" suffix if it
594 593 refers to ``remotenode``.
595 '''
594 """
596 595 if b == b'@':
597 596 b = b''
598 597 # try to use an @pathalias suffix
@@ -762,13 +761,17 b' def updatefromremote(ui, repo, remotemar'
762 761
763 762
764 763 def incoming(ui, repo, peer):
765 '''Show bookmarks incoming from other to repo
766 '''
764 """Show bookmarks incoming from other to repo"""
767 765 ui.status(_(b"searching for changed bookmarks\n"))
768 766
769 767 with peer.commandexecutor() as e:
770 768 remotemarks = unhexlifybookmarks(
771 e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
769 e.callcommand(
770 b'listkeys',
771 {
772 b'namespace': b'bookmarks',
773 },
774 ).result()
772 775 )
773 776
774 777 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -813,8 +816,7 b' def incoming(ui, repo, peer):'
813 816
814 817
815 818 def outgoing(ui, repo, other):
816 '''Show bookmarks outgoing from repo to other
817 '''
819 """Show bookmarks outgoing from repo to other"""
818 820 ui.status(_(b"searching for changed bookmarks\n"))
819 821
820 822 remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
@@ -863,13 +865,18 b' def outgoing(ui, repo, other):'
863 865
864 866
865 867 def summary(repo, peer):
866 '''Compare bookmarks between repo and other for "hg summary" output
868 """Compare bookmarks between repo and other for "hg summary" output
867 869
868 870 This returns "(# of incoming, # of outgoing)" tuple.
869 '''
871 """
870 872 with peer.commandexecutor() as e:
871 873 remotemarks = unhexlifybookmarks(
872 e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
874 e.callcommand(
875 b'listkeys',
876 {
877 b'namespace': b'bookmarks',
878 },
879 ).result()
873 880 )
874 881
875 882 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -41,7 +41,17 b' if pycompat.TYPE_CHECKING:'
41 41 )
42 42
43 43 assert any(
44 (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union,)
44 (
45 Any,
46 Callable,
47 Dict,
48 Iterable,
49 List,
50 Optional,
51 Set,
52 Tuple,
53 Union,
54 )
45 55 )
46 56
47 57 subsettable = repoviewutil.subsettable
@@ -139,8 +149,7 b' class BranchMapCache(object):'
139 149
140 150
141 151 def _unknownnode(node):
142 """ raises ValueError when branchcache found a node which does not exists
143 """
152 """raises ValueError when branchcache found a node which does not exists"""
144 153 raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
145 154
146 155
@@ -183,9 +192,9 b' class branchcache(object):'
183 192 hasnode=None,
184 193 ):
185 194 # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
186 """ hasnode is a function which can be used to verify whether changelog
195 """hasnode is a function which can be used to verify whether changelog
187 196 has a given node or not. If it's not provided, we assume that every node
188 we have exists in changelog """
197 we have exists in changelog"""
189 198 self.tipnode = tipnode
190 199 self.tiprev = tiprev
191 200 self.filteredhash = filteredhash
@@ -304,7 +313,7 b' class branchcache(object):'
304 313 return bcache
305 314
306 315 def load(self, repo, lineiter):
307 """ fully loads the branchcache by reading from the file using the line
316 """fully loads the branchcache by reading from the file using the line
308 317 iterator passed"""
309 318 for line in lineiter:
310 319 line = line.rstrip(b'\n')
@@ -340,8 +349,8 b' class branchcache(object):'
340 349 return False
341 350
342 351 def _branchtip(self, heads):
343 '''Return tuple with last open head in heads and false,
344 otherwise return last closed head and true.'''
352 """Return tuple with last open head in heads and false,
353 otherwise return last closed head and true."""
345 354 tip = heads[-1]
346 355 closed = True
347 356 for h in reversed(heads):
@@ -352,9 +361,9 b' class branchcache(object):'
352 361 return tip, closed
353 362
354 363 def branchtip(self, branch):
355 '''Return the tipmost open head on branch head, otherwise return the
364 """Return the tipmost open head on branch head, otherwise return the
356 365 tipmost closed head on branch.
357 Raise KeyError for unknown branch.'''
366 Raise KeyError for unknown branch."""
358 367 return self._branchtip(self[branch])[0]
359 368
360 369 def iteropen(self, nodes):
@@ -489,7 +489,12 b' def processparts(repo, op, unbundler):'
489 489
490 490 def _processchangegroup(op, cg, tr, source, url, **kwargs):
491 491 ret = cg.apply(op.repo, tr, source, url, **kwargs)
492 op.records.add(b'changegroup', {b'return': ret,})
492 op.records.add(
493 b'changegroup',
494 {
495 b'return': ret,
496 },
497 )
493 498 return ret
494 499
495 500
@@ -1647,8 +1652,7 b' def bundle2caps(remote):'
1647 1652
1648 1653
1649 1654 def obsmarkersversion(caps):
1650 """extract the list of supported obsmarkers versions from a bundle2caps dict
1651 """
1655 """extract the list of supported obsmarkers versions from a bundle2caps dict"""
1652 1656 obscaps = caps.get(b'obsmarkers', ())
1653 1657 return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
1654 1658
@@ -328,8 +328,7 b' class bundlerepository(object):'
328 328 self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
329 329
330 330 def _writetempbundle(self, readfn, suffix, header=b''):
331 """Write a temporary file to disk
332 """
331 """Write a temporary file to disk"""
333 332 fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
334 333 self.tempfile = temp
335 334
@@ -530,7 +529,7 b' class bundletransactionmanager(object):'
530 529 def getremotechanges(
531 530 ui, repo, peer, onlyheads=None, bundlename=None, force=False
532 531 ):
533 '''obtains a bundle of changes incoming from peer
532 """obtains a bundle of changes incoming from peer
534 533
535 534 "onlyheads" restricts the returned changes to those reachable from the
536 535 specified heads.
@@ -548,7 +547,7 b' def getremotechanges('
548 547 "cleanupfn" must be called without arguments when you're done processing
549 548 the changes; it closes both the original "peer" and the one returned
550 549 here.
551 '''
550 """
552 551 tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
553 552 common, incoming, rheads = tmp
554 553 if not incoming:
@@ -611,7 +610,10 b' def getremotechanges('
611 610 with peer.commandexecutor() as e:
612 611 cg = e.callcommand(
613 612 b'changegroup',
614 {b'nodes': incoming, b'source': b'incoming',},
613 {
614 b'nodes': incoming,
615 b'source': b'incoming',
616 },
615 617 ).result()
616 618
617 619 rheads = None
@@ -655,7 +657,10 b' def getremotechanges('
655 657
656 658 with peer.commandexecutor() as e:
657 659 remotephases = e.callcommand(
658 b'listkeys', {b'namespace': b'phases',}
660 b'listkeys',
661 {
662 b'namespace': b'phases',
663 },
659 664 ).result()
660 665
661 666 pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
@@ -91,8 +91,8 b' def stripdesc(desc):'
91 91
92 92
93 93 class appender(object):
94 '''the changelog index must be updated last on disk, so we use this class
95 to delay writes to it'''
94 """the changelog index must be updated last on disk, so we use this class
95 to delay writes to it"""
96 96
97 97 def __init__(self, vfs, name, mode, buf):
98 98 self.data = buf
@@ -399,7 +399,7 b' def filterchunks(ui, originalhunks, usec'
399 399
400 400
401 401 def recordfilter(ui, originalhunks, match, operation=None):
402 """ Prompts the user to filter the originalhunks and return a list of
402 """Prompts the user to filter the originalhunks and return a list of
403 403 selected hunks.
404 404 *operation* is used to build ui messages indicating to the user what
405 405 kind of filtering they are doing: reverting, committing, shelving, etc.
@@ -1078,7 +1078,7 b' def findrepo(p):'
1078 1078
1079 1079
1080 1080 def bailifchanged(repo, merge=True, hint=None):
1081 """ enforce the precondition that working directory must be clean.
1081 """enforce the precondition that working directory must be clean.
1082 1082
1083 1083 'merge' can be set to false if a pending uncommitted merge should be
1084 1084 ignored (such as when 'update --check' runs).
@@ -2184,7 +2184,7 b' def export('
2184 2184 opts=None,
2185 2185 match=None,
2186 2186 ):
2187 '''export changesets as hg patches
2187 """export changesets as hg patches
2188 2188
2189 2189 Args:
2190 2190 repo: The repository from which we're exporting revisions.
@@ -2205,7 +2205,7 b' def export('
2205 2205 fntemplate specified: Each rev is written to a unique file named using
2206 2206 the given template.
2207 2207 Otherwise: All revs will be written to basefm.
2208 '''
2208 """
2209 2209 _prefetchchangedfiles(repo, revs, match)
2210 2210
2211 2211 if not fntemplate:
@@ -3476,7 +3476,8 b' def revert(ui, repo, ctx, *pats, **opts)'
3476 3476 repo, [f for sublist in oplist for f in sublist]
3477 3477 )
3478 3478 prefetch(
3479 repo, [(ctx.rev(), matchfiles)],
3479 repo,
3480 [(ctx.rev(), matchfiles)],
3480 3481 )
3481 3482 match = scmutil.match(repo[None], pats)
3482 3483 _performrevert(
@@ -3724,10 +3725,10 b' summaryremotehooks = util.hooks()'
3724 3725
3725 3726
3726 3727 def checkunfinished(repo, commit=False, skipmerge=False):
3727 '''Look for an unfinished multistep operation, like graft, and abort
3728 """Look for an unfinished multistep operation, like graft, and abort
3728 3729 if found. It's probably good to check this right before
3729 3730 bailifchanged().
3730 '''
3731 """
3731 3732 # Check for non-clearable states first, so things like rebase will take
3732 3733 # precedence over update.
3733 3734 for state in statemod._unfinishedstates:
@@ -3753,9 +3754,9 b' def checkunfinished(repo, commit=False, '
3753 3754
3754 3755
3755 3756 def clearunfinished(repo):
3756 '''Check for unfinished operations (as above), and clear the ones
3757 """Check for unfinished operations (as above), and clear the ones
3757 3758 that are clearable.
3758 '''
3759 """
3759 3760 for state in statemod._unfinishedstates:
3760 3761 if state._reportonly:
3761 3762 continue
@@ -3770,8 +3771,8 b' def clearunfinished(repo):'
3770 3771
3771 3772
3772 3773 def getunfinishedstate(repo):
3773 ''' Checks for unfinished operations and returns statecheck object
3774 for it'''
3774 """Checks for unfinished operations and returns statecheck object
3775 for it"""
3775 3776 for state in statemod._unfinishedstates:
3776 3777 if state.isunfinished(repo):
3777 3778 return state
@@ -3779,7 +3780,7 b' def getunfinishedstate(repo):'
3779 3780
3780 3781
3781 3782 def howtocontinue(repo):
3782 '''Check for an unfinished operation and return the command to finish
3783 """Check for an unfinished operation and return the command to finish
3783 3784 it.
3784 3785
3785 3786 statemod._unfinishedstates list is checked for an unfinished operation
@@ -3788,7 +3789,7 b' def howtocontinue(repo):'
3788 3789
3789 3790 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3790 3791 a boolean.
3791 '''
3792 """
3792 3793 contmsg = _(b"continue: %s")
3793 3794 for state in statemod._unfinishedstates:
3794 3795 if not state._continueflag:
@@ -3801,13 +3802,13 b' def howtocontinue(repo):'
3801 3802
3802 3803
3803 3804 def checkafterresolved(repo):
3804 '''Inform the user about the next action after completing hg resolve
3805 """Inform the user about the next action after completing hg resolve
3805 3806
3806 3807 If there's an unfinished operation that supports the continue flag,
3807 3808 howtocontinue will yield repo.ui.warn as the reporter.
3808 3809
3809 3810 Otherwise, it will yield repo.ui.note.
3810 '''
3811 """
3811 3812 msg, warning = howtocontinue(repo)
3812 3813 if msg is not None:
3813 3814 if warning:
@@ -3817,14 +3818,14 b' def checkafterresolved(repo):'
3817 3818
3818 3819
3819 3820 def wrongtooltocontinue(repo, task):
3820 '''Raise an abort suggesting how to properly continue if there is an
3821 """Raise an abort suggesting how to properly continue if there is an
3821 3822 active task.
3822 3823
3823 3824 Uses howtocontinue() to find the active task.
3824 3825
3825 3826 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3826 3827 a hint.
3827 '''
3828 """
3828 3829 after = howtocontinue(repo)
3829 3830 hint = None
3830 3831 if after[1]:
@@ -605,7 +605,7 b' def annotate(ui, repo, *pats, **opts):'
605 605 helpcategory=command.CATEGORY_IMPORT_EXPORT,
606 606 )
607 607 def archive(ui, repo, dest, **opts):
608 '''create an unversioned archive of a repository revision
608 """create an unversioned archive of a repository revision
609 609
610 610 By default, the revision used is the parent of the working
611 611 directory; use -r/--rev to specify a different revision.
@@ -644,7 +644,7 b' def archive(ui, repo, dest, **opts):'
644 644 removed.
645 645
646 646 Returns 0 on success.
647 '''
647 """
648 648
649 649 opts = pycompat.byteskwargs(opts)
650 650 rev = opts.get(b'rev')
@@ -718,7 +718,7 b' def archive(ui, repo, dest, **opts):'
718 718 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
719 719 )
720 720 def backout(ui, repo, node=None, rev=None, **opts):
721 '''reverse effect of earlier changeset
721 """reverse effect of earlier changeset
722 722
723 723 Prepare a new changeset with the effect of REV undone in the
724 724 current working directory. If no conflicts were encountered,
@@ -768,7 +768,7 b' def backout(ui, repo, node=None, rev=Non'
768 768
769 769 Returns 0 on success, 1 if nothing to backout or there are unresolved
770 770 files.
771 '''
771 """
772 772 with repo.wlock(), repo.lock():
773 773 return _dobackout(ui, repo, node, rev, **opts)
774 774
@@ -1166,7 +1166,7 b' def bisect('
1166 1166 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1167 1167 )
1168 1168 def bookmark(ui, repo, *names, **opts):
1169 '''create a new bookmark or list existing bookmarks
1169 """create a new bookmark or list existing bookmarks
1170 1170
1171 1171 Bookmarks are labels on changesets to help track lines of development.
1172 1172 Bookmarks are unversioned and can be moved, renamed and deleted.
@@ -1224,7 +1224,7 b' def bookmark(ui, repo, *names, **opts):'
1224 1224 - print only the active bookmark name::
1225 1225
1226 1226 hg book -ql .
1227 '''
1227 """
1228 1228 opts = pycompat.byteskwargs(opts)
1229 1229 force = opts.get(b'force')
1230 1230 rev = opts.get(b'rev')
@@ -2804,7 +2804,9 b' def files(ui, repo, *pats, **opts):'
2804 2804
2805 2805 @command(
2806 2806 b'forget',
2807 [(b'i', b'interactive', None, _(b'use interactive mode')),]
2807 [
2808 (b'i', b'interactive', None, _(b'use interactive mode')),
2809 ]
2808 2810 + walkopts
2809 2811 + dryrunopts,
2810 2812 _(b'[OPTION]... FILE...'),
@@ -2904,7 +2906,7 b' def forget(ui, repo, *pats, **opts):'
2904 2906 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2905 2907 )
2906 2908 def graft(ui, repo, *revs, **opts):
2907 '''copy changes from other branches onto the current branch
2909 """copy changes from other branches onto the current branch
2908 2910
2909 2911 This command uses Mercurial's merge logic to copy individual
2910 2912 changes from other branches without merging branches in the
@@ -2997,7 +2999,7 b' def graft(ui, repo, *revs, **opts):'
2997 2999 See :hg:`help revisions` for more about specifying revisions.
2998 3000
2999 3001 Returns 0 on successful completion, 1 if there are unresolved files.
3000 '''
3002 """
3001 3003 with repo.wlock():
3002 3004 return _dograft(ui, repo, *revs, **opts)
3003 3005
@@ -5261,7 +5263,12 b' def postincoming(ui, repo, modheads, opt'
5261 5263 None,
5262 5264 _(b'run even when remote repository is unrelated'),
5263 5265 ),
5264 (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
5266 (
5267 b'',
5268 b'confirm',
5269 None,
5270 _(b'confirm pull before applying changes'),
5271 ),
5265 5272 (
5266 5273 b'r',
5267 5274 b'rev',
@@ -5518,7 +5525,9 b' def push(ui, repo, dest=None, **opts):'
5518 5525
5519 5526 if opts.get(b'all_bookmarks'):
5520 5527 cmdutil.check_incompatible_arguments(
5521 opts, b'all_bookmarks', [b'bookmark', b'rev'],
5528 opts,
5529 b'all_bookmarks',
5530 [b'bookmark', b'rev'],
5522 5531 )
5523 5532 opts[b'bookmark'] = list(repo._bookmarks)
5524 5533
@@ -5608,7 +5617,9 b' def push(ui, repo, dest=None, **opts):'
5608 5617
5609 5618 @command(
5610 5619 b'recover',
5611 [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
5620 [
5621 (b'', b'verify', False, b"run `hg verify` after successful recover"),
5622 ],
5612 5623 helpcategory=command.CATEGORY_MAINTENANCE,
5613 5624 )
5614 5625 def recover(ui, repo, **opts):
@@ -6448,7 +6459,7 b' def serve(ui, repo, **opts):'
6448 6459 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6449 6460 )
6450 6461 def shelve(ui, repo, *pats, **opts):
6451 '''save and set aside changes from the working directory
6462 """save and set aside changes from the working directory
6452 6463
6453 6464 Shelving takes files that "hg status" reports as not clean, saves
6454 6465 the modifications to a bundle (a shelved change), and reverts the
@@ -6479,7 +6490,7 b' def shelve(ui, repo, *pats, **opts):'
6479 6490
6480 6491 To delete specific shelved changes, use ``--delete``. To delete
6481 6492 all shelved changes, use ``--cleanup``.
6482 '''
6493 """
6483 6494 opts = pycompat.byteskwargs(opts)
6484 6495 allowables = [
6485 6496 (b'addremove', {b'create'}), # 'create' is pseudo action
@@ -7707,8 +7718,7 b' def version_(ui, **opts):'
7707 7718
7708 7719
7709 7720 def loadcmdtable(ui, name, cmdtable):
7710 """Load command functions from specified cmdtable
7711 """
7721 """Load command functions from specified cmdtable"""
7712 7722 overrides = [cmd for cmd in cmdtable if cmd in table]
7713 7723 if overrides:
7714 7724 ui.warn(
@@ -316,8 +316,8 b' class server(object):'
316 316 return -1
317 317
318 318 def runcommand(self):
319 """ reads a list of \0 terminated arguments, executes
320 and writes the return code to the result channel """
319 """reads a list of \0 terminated arguments, executes
320 and writes the return code to the result channel"""
321 321 from . import dispatch # avoid cycle
322 322
323 323 args = self._readlist()
@@ -98,7 +98,11 b' def commitctx(repo, ctx, error=False, or'
98 98 )
99 99 xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
100 100 repo.hook(
101 b'pretxncommit', throw=True, node=hex(n), parent1=xp1, parent2=xp2,
101 b'pretxncommit',
102 throw=True,
103 node=hex(n),
104 parent1=xp1,
105 parent2=xp2,
102 106 )
103 107 # set the new commit is proper phase
104 108 targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
@@ -154,10 +158,10 b' def _prepare_files(tr, ctx, error=False,'
154 158
155 159
156 160 def _get_salvaged(repo, ms, ctx):
157 """ returns a list of salvaged files
161 """returns a list of salvaged files
158 162
159 163 returns an empty list if the config option which processes salvaged
160 files is not enabled """
164 files is not enabled"""
161 165 salvaged = []
162 166 copy_sd = repo.filecopiesmode == b'changeset-sidedata'
163 167 if copy_sd and len(ctx.parents()) > 1:
@@ -238,7 +242,14 b' def _process_files(tr, ctx, ms, files, e'
238 242
239 243
240 244 def _filecommit(
241 repo, fctx, manifest1, manifest2, linkrev, tr, includecopymeta, ms,
245 repo,
246 fctx,
247 manifest1,
248 manifest2,
249 linkrev,
250 tr,
251 includecopymeta,
252 ms,
242 253 ):
243 254 """
244 255 commit an individual file as part of a larger transaction
@@ -208,9 +208,11 b' class config(object):'
208 208 def read(self, path, fp=None, sections=None, remap=None):
209 209 if not fp:
210 210 fp = util.posixfile(path, b'rb')
211 assert getattr(fp, 'mode', 'rb') == 'rb', (
212 b'config files must be opened in binary mode, got fp=%r mode=%r'
213 % (fp, fp.mode,)
211 assert (
212 getattr(fp, 'mode', 'rb') == 'rb'
213 ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
214 fp,
215 fp.mode,
214 216 )
215 217
216 218 dir = os.path.dirname(path)
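The config.py hunk above also shows how 20.8b1 re-splits a long assert statement: the tested expression is wrapped in parentheses and the message stays after the comma, rather than parenthesizing the whole expression-plus-message. A generic sketch with illustrative names (not from the changeset):

# The shape black 20.8b1 produces for a long assert-with-message:
filemode = "rb"
assert (
    filemode == "rb"
), "config files must be opened in binary mode, got mode=%r" % (filemode,)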
This diff has been collapsed as it changes many lines (3,311 lines changed).
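The collapsed hunk is roughly 3,300 lines of the same mechanical change applied to mercurial/configitems.py: every coreconfigitem(...) registration that carried a trailing comma is exploded one argument per line, as in the visible portion below. Because the rewrite is pure formatting, it can be sanity-checked by re-running the formatter; black's output is idempotent, so a second pass is a no-op. A small sketch, again assuming black 20.8b1 and Mercurial's presumed settings (`black --check` gives the same answer from the command line):

import black

mode = black.FileMode(line_length=80, string_normalization=False)
once = black.format_str("f(a, b,)\n", mode=mode)
twice = black.format_str(once, mode=mode)
assert once == twice  # already-formatted input comes back unchanged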
@@ -133,78 +133,127 b' coreconfigitem = getitemregister(coreite'
133 133
134 134 def _registerdiffopts(section, configprefix=b''):
135 135 coreconfigitem(
136 section, configprefix + b'nodates', default=False,
136 section,
137 configprefix + b'nodates',
138 default=False,
137 139 )
138 140 coreconfigitem(
139 section, configprefix + b'showfunc', default=False,
141 section,
142 configprefix + b'showfunc',
143 default=False,
140 144 )
141 145 coreconfigitem(
142 section, configprefix + b'unified', default=None,
146 section,
147 configprefix + b'unified',
148 default=None,
143 149 )
144 150 coreconfigitem(
145 section, configprefix + b'git', default=False,
151 section,
152 configprefix + b'git',
153 default=False,
146 154 )
147 155 coreconfigitem(
148 section, configprefix + b'ignorews', default=False,
156 section,
157 configprefix + b'ignorews',
158 default=False,
149 159 )
150 160 coreconfigitem(
151 section, configprefix + b'ignorewsamount', default=False,
161 section,
162 configprefix + b'ignorewsamount',
163 default=False,
152 164 )
153 165 coreconfigitem(
154 section, configprefix + b'ignoreblanklines', default=False,
166 section,
167 configprefix + b'ignoreblanklines',
168 default=False,
155 169 )
156 170 coreconfigitem(
157 section, configprefix + b'ignorewseol', default=False,
171 section,
172 configprefix + b'ignorewseol',
173 default=False,
158 174 )
159 175 coreconfigitem(
160 section, configprefix + b'nobinary', default=False,
176 section,
177 configprefix + b'nobinary',
178 default=False,
161 179 )
162 180 coreconfigitem(
163 section, configprefix + b'noprefix', default=False,
181 section,
182 configprefix + b'noprefix',
183 default=False,
164 184 )
165 185 coreconfigitem(
166 section, configprefix + b'word-diff', default=False,
186 section,
187 configprefix + b'word-diff',
188 default=False,
167 189 )
168 190
169 191
170 192 coreconfigitem(
171 b'alias', b'.*', default=dynamicdefault, generic=True,
172 )
173 coreconfigitem(
174 b'auth', b'cookiefile', default=None,
193 b'alias',
194 b'.*',
195 default=dynamicdefault,
196 generic=True,
197 )
198 coreconfigitem(
199 b'auth',
200 b'cookiefile',
201 default=None,
175 202 )
176 203 _registerdiffopts(section=b'annotate')
177 204 # bookmarks.pushing: internal hack for discovery
178 205 coreconfigitem(
179 b'bookmarks', b'pushing', default=list,
206 b'bookmarks',
207 b'pushing',
208 default=list,
180 209 )
181 210 # bundle.mainreporoot: internal hack for bundlerepo
182 211 coreconfigitem(
183 b'bundle', b'mainreporoot', default=b'',
184 )
185 coreconfigitem(
186 b'censor', b'policy', default=b'abort', experimental=True,
187 )
188 coreconfigitem(
189 b'chgserver', b'idletimeout', default=3600,
190 )
191 coreconfigitem(
192 b'chgserver', b'skiphash', default=False,
193 )
194 coreconfigitem(
195 b'cmdserver', b'log', default=None,
196 )
197 coreconfigitem(
198 b'cmdserver', b'max-log-files', default=7,
199 )
200 coreconfigitem(
201 b'cmdserver', b'max-log-size', default=b'1 MB',
202 )
203 coreconfigitem(
204 b'cmdserver', b'max-repo-cache', default=0, experimental=True,
205 )
206 coreconfigitem(
207 b'cmdserver', b'message-encodings', default=list,
212 b'bundle',
213 b'mainreporoot',
214 default=b'',
215 )
216 coreconfigitem(
217 b'censor',
218 b'policy',
219 default=b'abort',
220 experimental=True,
221 )
222 coreconfigitem(
223 b'chgserver',
224 b'idletimeout',
225 default=3600,
226 )
227 coreconfigitem(
228 b'chgserver',
229 b'skiphash',
230 default=False,
231 )
232 coreconfigitem(
233 b'cmdserver',
234 b'log',
235 default=None,
236 )
237 coreconfigitem(
238 b'cmdserver',
239 b'max-log-files',
240 default=7,
241 )
242 coreconfigitem(
243 b'cmdserver',
244 b'max-log-size',
245 default=b'1 MB',
246 )
247 coreconfigitem(
248 b'cmdserver',
249 b'max-repo-cache',
250 default=0,
251 experimental=True,
252 )
253 coreconfigitem(
254 b'cmdserver',
255 b'message-encodings',
256 default=list,
208 257 )
209 258 coreconfigitem(
210 259 b'cmdserver',
@@ -212,16 +261,25 b' coreconfigitem('
212 261 default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
213 262 )
214 263 coreconfigitem(
215 b'cmdserver', b'shutdown-on-interrupt', default=True,
216 )
217 coreconfigitem(
218 b'color', b'.*', default=None, generic=True,
219 )
220 coreconfigitem(
221 b'color', b'mode', default=b'auto',
222 )
223 coreconfigitem(
224 b'color', b'pagermode', default=dynamicdefault,
264 b'cmdserver',
265 b'shutdown-on-interrupt',
266 default=True,
267 )
268 coreconfigitem(
269 b'color',
270 b'.*',
271 default=None,
272 generic=True,
273 )
274 coreconfigitem(
275 b'color',
276 b'mode',
277 default=b'auto',
278 )
279 coreconfigitem(
280 b'color',
281 b'pagermode',
282 default=dynamicdefault,
225 283 )
226 284 coreconfigitem(
227 285 b'command-templates',
@@ -230,7 +288,10 b' coreconfigitem('
230 288 alias=[(b'ui', b'graphnodetemplate')],
231 289 )
232 290 coreconfigitem(
233 b'command-templates', b'log', default=None, alias=[(b'ui', b'logtemplate')],
291 b'command-templates',
292 b'log',
293 default=None,
294 alias=[(b'ui', b'logtemplate')],
234 295 )
235 296 coreconfigitem(
236 297 b'command-templates',
@@ -252,7 +313,9 b' coreconfigitem('
252 313 alias=[(b'ui', b'pre-merge-tool-output-template')],
253 314 )
254 315 coreconfigitem(
255 b'command-templates', b'oneline-summary', default=None,
316 b'command-templates',
317 b'oneline-summary',
318 default=None,
256 319 )
257 320 coreconfigitem(
258 321 b'command-templates',
@@ -262,327 +325,546 b' coreconfigitem('
262 325 )
263 326 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
264 327 coreconfigitem(
265 b'commands', b'commit.post-status', default=False,
266 )
267 coreconfigitem(
268 b'commands', b'grep.all-files', default=False, experimental=True,
269 )
270 coreconfigitem(
271 b'commands', b'merge.require-rev', default=False,
272 )
273 coreconfigitem(
274 b'commands', b'push.require-revs', default=False,
275 )
276 coreconfigitem(
277 b'commands', b'resolve.confirm', default=False,
278 )
279 coreconfigitem(
280 b'commands', b'resolve.explicit-re-merge', default=False,
281 )
282 coreconfigitem(
283 b'commands', b'resolve.mark-check', default=b'none',
328 b'commands',
329 b'commit.post-status',
330 default=False,
331 )
332 coreconfigitem(
333 b'commands',
334 b'grep.all-files',
335 default=False,
336 experimental=True,
337 )
338 coreconfigitem(
339 b'commands',
340 b'merge.require-rev',
341 default=False,
342 )
343 coreconfigitem(
344 b'commands',
345 b'push.require-revs',
346 default=False,
347 )
348 coreconfigitem(
349 b'commands',
350 b'resolve.confirm',
351 default=False,
352 )
353 coreconfigitem(
354 b'commands',
355 b'resolve.explicit-re-merge',
356 default=False,
357 )
358 coreconfigitem(
359 b'commands',
360 b'resolve.mark-check',
361 default=b'none',
284 362 )
285 363 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
286 364 coreconfigitem(
287 b'commands', b'show.aliasprefix', default=list,
288 )
289 coreconfigitem(
290 b'commands', b'status.relative', default=False,
291 )
292 coreconfigitem(
293 b'commands', b'status.skipstates', default=[], experimental=True,
294 )
295 coreconfigitem(
296 b'commands', b'status.terse', default=b'',
297 )
298 coreconfigitem(
299 b'commands', b'status.verbose', default=False,
300 )
301 coreconfigitem(
302 b'commands', b'update.check', default=None,
303 )
304 coreconfigitem(
305 b'commands', b'update.requiredest', default=False,
306 )
307 coreconfigitem(
308 b'committemplate', b'.*', default=None, generic=True,
309 )
310 coreconfigitem(
311 b'convert', b'bzr.saverev', default=True,
312 )
313 coreconfigitem(
314 b'convert', b'cvsps.cache', default=True,
315 )
316 coreconfigitem(
317 b'convert', b'cvsps.fuzz', default=60,
318 )
319 coreconfigitem(
320 b'convert', b'cvsps.logencoding', default=None,
321 )
322 coreconfigitem(
323 b'convert', b'cvsps.mergefrom', default=None,
324 )
325 coreconfigitem(
326 b'convert', b'cvsps.mergeto', default=None,
327 )
328 coreconfigitem(
329 b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
330 )
331 coreconfigitem(
332 b'convert', b'git.extrakeys', default=list,
333 )
334 coreconfigitem(
335 b'convert', b'git.findcopiesharder', default=False,
336 )
337 coreconfigitem(
338 b'convert', b'git.remoteprefix', default=b'remote',
339 )
340 coreconfigitem(
341 b'convert', b'git.renamelimit', default=400,
342 )
343 coreconfigitem(
344 b'convert', b'git.saverev', default=True,
345 )
346 coreconfigitem(
347 b'convert', b'git.similarity', default=50,
348 )
349 coreconfigitem(
350 b'convert', b'git.skipsubmodules', default=False,
351 )
352 coreconfigitem(
353 b'convert', b'hg.clonebranches', default=False,
354 )
355 coreconfigitem(
356 b'convert', b'hg.ignoreerrors', default=False,
357 )
358 coreconfigitem(
359 b'convert', b'hg.preserve-hash', default=False,
360 )
361 coreconfigitem(
362 b'convert', b'hg.revs', default=None,
363 )
364 coreconfigitem(
365 b'convert', b'hg.saverev', default=False,
366 )
367 coreconfigitem(
368 b'convert', b'hg.sourcename', default=None,
369 )
370 coreconfigitem(
371 b'convert', b'hg.startrev', default=None,
372 )
373 coreconfigitem(
374 b'convert', b'hg.tagsbranch', default=b'default',
375 )
376 coreconfigitem(
377 b'convert', b'hg.usebranchnames', default=True,
378 )
379 coreconfigitem(
380 b'convert', b'ignoreancestorcheck', default=False, experimental=True,
381 )
382 coreconfigitem(
383 b'convert', b'localtimezone', default=False,
384 )
385 coreconfigitem(
386 b'convert', b'p4.encoding', default=dynamicdefault,
387 )
388 coreconfigitem(
389 b'convert', b'p4.startrev', default=0,
390 )
391 coreconfigitem(
392 b'convert', b'skiptags', default=False,
393 )
394 coreconfigitem(
395 b'convert', b'svn.debugsvnlog', default=True,
396 )
397 coreconfigitem(
398 b'convert', b'svn.trunk', default=None,
399 )
400 coreconfigitem(
401 b'convert', b'svn.tags', default=None,
402 )
403 coreconfigitem(
404 b'convert', b'svn.branches', default=None,
405 )
406 coreconfigitem(
407 b'convert', b'svn.startrev', default=0,
408 )
409 coreconfigitem(
410 b'debug', b'dirstate.delaywrite', default=0,
411 )
412 coreconfigitem(
413 b'defaults', b'.*', default=None, generic=True,
414 )
415 coreconfigitem(
416 b'devel', b'all-warnings', default=False,
417 )
418 coreconfigitem(
419 b'devel', b'bundle2.debug', default=False,
420 )
421 coreconfigitem(
422 b'devel', b'bundle.delta', default=b'',
423 )
424 coreconfigitem(
425 b'devel', b'cache-vfs', default=None,
426 )
427 coreconfigitem(
428 b'devel', b'check-locks', default=False,
429 )
430 coreconfigitem(
431 b'devel', b'check-relroot', default=False,
432 )
433 coreconfigitem(
434 b'devel', b'default-date', default=None,
435 )
436 coreconfigitem(
437 b'devel', b'deprec-warn', default=False,
438 )
439 coreconfigitem(
440 b'devel', b'disableloaddefaultcerts', default=False,
441 )
442 coreconfigitem(
443 b'devel', b'warn-empty-changegroup', default=False,
444 )
445 coreconfigitem(
446 b'devel', b'legacy.exchange', default=list,
447 )
448 coreconfigitem(
449 b'devel', b'persistent-nodemap', default=False,
450 )
451 coreconfigitem(
452 b'devel', b'servercafile', default=b'',
453 )
454 coreconfigitem(
455 b'devel', b'serverexactprotocol', default=b'',
456 )
457 coreconfigitem(
458 b'devel', b'serverrequirecert', default=False,
459 )
460 coreconfigitem(
461 b'devel', b'strip-obsmarkers', default=True,
462 )
463 coreconfigitem(
464 b'devel', b'warn-config', default=None,
465 )
466 coreconfigitem(
467 b'devel', b'warn-config-default', default=None,
468 )
469 coreconfigitem(
470 b'devel', b'user.obsmarker', default=None,
471 )
472 coreconfigitem(
473 b'devel', b'warn-config-unknown', default=None,
474 )
475 coreconfigitem(
476 b'devel', b'debug.copies', default=False,
477 )
478 coreconfigitem(
479 b'devel', b'debug.extensions', default=False,
480 )
481 coreconfigitem(
482 b'devel', b'debug.repo-filters', default=False,
483 )
484 coreconfigitem(
485 b'devel', b'debug.peer-request', default=False,
486 )
487 coreconfigitem(
488 b'devel', b'discovery.randomize', default=True,
365 b'commands',
366 b'show.aliasprefix',
367 default=list,
368 )
369 coreconfigitem(
370 b'commands',
371 b'status.relative',
372 default=False,
373 )
374 coreconfigitem(
375 b'commands',
376 b'status.skipstates',
377 default=[],
378 experimental=True,
379 )
380 coreconfigitem(
381 b'commands',
382 b'status.terse',
383 default=b'',
384 )
385 coreconfigitem(
386 b'commands',
387 b'status.verbose',
388 default=False,
389 )
390 coreconfigitem(
391 b'commands',
392 b'update.check',
393 default=None,
394 )
395 coreconfigitem(
396 b'commands',
397 b'update.requiredest',
398 default=False,
399 )
400 coreconfigitem(
401 b'committemplate',
402 b'.*',
403 default=None,
404 generic=True,
405 )
406 coreconfigitem(
407 b'convert',
408 b'bzr.saverev',
409 default=True,
410 )
411 coreconfigitem(
412 b'convert',
413 b'cvsps.cache',
414 default=True,
415 )
416 coreconfigitem(
417 b'convert',
418 b'cvsps.fuzz',
419 default=60,
420 )
421 coreconfigitem(
422 b'convert',
423 b'cvsps.logencoding',
424 default=None,
425 )
426 coreconfigitem(
427 b'convert',
428 b'cvsps.mergefrom',
429 default=None,
430 )
431 coreconfigitem(
432 b'convert',
433 b'cvsps.mergeto',
434 default=None,
435 )
436 coreconfigitem(
437 b'convert',
438 b'git.committeractions',
439 default=lambda: [b'messagedifferent'],
440 )
441 coreconfigitem(
442 b'convert',
443 b'git.extrakeys',
444 default=list,
445 )
446 coreconfigitem(
447 b'convert',
448 b'git.findcopiesharder',
449 default=False,
450 )
451 coreconfigitem(
452 b'convert',
453 b'git.remoteprefix',
454 default=b'remote',
455 )
456 coreconfigitem(
457 b'convert',
458 b'git.renamelimit',
459 default=400,
460 )
461 coreconfigitem(
462 b'convert',
463 b'git.saverev',
464 default=True,
465 )
466 coreconfigitem(
467 b'convert',
468 b'git.similarity',
469 default=50,
470 )
471 coreconfigitem(
472 b'convert',
473 b'git.skipsubmodules',
474 default=False,
475 )
476 coreconfigitem(
477 b'convert',
478 b'hg.clonebranches',
479 default=False,
480 )
481 coreconfigitem(
482 b'convert',
483 b'hg.ignoreerrors',
484 default=False,
485 )
486 coreconfigitem(
487 b'convert',
488 b'hg.preserve-hash',
489 default=False,
490 )
491 coreconfigitem(
492 b'convert',
493 b'hg.revs',
494 default=None,
495 )
496 coreconfigitem(
497 b'convert',
498 b'hg.saverev',
499 default=False,
500 )
501 coreconfigitem(
502 b'convert',
503 b'hg.sourcename',
504 default=None,
505 )
506 coreconfigitem(
507 b'convert',
508 b'hg.startrev',
509 default=None,
510 )
511 coreconfigitem(
512 b'convert',
513 b'hg.tagsbranch',
514 default=b'default',
515 )
516 coreconfigitem(
517 b'convert',
518 b'hg.usebranchnames',
519 default=True,
520 )
521 coreconfigitem(
522 b'convert',
523 b'ignoreancestorcheck',
524 default=False,
525 experimental=True,
526 )
527 coreconfigitem(
528 b'convert',
529 b'localtimezone',
530 default=False,
531 )
532 coreconfigitem(
533 b'convert',
534 b'p4.encoding',
535 default=dynamicdefault,
536 )
537 coreconfigitem(
538 b'convert',
539 b'p4.startrev',
540 default=0,
541 )
542 coreconfigitem(
543 b'convert',
544 b'skiptags',
545 default=False,
546 )
547 coreconfigitem(
548 b'convert',
549 b'svn.debugsvnlog',
550 default=True,
551 )
552 coreconfigitem(
553 b'convert',
554 b'svn.trunk',
555 default=None,
556 )
557 coreconfigitem(
558 b'convert',
559 b'svn.tags',
560 default=None,
561 )
562 coreconfigitem(
563 b'convert',
564 b'svn.branches',
565 default=None,
566 )
567 coreconfigitem(
568 b'convert',
569 b'svn.startrev',
570 default=0,
571 )
572 coreconfigitem(
573 b'debug',
574 b'dirstate.delaywrite',
575 default=0,
576 )
577 coreconfigitem(
578 b'defaults',
579 b'.*',
580 default=None,
581 generic=True,
582 )
583 coreconfigitem(
584 b'devel',
585 b'all-warnings',
586 default=False,
587 )
588 coreconfigitem(
589 b'devel',
590 b'bundle2.debug',
591 default=False,
592 )
593 coreconfigitem(
594 b'devel',
595 b'bundle.delta',
596 default=b'',
597 )
598 coreconfigitem(
599 b'devel',
600 b'cache-vfs',
601 default=None,
602 )
603 coreconfigitem(
604 b'devel',
605 b'check-locks',
606 default=False,
607 )
608 coreconfigitem(
609 b'devel',
610 b'check-relroot',
611 default=False,
612 )
613 coreconfigitem(
614 b'devel',
615 b'default-date',
616 default=None,
617 )
618 coreconfigitem(
619 b'devel',
620 b'deprec-warn',
621 default=False,
622 )
623 coreconfigitem(
624 b'devel',
625 b'disableloaddefaultcerts',
626 default=False,
627 )
628 coreconfigitem(
629 b'devel',
630 b'warn-empty-changegroup',
631 default=False,
632 )
633 coreconfigitem(
634 b'devel',
635 b'legacy.exchange',
636 default=list,
637 )
638 coreconfigitem(
639 b'devel',
640 b'persistent-nodemap',
641 default=False,
642 )
643 coreconfigitem(
644 b'devel',
645 b'servercafile',
646 default=b'',
647 )
648 coreconfigitem(
649 b'devel',
650 b'serverexactprotocol',
651 default=b'',
652 )
653 coreconfigitem(
654 b'devel',
655 b'serverrequirecert',
656 default=False,
657 )
658 coreconfigitem(
659 b'devel',
660 b'strip-obsmarkers',
661 default=True,
662 )
663 coreconfigitem(
664 b'devel',
665 b'warn-config',
666 default=None,
667 )
668 coreconfigitem(
669 b'devel',
670 b'warn-config-default',
671 default=None,
672 )
673 coreconfigitem(
674 b'devel',
675 b'user.obsmarker',
676 default=None,
677 )
678 coreconfigitem(
679 b'devel',
680 b'warn-config-unknown',
681 default=None,
682 )
683 coreconfigitem(
684 b'devel',
685 b'debug.copies',
686 default=False,
687 )
688 coreconfigitem(
689 b'devel',
690 b'debug.extensions',
691 default=False,
692 )
693 coreconfigitem(
694 b'devel',
695 b'debug.repo-filters',
696 default=False,
697 )
698 coreconfigitem(
699 b'devel',
700 b'debug.peer-request',
701 default=False,
702 )
703 coreconfigitem(
704 b'devel',
705 b'discovery.randomize',
706 default=True,
489 707 )
490 708 _registerdiffopts(section=b'diff')
491 709 coreconfigitem(
492 b'email', b'bcc', default=None,
493 )
494 coreconfigitem(
495 b'email', b'cc', default=None,
496 )
497 coreconfigitem(
498 b'email', b'charsets', default=list,
499 )
500 coreconfigitem(
501 b'email', b'from', default=None,
502 )
503 coreconfigitem(
504 b'email', b'method', default=b'smtp',
505 )
506 coreconfigitem(
507 b'email', b'reply-to', default=None,
508 )
509 coreconfigitem(
510 b'email', b'to', default=None,
511 )
512 coreconfigitem(
513 b'experimental', b'archivemetatemplate', default=dynamicdefault,
514 )
515 coreconfigitem(
516 b'experimental', b'auto-publish', default=b'publish',
517 )
518 coreconfigitem(
519 b'experimental', b'bundle-phases', default=False,
520 )
521 coreconfigitem(
522 b'experimental', b'bundle2-advertise', default=True,
523 )
524 coreconfigitem(
525 b'experimental', b'bundle2-output-capture', default=False,
526 )
527 coreconfigitem(
528 b'experimental', b'bundle2.pushback', default=False,
529 )
530 coreconfigitem(
531 b'experimental', b'bundle2lazylocking', default=False,
532 )
533 coreconfigitem(
534 b'experimental', b'bundlecomplevel', default=None,
535 )
536 coreconfigitem(
537 b'experimental', b'bundlecomplevel.bzip2', default=None,
538 )
539 coreconfigitem(
540 b'experimental', b'bundlecomplevel.gzip', default=None,
541 )
542 coreconfigitem(
543 b'experimental', b'bundlecomplevel.none', default=None,
544 )
545 coreconfigitem(
546 b'experimental', b'bundlecomplevel.zstd', default=None,
547 )
548 coreconfigitem(
549 b'experimental', b'changegroup3', default=False,
550 )
551 coreconfigitem(
552 b'experimental', b'cleanup-as-archived', default=False,
553 )
554 coreconfigitem(
555 b'experimental', b'clientcompressionengines', default=list,
556 )
557 coreconfigitem(
558 b'experimental', b'copytrace', default=b'on',
559 )
560 coreconfigitem(
561 b'experimental', b'copytrace.movecandidateslimit', default=100,
562 )
563 coreconfigitem(
564 b'experimental', b'copytrace.sourcecommitlimit', default=100,
565 )
566 coreconfigitem(
567 b'experimental', b'copies.read-from', default=b"filelog-only",
568 )
569 coreconfigitem(
570 b'experimental', b'copies.write-to', default=b'filelog-only',
571 )
572 coreconfigitem(
573 b'experimental', b'crecordtest', default=None,
574 )
575 coreconfigitem(
576 b'experimental', b'directaccess', default=False,
577 )
578 coreconfigitem(
579 b'experimental', b'directaccess.revnums', default=False,
580 )
581 coreconfigitem(
582 b'experimental', b'editortmpinhg', default=False,
583 )
584 coreconfigitem(
585 b'experimental', b'evolution', default=list,
710 b'email',
711 b'bcc',
712 default=None,
713 )
714 coreconfigitem(
715 b'email',
716 b'cc',
717 default=None,
718 )
719 coreconfigitem(
720 b'email',
721 b'charsets',
722 default=list,
723 )
724 coreconfigitem(
725 b'email',
726 b'from',
727 default=None,
728 )
729 coreconfigitem(
730 b'email',
731 b'method',
732 default=b'smtp',
733 )
734 coreconfigitem(
735 b'email',
736 b'reply-to',
737 default=None,
738 )
739 coreconfigitem(
740 b'email',
741 b'to',
742 default=None,
743 )
744 coreconfigitem(
745 b'experimental',
746 b'archivemetatemplate',
747 default=dynamicdefault,
748 )
749 coreconfigitem(
750 b'experimental',
751 b'auto-publish',
752 default=b'publish',
753 )
754 coreconfigitem(
755 b'experimental',
756 b'bundle-phases',
757 default=False,
758 )
759 coreconfigitem(
760 b'experimental',
761 b'bundle2-advertise',
762 default=True,
763 )
764 coreconfigitem(
765 b'experimental',
766 b'bundle2-output-capture',
767 default=False,
768 )
769 coreconfigitem(
770 b'experimental',
771 b'bundle2.pushback',
772 default=False,
773 )
774 coreconfigitem(
775 b'experimental',
776 b'bundle2lazylocking',
777 default=False,
778 )
779 coreconfigitem(
780 b'experimental',
781 b'bundlecomplevel',
782 default=None,
783 )
784 coreconfigitem(
785 b'experimental',
786 b'bundlecomplevel.bzip2',
787 default=None,
788 )
789 coreconfigitem(
790 b'experimental',
791 b'bundlecomplevel.gzip',
792 default=None,
793 )
794 coreconfigitem(
795 b'experimental',
796 b'bundlecomplevel.none',
797 default=None,
798 )
799 coreconfigitem(
800 b'experimental',
801 b'bundlecomplevel.zstd',
802 default=None,
803 )
804 coreconfigitem(
805 b'experimental',
806 b'changegroup3',
807 default=False,
808 )
809 coreconfigitem(
810 b'experimental',
811 b'cleanup-as-archived',
812 default=False,
813 )
814 coreconfigitem(
815 b'experimental',
816 b'clientcompressionengines',
817 default=list,
818 )
819 coreconfigitem(
820 b'experimental',
821 b'copytrace',
822 default=b'on',
823 )
824 coreconfigitem(
825 b'experimental',
826 b'copytrace.movecandidateslimit',
827 default=100,
828 )
829 coreconfigitem(
830 b'experimental',
831 b'copytrace.sourcecommitlimit',
832 default=100,
833 )
834 coreconfigitem(
835 b'experimental',
836 b'copies.read-from',
837 default=b"filelog-only",
838 )
839 coreconfigitem(
840 b'experimental',
841 b'copies.write-to',
842 default=b'filelog-only',
843 )
844 coreconfigitem(
845 b'experimental',
846 b'crecordtest',
847 default=None,
848 )
849 coreconfigitem(
850 b'experimental',
851 b'directaccess',
852 default=False,
853 )
854 coreconfigitem(
855 b'experimental',
856 b'directaccess.revnums',
857 default=False,
858 )
859 coreconfigitem(
860 b'experimental',
861 b'editortmpinhg',
862 default=False,
863 )
864 coreconfigitem(
865 b'experimental',
866 b'evolution',
867 default=list,
586 868 )
587 869 coreconfigitem(
588 870 b'experimental',
@@ -591,10 +873,14 b' coreconfigitem('
591 873 alias=[(b'experimental', b'allowdivergence')],
592 874 )
593 875 coreconfigitem(
594 b'experimental', b'evolution.allowunstable', default=None,
595 )
596 coreconfigitem(
597 b'experimental', b'evolution.createmarkers', default=None,
876 b'experimental',
877 b'evolution.allowunstable',
878 default=None,
879 )
880 coreconfigitem(
881 b'experimental',
882 b'evolution.createmarkers',
883 default=None,
598 884 )
599 885 coreconfigitem(
600 886 b'experimental',
@@ -603,109 +889,173 b' coreconfigitem('
603 889 alias=[(b'experimental', b'effect-flags')],
604 890 )
605 891 coreconfigitem(
606 b'experimental', b'evolution.exchange', default=None,
607 )
608 coreconfigitem(
609 b'experimental', b'evolution.bundle-obsmarker', default=False,
610 )
611 coreconfigitem(
612 b'experimental', b'log.topo', default=False,
613 )
614 coreconfigitem(
615 b'experimental', b'evolution.report-instabilities', default=True,
616 )
617 coreconfigitem(
618 b'experimental', b'evolution.track-operation', default=True,
892 b'experimental',
893 b'evolution.exchange',
894 default=None,
895 )
896 coreconfigitem(
897 b'experimental',
898 b'evolution.bundle-obsmarker',
899 default=False,
900 )
901 coreconfigitem(
902 b'experimental',
903 b'log.topo',
904 default=False,
905 )
906 coreconfigitem(
907 b'experimental',
908 b'evolution.report-instabilities',
909 default=True,
910 )
911 coreconfigitem(
912 b'experimental',
913 b'evolution.track-operation',
914 default=True,
619 915 )
620 916 # repo-level config to exclude a revset from visibility
621 917 #
622 918 # The target use case is to use `share` to expose different subsets of the same
623 919 # repository, especially server side. See also `server.view`.
624 920 coreconfigitem(
625 b'experimental', b'extra-filter-revs', default=None,
626 )
627 coreconfigitem(
628 b'experimental', b'maxdeltachainspan', default=-1,
921 b'experimental',
922 b'extra-filter-revs',
923 default=None,
924 )
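Registration only declares the default; a hedged sketch of reading the knob back (assuming a repo object; ui.config is the standard accessor for string-valued items):

    # Hypothetical consumer of the item registered above:
    filter_revs = repo.ui.config(b'experimental', b'extra-filter-revs')
    if filter_revs is not None:
        pass  # e.g. feed the revset into the repo's visibility filtering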
925 coreconfigitem(
926 b'experimental',
927 b'maxdeltachainspan',
928 default=-1,
629 929 )
630 930 # tracks files which were undeleted (merge might delete them but we explicitly
631 931 # kept/undeleted them) and creates new filenodes for them
632 932 coreconfigitem(
633 b'experimental', b'merge-track-salvaged', default=False,
634 )
635 coreconfigitem(
636 b'experimental', b'mergetempdirprefix', default=None,
637 )
638 coreconfigitem(
639 b'experimental', b'mmapindexthreshold', default=None,
640 )
641 coreconfigitem(
642 b'experimental', b'narrow', default=False,
643 )
644 coreconfigitem(
645 b'experimental', b'nonnormalparanoidcheck', default=False,
646 )
647 coreconfigitem(
648 b'experimental', b'exportableenviron', default=list,
649 )
650 coreconfigitem(
651 b'experimental', b'extendedheader.index', default=None,
652 )
653 coreconfigitem(
654 b'experimental', b'extendedheader.similarity', default=False,
655 )
656 coreconfigitem(
657 b'experimental', b'graphshorten', default=False,
658 )
659 coreconfigitem(
660 b'experimental', b'graphstyle.parent', default=dynamicdefault,
661 )
662 coreconfigitem(
663 b'experimental', b'graphstyle.missing', default=dynamicdefault,
664 )
665 coreconfigitem(
666 b'experimental', b'graphstyle.grandparent', default=dynamicdefault,
667 )
668 coreconfigitem(
669 b'experimental', b'hook-track-tags', default=False,
670 )
671 coreconfigitem(
672 b'experimental', b'httppeer.advertise-v2', default=False,
673 )
674 coreconfigitem(
675 b'experimental', b'httppeer.v2-encoder-order', default=None,
676 )
677 coreconfigitem(
678 b'experimental', b'httppostargs', default=False,
933 b'experimental',
934 b'merge-track-salvaged',
935 default=False,
936 )
937 coreconfigitem(
938 b'experimental',
939 b'mergetempdirprefix',
940 default=None,
941 )
942 coreconfigitem(
943 b'experimental',
944 b'mmapindexthreshold',
945 default=None,
946 )
947 coreconfigitem(
948 b'experimental',
949 b'narrow',
950 default=False,
951 )
952 coreconfigitem(
953 b'experimental',
954 b'nonnormalparanoidcheck',
955 default=False,
956 )
957 coreconfigitem(
958 b'experimental',
959 b'exportableenviron',
960 default=list,
961 )
962 coreconfigitem(
963 b'experimental',
964 b'extendedheader.index',
965 default=None,
966 )
967 coreconfigitem(
968 b'experimental',
969 b'extendedheader.similarity',
970 default=False,
971 )
972 coreconfigitem(
973 b'experimental',
974 b'graphshorten',
975 default=False,
976 )
977 coreconfigitem(
978 b'experimental',
979 b'graphstyle.parent',
980 default=dynamicdefault,
981 )
982 coreconfigitem(
983 b'experimental',
984 b'graphstyle.missing',
985 default=dynamicdefault,
986 )
987 coreconfigitem(
988 b'experimental',
989 b'graphstyle.grandparent',
990 default=dynamicdefault,
991 )
992 coreconfigitem(
993 b'experimental',
994 b'hook-track-tags',
995 default=False,
996 )
997 coreconfigitem(
998 b'experimental',
999 b'httppeer.advertise-v2',
1000 default=False,
1001 )
1002 coreconfigitem(
1003 b'experimental',
1004 b'httppeer.v2-encoder-order',
1005 default=None,
1006 )
1007 coreconfigitem(
1008 b'experimental',
1009 b'httppostargs',
1010 default=False,
679 1011 )
680 1012 coreconfigitem(b'experimental', b'nointerrupt', default=False)
681 1013 coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
682 1014
683 1015 coreconfigitem(
684 b'experimental', b'obsmarkers-exchange-debug', default=False,
685 )
686 coreconfigitem(
687 b'experimental', b'remotenames', default=False,
688 )
689 coreconfigitem(
690 b'experimental', b'removeemptydirs', default=True,
691 )
692 coreconfigitem(
693 b'experimental', b'revert.interactive.select-to-keep', default=False,
694 )
695 coreconfigitem(
696 b'experimental', b'revisions.prefixhexnode', default=False,
697 )
698 coreconfigitem(
699 b'experimental', b'revlogv2', default=None,
700 )
701 coreconfigitem(
702 b'experimental', b'revisions.disambiguatewithin', default=None,
703 )
704 coreconfigitem(
705 b'experimental', b'rust.index', default=False,
706 )
707 coreconfigitem(
708 b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
1016 b'experimental',
1017 b'obsmarkers-exchange-debug',
1018 default=False,
1019 )
1020 coreconfigitem(
1021 b'experimental',
1022 b'remotenames',
1023 default=False,
1024 )
1025 coreconfigitem(
1026 b'experimental',
1027 b'removeemptydirs',
1028 default=True,
1029 )
1030 coreconfigitem(
1031 b'experimental',
1032 b'revert.interactive.select-to-keep',
1033 default=False,
1034 )
1035 coreconfigitem(
1036 b'experimental',
1037 b'revisions.prefixhexnode',
1038 default=False,
1039 )
1040 coreconfigitem(
1041 b'experimental',
1042 b'revlogv2',
1043 default=None,
1044 )
1045 coreconfigitem(
1046 b'experimental',
1047 b'revisions.disambiguatewithin',
1048 default=None,
1049 )
1050 coreconfigitem(
1051 b'experimental',
1052 b'rust.index',
1053 default=False,
1054 )
1055 coreconfigitem(
1056 b'experimental',
1057 b'server.filesdata.recommended-batch-size',
1058 default=50000,
709 1059 )
710 1060 coreconfigitem(
711 1061 b'experimental',
@@ -713,10 +1063,14 b' coreconfigitem('
713 1063 default=100000,
714 1064 )
715 1065 coreconfigitem(
716 b'experimental', b'server.stream-narrow-clones', default=False,
717 )
718 coreconfigitem(
719 b'experimental', b'single-head-per-branch', default=False,
1066 b'experimental',
1067 b'server.stream-narrow-clones',
1068 default=False,
1069 )
1070 coreconfigitem(
1071 b'experimental',
1072 b'single-head-per-branch',
1073 default=False,
720 1074 )
721 1075 coreconfigitem(
722 1076 b'experimental',
@@ -724,73 +1078,125 b' coreconfigitem('
724 1078 default=False,
725 1079 )
726 1080 coreconfigitem(
727 b'experimental', b'sshserver.support-v2', default=False,
728 )
729 coreconfigitem(
730 b'experimental', b'sparse-read', default=False,
731 )
732 coreconfigitem(
733 b'experimental', b'sparse-read.density-threshold', default=0.50,
734 )
735 coreconfigitem(
736 b'experimental', b'sparse-read.min-gap-size', default=b'65K',
737 )
738 coreconfigitem(
739 b'experimental', b'treemanifest', default=False,
740 )
741 coreconfigitem(
742 b'experimental', b'update.atomic-file', default=False,
743 )
744 coreconfigitem(
745 b'experimental', b'sshpeer.advertise-v2', default=False,
746 )
747 coreconfigitem(
748 b'experimental', b'web.apiserver', default=False,
749 )
750 coreconfigitem(
751 b'experimental', b'web.api.http-v2', default=False,
752 )
753 coreconfigitem(
754 b'experimental', b'web.api.debugreflect', default=False,
755 )
756 coreconfigitem(
757 b'experimental', b'worker.wdir-get-thread-safe', default=False,
758 )
759 coreconfigitem(
760 b'experimental', b'worker.repository-upgrade', default=False,
761 )
762 coreconfigitem(
763 b'experimental', b'xdiff', default=False,
764 )
765 coreconfigitem(
766 b'extensions', b'.*', default=None, generic=True,
767 )
768 coreconfigitem(
769 b'extdata', b'.*', default=None, generic=True,
770 )
771 coreconfigitem(
772 b'format', b'bookmarks-in-store', default=False,
773 )
774 coreconfigitem(
775 b'format', b'chunkcachesize', default=None, experimental=True,
776 )
777 coreconfigitem(
778 b'format', b'dotencode', default=True,
779 )
780 coreconfigitem(
781 b'format', b'generaldelta', default=False, experimental=True,
782 )
783 coreconfigitem(
784 b'format', b'manifestcachesize', default=None, experimental=True,
785 )
786 coreconfigitem(
787 b'format', b'maxchainlen', default=dynamicdefault, experimental=True,
788 )
789 coreconfigitem(
790 b'format', b'obsstore-version', default=None,
791 )
792 coreconfigitem(
793 b'format', b'sparse-revlog', default=True,
1081 b'experimental',
1082 b'sshserver.support-v2',
1083 default=False,
1084 )
1085 coreconfigitem(
1086 b'experimental',
1087 b'sparse-read',
1088 default=False,
1089 )
1090 coreconfigitem(
1091 b'experimental',
1092 b'sparse-read.density-threshold',
1093 default=0.50,
1094 )
1095 coreconfigitem(
1096 b'experimental',
1097 b'sparse-read.min-gap-size',
1098 default=b'65K',
1099 )
1100 coreconfigitem(
1101 b'experimental',
1102 b'treemanifest',
1103 default=False,
1104 )
1105 coreconfigitem(
1106 b'experimental',
1107 b'update.atomic-file',
1108 default=False,
1109 )
1110 coreconfigitem(
1111 b'experimental',
1112 b'sshpeer.advertise-v2',
1113 default=False,
1114 )
1115 coreconfigitem(
1116 b'experimental',
1117 b'web.apiserver',
1118 default=False,
1119 )
1120 coreconfigitem(
1121 b'experimental',
1122 b'web.api.http-v2',
1123 default=False,
1124 )
1125 coreconfigitem(
1126 b'experimental',
1127 b'web.api.debugreflect',
1128 default=False,
1129 )
1130 coreconfigitem(
1131 b'experimental',
1132 b'worker.wdir-get-thread-safe',
1133 default=False,
1134 )
1135 coreconfigitem(
1136 b'experimental',
1137 b'worker.repository-upgrade',
1138 default=False,
1139 )
1140 coreconfigitem(
1141 b'experimental',
1142 b'xdiff',
1143 default=False,
1144 )
1145 coreconfigitem(
1146 b'extensions',
1147 b'.*',
1148 default=None,
1149 generic=True,
1150 )
1151 coreconfigitem(
1152 b'extdata',
1153 b'.*',
1154 default=None,
1155 generic=True,
1156 )
1157 coreconfigitem(
1158 b'format',
1159 b'bookmarks-in-store',
1160 default=False,
1161 )
1162 coreconfigitem(
1163 b'format',
1164 b'chunkcachesize',
1165 default=None,
1166 experimental=True,
1167 )
1168 coreconfigitem(
1169 b'format',
1170 b'dotencode',
1171 default=True,
1172 )
1173 coreconfigitem(
1174 b'format',
1175 b'generaldelta',
1176 default=False,
1177 experimental=True,
1178 )
1179 coreconfigitem(
1180 b'format',
1181 b'manifestcachesize',
1182 default=None,
1183 experimental=True,
1184 )
1185 coreconfigitem(
1186 b'format',
1187 b'maxchainlen',
1188 default=dynamicdefault,
1189 experimental=True,
1190 )
1191 coreconfigitem(
1192 b'format',
1193 b'obsstore-version',
1194 default=None,
1195 )
1196 coreconfigitem(
1197 b'format',
1198 b'sparse-revlog',
1199 default=True,
794 1200 )
795 1201 coreconfigitem(
796 1202 b'format',
@@ -799,13 +1205,19 b' coreconfigitem('
799 1205 alias=[(b'experimental', b'format.compression')],
800 1206 )
801 1207 coreconfigitem(
802 b'format', b'usefncache', default=True,
803 )
804 coreconfigitem(
805 b'format', b'usegeneraldelta', default=True,
806 )
807 coreconfigitem(
808 b'format', b'usestore', default=True,
1208 b'format',
1209 b'usefncache',
1210 default=True,
1211 )
1212 coreconfigitem(
1213 b'format',
1214 b'usegeneraldelta',
1215 default=True,
1216 )
1217 coreconfigitem(
1218 b'format',
1219 b'usestore',
1220 default=True,
809 1221 )
810 1222 # Right now, the only efficient implementation of the nodemap logic is in Rust, so
811 1223 # the persistent nodemap feature needs to stay experimental as long as the Rust
@@ -820,43 +1232,77 b' coreconfigitem('
820 1232 experimental=True,
821 1233 )
822 1234 coreconfigitem(
823 b'format', b'exp-use-side-data', default=False, experimental=True,
824 )
825 coreconfigitem(
826 b'format', b'exp-share-safe', default=False, experimental=True,
827 )
828 coreconfigitem(
829 b'format', b'internal-phase', default=False, experimental=True,
830 )
831 coreconfigitem(
832 b'fsmonitor', b'warn_when_unused', default=True,
833 )
834 coreconfigitem(
835 b'fsmonitor', b'warn_update_file_count', default=50000,
836 )
837 coreconfigitem(
838 b'fsmonitor', b'warn_update_file_count_rust', default=400000,
839 )
840 coreconfigitem(
841 b'help', br'hidden-command\..*', default=False, generic=True,
842 )
843 coreconfigitem(
844 b'help', br'hidden-topic\..*', default=False, generic=True,
845 )
846 coreconfigitem(
847 b'hooks', b'.*', default=dynamicdefault, generic=True,
848 )
849 coreconfigitem(
850 b'hgweb-paths', b'.*', default=list, generic=True,
851 )
852 coreconfigitem(
853 b'hostfingerprints', b'.*', default=list, generic=True,
854 )
855 coreconfigitem(
856 b'hostsecurity', b'ciphers', default=None,
857 )
858 coreconfigitem(
859 b'hostsecurity', b'minimumprotocol', default=dynamicdefault,
1235 b'format',
1236 b'exp-use-side-data',
1237 default=False,
1238 experimental=True,
1239 )
1240 coreconfigitem(
1241 b'format',
1242 b'exp-share-safe',
1243 default=False,
1244 experimental=True,
1245 )
1246 coreconfigitem(
1247 b'format',
1248 b'internal-phase',
1249 default=False,
1250 experimental=True,
1251 )
1252 coreconfigitem(
1253 b'fsmonitor',
1254 b'warn_when_unused',
1255 default=True,
1256 )
1257 coreconfigitem(
1258 b'fsmonitor',
1259 b'warn_update_file_count',
1260 default=50000,
1261 )
1262 coreconfigitem(
1263 b'fsmonitor',
1264 b'warn_update_file_count_rust',
1265 default=400000,
1266 )
1267 coreconfigitem(
1268 b'help',
1269 br'hidden-command\..*',
1270 default=False,
1271 generic=True,
1272 )
1273 coreconfigitem(
1274 b'help',
1275 br'hidden-topic\..*',
1276 default=False,
1277 generic=True,
1278 )
1279 coreconfigitem(
1280 b'hooks',
1281 b'.*',
1282 default=dynamicdefault,
1283 generic=True,
1284 )
1285 coreconfigitem(
1286 b'hgweb-paths',
1287 b'.*',
1288 default=list,
1289 generic=True,
1290 )
1291 coreconfigitem(
1292 b'hostfingerprints',
1293 b'.*',
1294 default=list,
1295 generic=True,
1296 )
1297 coreconfigitem(
1298 b'hostsecurity',
1299 b'ciphers',
1300 default=None,
1301 )
1302 coreconfigitem(
1303 b'hostsecurity',
1304 b'minimumprotocol',
1305 default=dynamicdefault,
860 1306 )
861 1307 coreconfigitem(
862 1308 b'hostsecurity',
@@ -865,73 +1311,122 b' coreconfigitem('
865 1311 generic=True,
866 1312 )
867 1313 coreconfigitem(
868 b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True,
869 )
870 coreconfigitem(
871 b'hostsecurity', b'.*:fingerprints$', default=list, generic=True,
872 )
873 coreconfigitem(
874 b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True,
1314 b'hostsecurity',
1315 b'.*:ciphers$',
1316 default=dynamicdefault,
1317 generic=True,
1318 )
1319 coreconfigitem(
1320 b'hostsecurity',
1321 b'.*:fingerprints$',
1322 default=list,
1323 generic=True,
1324 )
1325 coreconfigitem(
1326 b'hostsecurity',
1327 b'.*:verifycertsfile$',
1328 default=None,
1329 generic=True,
875 1330 )
876 1331
877 1332 coreconfigitem(
878 b'http_proxy', b'always', default=False,
879 )
880 coreconfigitem(
881 b'http_proxy', b'host', default=None,
882 )
883 coreconfigitem(
884 b'http_proxy', b'no', default=list,
885 )
886 coreconfigitem(
887 b'http_proxy', b'passwd', default=None,
888 )
889 coreconfigitem(
890 b'http_proxy', b'user', default=None,
1333 b'http_proxy',
1334 b'always',
1335 default=False,
1336 )
1337 coreconfigitem(
1338 b'http_proxy',
1339 b'host',
1340 default=None,
1341 )
1342 coreconfigitem(
1343 b'http_proxy',
1344 b'no',
1345 default=list,
1346 )
1347 coreconfigitem(
1348 b'http_proxy',
1349 b'passwd',
1350 default=None,
1351 )
1352 coreconfigitem(
1353 b'http_proxy',
1354 b'user',
1355 default=None,
891 1356 )
892 1357
893 1358 coreconfigitem(
894 b'http', b'timeout', default=None,
1359 b'http',
1360 b'timeout',
1361 default=None,
895 1362 )
896 1363
897 1364 coreconfigitem(
898 b'logtoprocess', b'commandexception', default=None,
899 )
900 coreconfigitem(
901 b'logtoprocess', b'commandfinish', default=None,
902 )
903 coreconfigitem(
904 b'logtoprocess', b'command', default=None,
905 )
906 coreconfigitem(
907 b'logtoprocess', b'develwarn', default=None,
908 )
909 coreconfigitem(
910 b'logtoprocess', b'uiblocked', default=None,
911 )
912 coreconfigitem(
913 b'merge', b'checkunknown', default=b'abort',
914 )
915 coreconfigitem(
916 b'merge', b'checkignored', default=b'abort',
917 )
918 coreconfigitem(
919 b'experimental', b'merge.checkpathconflicts', default=False,
920 )
921 coreconfigitem(
922 b'merge', b'followcopies', default=True,
923 )
924 coreconfigitem(
925 b'merge', b'on-failure', default=b'continue',
926 )
927 coreconfigitem(
928 b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True,
929 )
930 coreconfigitem(
931 b'merge', b'strict-capability-check', default=False,
932 )
933 coreconfigitem(
934 b'merge-tools', b'.*', default=None, generic=True,
1365 b'logtoprocess',
1366 b'commandexception',
1367 default=None,
1368 )
1369 coreconfigitem(
1370 b'logtoprocess',
1371 b'commandfinish',
1372 default=None,
1373 )
1374 coreconfigitem(
1375 b'logtoprocess',
1376 b'command',
1377 default=None,
1378 )
1379 coreconfigitem(
1380 b'logtoprocess',
1381 b'develwarn',
1382 default=None,
1383 )
1384 coreconfigitem(
1385 b'logtoprocess',
1386 b'uiblocked',
1387 default=None,
1388 )
1389 coreconfigitem(
1390 b'merge',
1391 b'checkunknown',
1392 default=b'abort',
1393 )
1394 coreconfigitem(
1395 b'merge',
1396 b'checkignored',
1397 default=b'abort',
1398 )
1399 coreconfigitem(
1400 b'experimental',
1401 b'merge.checkpathconflicts',
1402 default=False,
1403 )
1404 coreconfigitem(
1405 b'merge',
1406 b'followcopies',
1407 default=True,
1408 )
1409 coreconfigitem(
1410 b'merge',
1411 b'on-failure',
1412 default=b'continue',
1413 )
1414 coreconfigitem(
1415 b'merge',
1416 b'preferancestor',
1417 default=lambda: [b'*'],
1418 experimental=True,
1419 )
1420 coreconfigitem(
1421 b'merge',
1422 b'strict-capability-check',
1423 default=False,
1424 )
1425 coreconfigitem(
1426 b'merge-tools',
1427 b'.*',
1428 default=None,
1429 generic=True,
935 1430 )
936 1431 coreconfigitem(
937 1432 b'merge-tools',
@@ -941,10 +1436,18 b' coreconfigitem('
941 1436 priority=-1,
942 1437 )
943 1438 coreconfigitem(
944 b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
945 )
946 coreconfigitem(
947 b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
1439 b'merge-tools',
1440 br'.*\.binary$',
1441 default=False,
1442 generic=True,
1443 priority=-1,
1444 )
1445 coreconfigitem(
1446 b'merge-tools',
1447 br'.*\.check$',
1448 default=list,
1449 generic=True,
1450 priority=-1,
948 1451 )
949 1452 coreconfigitem(
950 1453 b'merge-tools',
@@ -961,10 +1464,18 b' coreconfigitem('
961 1464 priority=-1,
962 1465 )
963 1466 coreconfigitem(
964 b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
965 )
966 coreconfigitem(
967 b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
1467 b'merge-tools',
1468 br'.*\.fixeol$',
1469 default=False,
1470 generic=True,
1471 priority=-1,
1472 )
1473 coreconfigitem(
1474 b'merge-tools',
1475 br'.*\.gui$',
1476 default=False,
1477 generic=True,
1478 priority=-1,
968 1479 )
969 1480 coreconfigitem(
970 1481 b'merge-tools',
@@ -981,7 +1492,11 b' coreconfigitem('
981 1492 priority=-1,
982 1493 )
983 1494 coreconfigitem(
984 b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
1495 b'merge-tools',
1496 br'.*\.priority$',
1497 default=0,
1498 generic=True,
1499 priority=-1,
985 1500 )
986 1501 coreconfigitem(
987 1502 b'merge-tools',
@@ -991,100 +1506,168 b' coreconfigitem('
991 1506 priority=-1,
992 1507 )
993 1508 coreconfigitem(
994 b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
995 )
996 coreconfigitem(
997 b'pager', b'attend-.*', default=dynamicdefault, generic=True,
998 )
999 coreconfigitem(
1000 b'pager', b'ignore', default=list,
1001 )
1002 coreconfigitem(
1003 b'pager', b'pager', default=dynamicdefault,
1004 )
1005 coreconfigitem(
1006 b'patch', b'eol', default=b'strict',
1007 )
1008 coreconfigitem(
1009 b'patch', b'fuzz', default=2,
1010 )
1011 coreconfigitem(
1012 b'paths', b'default', default=None,
1013 )
1014 coreconfigitem(
1015 b'paths', b'default-push', default=None,
1016 )
1017 coreconfigitem(
1018 b'paths', b'.*', default=None, generic=True,
1019 )
1020 coreconfigitem(
1021 b'phases', b'checksubrepos', default=b'follow',
1022 )
1023 coreconfigitem(
1024 b'phases', b'new-commit', default=b'draft',
1025 )
1026 coreconfigitem(
1027 b'phases', b'publish', default=True,
1028 )
1029 coreconfigitem(
1030 b'profiling', b'enabled', default=False,
1031 )
1032 coreconfigitem(
1033 b'profiling', b'format', default=b'text',
1034 )
1035 coreconfigitem(
1036 b'profiling', b'freq', default=1000,
1037 )
1038 coreconfigitem(
1039 b'profiling', b'limit', default=30,
1040 )
1041 coreconfigitem(
1042 b'profiling', b'nested', default=0,
1043 )
1044 coreconfigitem(
1045 b'profiling', b'output', default=None,
1046 )
1047 coreconfigitem(
1048 b'profiling', b'showmax', default=0.999,
1049 )
1050 coreconfigitem(
1051 b'profiling', b'showmin', default=dynamicdefault,
1052 )
1053 coreconfigitem(
1054 b'profiling', b'showtime', default=True,
1055 )
1056 coreconfigitem(
1057 b'profiling', b'sort', default=b'inlinetime',
1058 )
1059 coreconfigitem(
1060 b'profiling', b'statformat', default=b'hotpath',
1061 )
1062 coreconfigitem(
1063 b'profiling', b'time-track', default=dynamicdefault,
1064 )
1065 coreconfigitem(
1066 b'profiling', b'type', default=b'stat',
1067 )
1068 coreconfigitem(
1069 b'progress', b'assume-tty', default=False,
1070 )
1071 coreconfigitem(
1072 b'progress', b'changedelay', default=1,
1073 )
1074 coreconfigitem(
1075 b'progress', b'clear-complete', default=True,
1076 )
1077 coreconfigitem(
1078 b'progress', b'debug', default=False,
1079 )
1080 coreconfigitem(
1081 b'progress', b'delay', default=3,
1082 )
1083 coreconfigitem(
1084 b'progress', b'disable', default=False,
1085 )
1086 coreconfigitem(
1087 b'progress', b'estimateinterval', default=60.0,
1509 b'merge-tools',
1510 br'.*\.symlink$',
1511 default=False,
1512 generic=True,
1513 priority=-1,
1514 )
1515 coreconfigitem(
1516 b'pager',
1517 b'attend-.*',
1518 default=dynamicdefault,
1519 generic=True,
1520 )
1521 coreconfigitem(
1522 b'pager',
1523 b'ignore',
1524 default=list,
1525 )
1526 coreconfigitem(
1527 b'pager',
1528 b'pager',
1529 default=dynamicdefault,
1530 )
1531 coreconfigitem(
1532 b'patch',
1533 b'eol',
1534 default=b'strict',
1535 )
1536 coreconfigitem(
1537 b'patch',
1538 b'fuzz',
1539 default=2,
1540 )
1541 coreconfigitem(
1542 b'paths',
1543 b'default',
1544 default=None,
1545 )
1546 coreconfigitem(
1547 b'paths',
1548 b'default-push',
1549 default=None,
1550 )
1551 coreconfigitem(
1552 b'paths',
1553 b'.*',
1554 default=None,
1555 generic=True,
1556 )
1557 coreconfigitem(
1558 b'phases',
1559 b'checksubrepos',
1560 default=b'follow',
1561 )
1562 coreconfigitem(
1563 b'phases',
1564 b'new-commit',
1565 default=b'draft',
1566 )
1567 coreconfigitem(
1568 b'phases',
1569 b'publish',
1570 default=True,
1571 )
1572 coreconfigitem(
1573 b'profiling',
1574 b'enabled',
1575 default=False,
1576 )
1577 coreconfigitem(
1578 b'profiling',
1579 b'format',
1580 default=b'text',
1581 )
1582 coreconfigitem(
1583 b'profiling',
1584 b'freq',
1585 default=1000,
1586 )
1587 coreconfigitem(
1588 b'profiling',
1589 b'limit',
1590 default=30,
1591 )
1592 coreconfigitem(
1593 b'profiling',
1594 b'nested',
1595 default=0,
1596 )
1597 coreconfigitem(
1598 b'profiling',
1599 b'output',
1600 default=None,
1601 )
1602 coreconfigitem(
1603 b'profiling',
1604 b'showmax',
1605 default=0.999,
1606 )
1607 coreconfigitem(
1608 b'profiling',
1609 b'showmin',
1610 default=dynamicdefault,
1611 )
1612 coreconfigitem(
1613 b'profiling',
1614 b'showtime',
1615 default=True,
1616 )
1617 coreconfigitem(
1618 b'profiling',
1619 b'sort',
1620 default=b'inlinetime',
1621 )
1622 coreconfigitem(
1623 b'profiling',
1624 b'statformat',
1625 default=b'hotpath',
1626 )
1627 coreconfigitem(
1628 b'profiling',
1629 b'time-track',
1630 default=dynamicdefault,
1631 )
1632 coreconfigitem(
1633 b'profiling',
1634 b'type',
1635 default=b'stat',
1636 )
1637 coreconfigitem(
1638 b'progress',
1639 b'assume-tty',
1640 default=False,
1641 )
1642 coreconfigitem(
1643 b'progress',
1644 b'changedelay',
1645 default=1,
1646 )
1647 coreconfigitem(
1648 b'progress',
1649 b'clear-complete',
1650 default=True,
1651 )
1652 coreconfigitem(
1653 b'progress',
1654 b'debug',
1655 default=False,
1656 )
1657 coreconfigitem(
1658 b'progress',
1659 b'delay',
1660 default=3,
1661 )
1662 coreconfigitem(
1663 b'progress',
1664 b'disable',
1665 default=False,
1666 )
1667 coreconfigitem(
1668 b'progress',
1669 b'estimateinterval',
1670 default=60.0,
1088 1671 )
1089 1672 coreconfigitem(
1090 1673 b'progress',
@@ -1092,16 +1675,24 b' coreconfigitem('
1092 1675 default=lambda: [b'topic', b'bar', b'number', b'estimate'],
1093 1676 )
1094 1677 coreconfigitem(
1095 b'progress', b'refresh', default=0.1,
1096 )
1097 coreconfigitem(
1098 b'progress', b'width', default=dynamicdefault,
1099 )
1100 coreconfigitem(
1101 b'pull', b'confirm', default=False,
1102 )
1103 coreconfigitem(
1104 b'push', b'pushvars.server', default=False,
1678 b'progress',
1679 b'refresh',
1680 default=0.1,
1681 )
1682 coreconfigitem(
1683 b'progress',
1684 b'width',
1685 default=dynamicdefault,
1686 )
1687 coreconfigitem(
1688 b'pull',
1689 b'confirm',
1690 default=False,
1691 )
1692 coreconfigitem(
1693 b'push',
1694 b'pushvars.server',
1695 default=False,
1105 1696 )
1106 1697 coreconfigitem(
1107 1698 b'rewrite',
@@ -1110,13 +1701,21 b' coreconfigitem('
1110 1701 alias=[(b'ui', b'history-editing-backup')],
1111 1702 )
1112 1703 coreconfigitem(
1113 b'rewrite', b'update-timestamp', default=False,
1114 )
1115 coreconfigitem(
1116 b'rewrite', b'empty-successor', default=b'skip', experimental=True,
1117 )
1118 coreconfigitem(
1119 b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True,
1704 b'rewrite',
1705 b'update-timestamp',
1706 default=False,
1707 )
1708 coreconfigitem(
1709 b'rewrite',
1710 b'empty-successor',
1711 default=b'skip',
1712 experimental=True,
1713 )
1714 coreconfigitem(
1715 b'storage',
1716 b'new-repo-backend',
1717 default=b'revlogv1',
1718 experimental=True,
1120 1719 )
1121 1720 coreconfigitem(
1122 1721 b'storage',
@@ -1133,37 +1732,59 b' coreconfigitem('
1133 1732 b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True
1134 1733 )
1135 1734 coreconfigitem(
1136 b'storage', b'revlog.reuse-external-delta', default=True,
1137 )
1138 coreconfigitem(
1139 b'storage', b'revlog.reuse-external-delta-parent', default=None,
1140 )
1141 coreconfigitem(
1142 b'storage', b'revlog.zlib.level', default=None,
1143 )
1144 coreconfigitem(
1145 b'storage', b'revlog.zstd.level', default=None,
1146 )
1147 coreconfigitem(
1148 b'server', b'bookmarks-pushkey-compat', default=True,
1149 )
1150 coreconfigitem(
1151 b'server', b'bundle1', default=True,
1152 )
1153 coreconfigitem(
1154 b'server', b'bundle1gd', default=None,
1155 )
1156 coreconfigitem(
1157 b'server', b'bundle1.pull', default=None,
1158 )
1159 coreconfigitem(
1160 b'server', b'bundle1gd.pull', default=None,
1161 )
1162 coreconfigitem(
1163 b'server', b'bundle1.push', default=None,
1164 )
1165 coreconfigitem(
1166 b'server', b'bundle1gd.push', default=None,
1735 b'storage',
1736 b'revlog.reuse-external-delta',
1737 default=True,
1738 )
1739 coreconfigitem(
1740 b'storage',
1741 b'revlog.reuse-external-delta-parent',
1742 default=None,
1743 )
1744 coreconfigitem(
1745 b'storage',
1746 b'revlog.zlib.level',
1747 default=None,
1748 )
1749 coreconfigitem(
1750 b'storage',
1751 b'revlog.zstd.level',
1752 default=None,
1753 )
1754 coreconfigitem(
1755 b'server',
1756 b'bookmarks-pushkey-compat',
1757 default=True,
1758 )
1759 coreconfigitem(
1760 b'server',
1761 b'bundle1',
1762 default=True,
1763 )
1764 coreconfigitem(
1765 b'server',
1766 b'bundle1gd',
1767 default=None,
1768 )
1769 coreconfigitem(
1770 b'server',
1771 b'bundle1.pull',
1772 default=None,
1773 )
1774 coreconfigitem(
1775 b'server',
1776 b'bundle1gd.pull',
1777 default=None,
1778 )
1779 coreconfigitem(
1780 b'server',
1781 b'bundle1.push',
1782 default=None,
1783 )
1784 coreconfigitem(
1785 b'server',
1786 b'bundle1gd.push',
1787 default=None,
1167 1788 )
1168 1789 coreconfigitem(
1169 1790 b'server',
@@ -1172,73 +1793,120 b' coreconfigitem('
1172 1793 alias=[(b'experimental', b'bundle2.stream')],
1173 1794 )
1174 1795 coreconfigitem(
1175 b'server', b'compressionengines', default=list,
1176 )
1177 coreconfigitem(
1178 b'server', b'concurrent-push-mode', default=b'check-related',
1179 )
1180 coreconfigitem(
1181 b'server', b'disablefullbundle', default=False,
1182 )
1183 coreconfigitem(
1184 b'server', b'maxhttpheaderlen', default=1024,
1185 )
1186 coreconfigitem(
1187 b'server', b'pullbundle', default=False,
1188 )
1189 coreconfigitem(
1190 b'server', b'preferuncompressed', default=False,
1191 )
1192 coreconfigitem(
1193 b'server', b'streamunbundle', default=False,
1194 )
1195 coreconfigitem(
1196 b'server', b'uncompressed', default=True,
1197 )
1198 coreconfigitem(
1199 b'server', b'uncompressedallowsecret', default=False,
1200 )
1201 coreconfigitem(
1202 b'server', b'view', default=b'served',
1203 )
1204 coreconfigitem(
1205 b'server', b'validate', default=False,
1206 )
1207 coreconfigitem(
1208 b'server', b'zliblevel', default=-1,
1209 )
1210 coreconfigitem(
1211 b'server', b'zstdlevel', default=3,
1212 )
1213 coreconfigitem(
1214 b'share', b'pool', default=None,
1215 )
1216 coreconfigitem(
1217 b'share', b'poolnaming', default=b'identity',
1218 )
1219 coreconfigitem(
1220 b'shelve', b'maxbackups', default=10,
1221 )
1222 coreconfigitem(
1223 b'smtp', b'host', default=None,
1224 )
1225 coreconfigitem(
1226 b'smtp', b'local_hostname', default=None,
1227 )
1228 coreconfigitem(
1229 b'smtp', b'password', default=None,
1230 )
1231 coreconfigitem(
1232 b'smtp', b'port', default=dynamicdefault,
1233 )
1234 coreconfigitem(
1235 b'smtp', b'tls', default=b'none',
1236 )
1237 coreconfigitem(
1238 b'smtp', b'username', default=None,
1239 )
1240 coreconfigitem(
1241 b'sparse', b'missingwarning', default=True, experimental=True,
1796 b'server',
1797 b'compressionengines',
1798 default=list,
1799 )
1800 coreconfigitem(
1801 b'server',
1802 b'concurrent-push-mode',
1803 default=b'check-related',
1804 )
1805 coreconfigitem(
1806 b'server',
1807 b'disablefullbundle',
1808 default=False,
1809 )
1810 coreconfigitem(
1811 b'server',
1812 b'maxhttpheaderlen',
1813 default=1024,
1814 )
1815 coreconfigitem(
1816 b'server',
1817 b'pullbundle',
1818 default=False,
1819 )
1820 coreconfigitem(
1821 b'server',
1822 b'preferuncompressed',
1823 default=False,
1824 )
1825 coreconfigitem(
1826 b'server',
1827 b'streamunbundle',
1828 default=False,
1829 )
1830 coreconfigitem(
1831 b'server',
1832 b'uncompressed',
1833 default=True,
1834 )
1835 coreconfigitem(
1836 b'server',
1837 b'uncompressedallowsecret',
1838 default=False,
1839 )
1840 coreconfigitem(
1841 b'server',
1842 b'view',
1843 default=b'served',
1844 )
1845 coreconfigitem(
1846 b'server',
1847 b'validate',
1848 default=False,
1849 )
1850 coreconfigitem(
1851 b'server',
1852 b'zliblevel',
1853 default=-1,
1854 )
1855 coreconfigitem(
1856 b'server',
1857 b'zstdlevel',
1858 default=3,
1859 )
1860 coreconfigitem(
1861 b'share',
1862 b'pool',
1863 default=None,
1864 )
1865 coreconfigitem(
1866 b'share',
1867 b'poolnaming',
1868 default=b'identity',
1869 )
1870 coreconfigitem(
1871 b'shelve',
1872 b'maxbackups',
1873 default=10,
1874 )
1875 coreconfigitem(
1876 b'smtp',
1877 b'host',
1878 default=None,
1879 )
1880 coreconfigitem(
1881 b'smtp',
1882 b'local_hostname',
1883 default=None,
1884 )
1885 coreconfigitem(
1886 b'smtp',
1887 b'password',
1888 default=None,
1889 )
1890 coreconfigitem(
1891 b'smtp',
1892 b'port',
1893 default=dynamicdefault,
1894 )
1895 coreconfigitem(
1896 b'smtp',
1897 b'tls',
1898 default=b'none',
1899 )
1900 coreconfigitem(
1901 b'smtp',
1902 b'username',
1903 default=None,
1904 )
1905 coreconfigitem(
1906 b'sparse',
1907 b'missingwarning',
1908 default=True,
1909 experimental=True,
1242 1910 )
1243 1911 coreconfigitem(
1244 1912 b'subrepos',
@@ -1246,367 +1914,612 b' coreconfigitem('
1246 1914 default=dynamicdefault, # to make backporting simpler
1247 1915 )
1248 1916 coreconfigitem(
1249 b'subrepos', b'hg:allowed', default=dynamicdefault,
1250 )
1251 coreconfigitem(
1252 b'subrepos', b'git:allowed', default=dynamicdefault,
1253 )
1254 coreconfigitem(
1255 b'subrepos', b'svn:allowed', default=dynamicdefault,
1256 )
1257 coreconfigitem(
1258 b'templates', b'.*', default=None, generic=True,
1259 )
1260 coreconfigitem(
1261 b'templateconfig', b'.*', default=dynamicdefault, generic=True,
1262 )
1263 coreconfigitem(
1264 b'trusted', b'groups', default=list,
1265 )
1266 coreconfigitem(
1267 b'trusted', b'users', default=list,
1268 )
1269 coreconfigitem(
1270 b'ui', b'_usedassubrepo', default=False,
1271 )
1272 coreconfigitem(
1273 b'ui', b'allowemptycommit', default=False,
1274 )
1275 coreconfigitem(
1276 b'ui', b'archivemeta', default=True,
1277 )
1278 coreconfigitem(
1279 b'ui', b'askusername', default=False,
1280 )
1281 coreconfigitem(
1282 b'ui', b'available-memory', default=None,
1917 b'subrepos',
1918 b'hg:allowed',
1919 default=dynamicdefault,
1920 )
1921 coreconfigitem(
1922 b'subrepos',
1923 b'git:allowed',
1924 default=dynamicdefault,
1925 )
1926 coreconfigitem(
1927 b'subrepos',
1928 b'svn:allowed',
1929 default=dynamicdefault,
1930 )
1931 coreconfigitem(
1932 b'templates',
1933 b'.*',
1934 default=None,
1935 generic=True,
1936 )
1937 coreconfigitem(
1938 b'templateconfig',
1939 b'.*',
1940 default=dynamicdefault,
1941 generic=True,
1942 )
1943 coreconfigitem(
1944 b'trusted',
1945 b'groups',
1946 default=list,
1947 )
1948 coreconfigitem(
1949 b'trusted',
1950 b'users',
1951 default=list,
1952 )
1953 coreconfigitem(
1954 b'ui',
1955 b'_usedassubrepo',
1956 default=False,
1957 )
1958 coreconfigitem(
1959 b'ui',
1960 b'allowemptycommit',
1961 default=False,
1962 )
1963 coreconfigitem(
1964 b'ui',
1965 b'archivemeta',
1966 default=True,
1967 )
1968 coreconfigitem(
1969 b'ui',
1970 b'askusername',
1971 default=False,
1972 )
1973 coreconfigitem(
1974 b'ui',
1975 b'available-memory',
1976 default=None,
1283 1977 )
1284 1978
1285 1979 coreconfigitem(
1286 b'ui', b'clonebundlefallback', default=False,
1287 )
1288 coreconfigitem(
1289 b'ui', b'clonebundleprefers', default=list,
1290 )
1291 coreconfigitem(
1292 b'ui', b'clonebundles', default=True,
1293 )
1294 coreconfigitem(
1295 b'ui', b'color', default=b'auto',
1296 )
1297 coreconfigitem(
1298 b'ui', b'commitsubrepos', default=False,
1299 )
1300 coreconfigitem(
1301 b'ui', b'debug', default=False,
1302 )
1303 coreconfigitem(
1304 b'ui', b'debugger', default=None,
1305 )
1306 coreconfigitem(
1307 b'ui', b'editor', default=dynamicdefault,
1308 )
1309 coreconfigitem(
1310 b'ui', b'detailed-exit-code', default=False, experimental=True,
1311 )
1312 coreconfigitem(
1313 b'ui', b'fallbackencoding', default=None,
1314 )
1315 coreconfigitem(
1316 b'ui', b'forcecwd', default=None,
1317 )
1318 coreconfigitem(
1319 b'ui', b'forcemerge', default=None,
1320 )
1321 coreconfigitem(
1322 b'ui', b'formatdebug', default=False,
1323 )
1324 coreconfigitem(
1325 b'ui', b'formatjson', default=False,
1326 )
1327 coreconfigitem(
1328 b'ui', b'formatted', default=None,
1329 )
1330 coreconfigitem(
1331 b'ui', b'interactive', default=None,
1332 )
1333 coreconfigitem(
1334 b'ui', b'interface', default=None,
1335 )
1336 coreconfigitem(
1337 b'ui', b'interface.chunkselector', default=None,
1338 )
1339 coreconfigitem(
1340 b'ui', b'large-file-limit', default=10000000,
1341 )
1342 coreconfigitem(
1343 b'ui', b'logblockedtimes', default=False,
1344 )
1345 coreconfigitem(
1346 b'ui', b'merge', default=None,
1347 )
1348 coreconfigitem(
1349 b'ui', b'mergemarkers', default=b'basic',
1350 )
1351 coreconfigitem(
1352 b'ui', b'message-output', default=b'stdio',
1353 )
1354 coreconfigitem(
1355 b'ui', b'nontty', default=False,
1356 )
1357 coreconfigitem(
1358 b'ui', b'origbackuppath', default=None,
1359 )
1360 coreconfigitem(
1361 b'ui', b'paginate', default=True,
1362 )
1363 coreconfigitem(
1364 b'ui', b'patch', default=None,
1365 )
1366 coreconfigitem(
1367 b'ui', b'portablefilenames', default=b'warn',
1368 )
1369 coreconfigitem(
1370 b'ui', b'promptecho', default=False,
1371 )
1372 coreconfigitem(
1373 b'ui', b'quiet', default=False,
1374 )
1375 coreconfigitem(
1376 b'ui', b'quietbookmarkmove', default=False,
1377 )
1378 coreconfigitem(
1379 b'ui', b'relative-paths', default=b'legacy',
1380 )
1381 coreconfigitem(
1382 b'ui', b'remotecmd', default=b'hg',
1383 )
1384 coreconfigitem(
1385 b'ui', b'report_untrusted', default=True,
1386 )
1387 coreconfigitem(
1388 b'ui', b'rollback', default=True,
1389 )
1390 coreconfigitem(
1391 b'ui', b'signal-safe-lock', default=True,
1392 )
1393 coreconfigitem(
1394 b'ui', b'slash', default=False,
1395 )
1396 coreconfigitem(
1397 b'ui', b'ssh', default=b'ssh',
1398 )
1399 coreconfigitem(
1400 b'ui', b'ssherrorhint', default=None,
1401 )
1402 coreconfigitem(
1403 b'ui', b'statuscopies', default=False,
1404 )
1405 coreconfigitem(
1406 b'ui', b'strict', default=False,
1407 )
1408 coreconfigitem(
1409 b'ui', b'style', default=b'',
1410 )
1411 coreconfigitem(
1412 b'ui', b'supportcontact', default=None,
1413 )
1414 coreconfigitem(
1415 b'ui', b'textwidth', default=78,
1416 )
1417 coreconfigitem(
1418 b'ui', b'timeout', default=b'600',
1419 )
1420 coreconfigitem(
1421 b'ui', b'timeout.warn', default=0,
1422 )
1423 coreconfigitem(
1424 b'ui', b'timestamp-output', default=False,
1425 )
1426 coreconfigitem(
1427 b'ui', b'traceback', default=False,
1428 )
1429 coreconfigitem(
1430 b'ui', b'tweakdefaults', default=False,
1980 b'ui',
1981 b'clonebundlefallback',
1982 default=False,
1983 )
1984 coreconfigitem(
1985 b'ui',
1986 b'clonebundleprefers',
1987 default=list,
1988 )
1989 coreconfigitem(
1990 b'ui',
1991 b'clonebundles',
1992 default=True,
1993 )
1994 coreconfigitem(
1995 b'ui',
1996 b'color',
1997 default=b'auto',
1998 )
1999 coreconfigitem(
2000 b'ui',
2001 b'commitsubrepos',
2002 default=False,
2003 )
2004 coreconfigitem(
2005 b'ui',
2006 b'debug',
2007 default=False,
2008 )
2009 coreconfigitem(
2010 b'ui',
2011 b'debugger',
2012 default=None,
2013 )
2014 coreconfigitem(
2015 b'ui',
2016 b'editor',
2017 default=dynamicdefault,
2018 )
2019 coreconfigitem(
2020 b'ui',
2021 b'detailed-exit-code',
2022 default=False,
2023 experimental=True,
2024 )
2025 coreconfigitem(
2026 b'ui',
2027 b'fallbackencoding',
2028 default=None,
2029 )
2030 coreconfigitem(
2031 b'ui',
2032 b'forcecwd',
2033 default=None,
2034 )
2035 coreconfigitem(
2036 b'ui',
2037 b'forcemerge',
2038 default=None,
2039 )
2040 coreconfigitem(
2041 b'ui',
2042 b'formatdebug',
2043 default=False,
2044 )
2045 coreconfigitem(
2046 b'ui',
2047 b'formatjson',
2048 default=False,
2049 )
2050 coreconfigitem(
2051 b'ui',
2052 b'formatted',
2053 default=None,
2054 )
2055 coreconfigitem(
2056 b'ui',
2057 b'interactive',
2058 default=None,
2059 )
2060 coreconfigitem(
2061 b'ui',
2062 b'interface',
2063 default=None,
2064 )
2065 coreconfigitem(
2066 b'ui',
2067 b'interface.chunkselector',
2068 default=None,
2069 )
2070 coreconfigitem(
2071 b'ui',
2072 b'large-file-limit',
2073 default=10000000,
2074 )
2075 coreconfigitem(
2076 b'ui',
2077 b'logblockedtimes',
2078 default=False,
2079 )
2080 coreconfigitem(
2081 b'ui',
2082 b'merge',
2083 default=None,
2084 )
2085 coreconfigitem(
2086 b'ui',
2087 b'mergemarkers',
2088 default=b'basic',
2089 )
2090 coreconfigitem(
2091 b'ui',
2092 b'message-output',
2093 default=b'stdio',
2094 )
2095 coreconfigitem(
2096 b'ui',
2097 b'nontty',
2098 default=False,
2099 )
2100 coreconfigitem(
2101 b'ui',
2102 b'origbackuppath',
2103 default=None,
2104 )
2105 coreconfigitem(
2106 b'ui',
2107 b'paginate',
2108 default=True,
2109 )
2110 coreconfigitem(
2111 b'ui',
2112 b'patch',
2113 default=None,
2114 )
2115 coreconfigitem(
2116 b'ui',
2117 b'portablefilenames',
2118 default=b'warn',
2119 )
2120 coreconfigitem(
2121 b'ui',
2122 b'promptecho',
2123 default=False,
2124 )
2125 coreconfigitem(
2126 b'ui',
2127 b'quiet',
2128 default=False,
2129 )
2130 coreconfigitem(
2131 b'ui',
2132 b'quietbookmarkmove',
2133 default=False,
2134 )
2135 coreconfigitem(
2136 b'ui',
2137 b'relative-paths',
2138 default=b'legacy',
2139 )
2140 coreconfigitem(
2141 b'ui',
2142 b'remotecmd',
2143 default=b'hg',
2144 )
2145 coreconfigitem(
2146 b'ui',
2147 b'report_untrusted',
2148 default=True,
2149 )
2150 coreconfigitem(
2151 b'ui',
2152 b'rollback',
2153 default=True,
2154 )
2155 coreconfigitem(
2156 b'ui',
2157 b'signal-safe-lock',
2158 default=True,
2159 )
2160 coreconfigitem(
2161 b'ui',
2162 b'slash',
2163 default=False,
2164 )
2165 coreconfigitem(
2166 b'ui',
2167 b'ssh',
2168 default=b'ssh',
2169 )
2170 coreconfigitem(
2171 b'ui',
2172 b'ssherrorhint',
2173 default=None,
2174 )
2175 coreconfigitem(
2176 b'ui',
2177 b'statuscopies',
2178 default=False,
2179 )
2180 coreconfigitem(
2181 b'ui',
2182 b'strict',
2183 default=False,
2184 )
2185 coreconfigitem(
2186 b'ui',
2187 b'style',
2188 default=b'',
2189 )
2190 coreconfigitem(
2191 b'ui',
2192 b'supportcontact',
2193 default=None,
2194 )
2195 coreconfigitem(
2196 b'ui',
2197 b'textwidth',
2198 default=78,
2199 )
2200 coreconfigitem(
2201 b'ui',
2202 b'timeout',
2203 default=b'600',
2204 )
2205 coreconfigitem(
2206 b'ui',
2207 b'timeout.warn',
2208 default=0,
2209 )
2210 coreconfigitem(
2211 b'ui',
2212 b'timestamp-output',
2213 default=False,
2214 )
2215 coreconfigitem(
2216 b'ui',
2217 b'traceback',
2218 default=False,
2219 )
2220 coreconfigitem(
2221 b'ui',
2222 b'tweakdefaults',
2223 default=False,
1431 2224 )
1432 2225 coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
1433 2226 coreconfigitem(
1434 b'ui', b'verbose', default=False,
1435 )
1436 coreconfigitem(
1437 b'verify', b'skipflags', default=None,
1438 )
1439 coreconfigitem(
1440 b'web', b'allowbz2', default=False,
1441 )
1442 coreconfigitem(
1443 b'web', b'allowgz', default=False,
1444 )
1445 coreconfigitem(
1446 b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True,
1447 )
1448 coreconfigitem(
1449 b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list,
1450 )
1451 coreconfigitem(
1452 b'web', b'allowzip', default=False,
1453 )
1454 coreconfigitem(
1455 b'web', b'archivesubrepos', default=False,
1456 )
1457 coreconfigitem(
1458 b'web', b'cache', default=True,
1459 )
1460 coreconfigitem(
1461 b'web', b'comparisoncontext', default=5,
1462 )
1463 coreconfigitem(
1464 b'web', b'contact', default=None,
1465 )
1466 coreconfigitem(
1467 b'web', b'deny_push', default=list,
1468 )
1469 coreconfigitem(
1470 b'web', b'guessmime', default=False,
1471 )
1472 coreconfigitem(
1473 b'web', b'hidden', default=False,
1474 )
1475 coreconfigitem(
1476 b'web', b'labels', default=list,
1477 )
1478 coreconfigitem(
1479 b'web', b'logoimg', default=b'hglogo.png',
1480 )
1481 coreconfigitem(
1482 b'web', b'logourl', default=b'https://mercurial-scm.org/',
1483 )
1484 coreconfigitem(
1485 b'web', b'accesslog', default=b'-',
1486 )
1487 coreconfigitem(
1488 b'web', b'address', default=b'',
1489 )
1490 coreconfigitem(
1491 b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
1492 )
1493 coreconfigitem(
1494 b'web', b'allow_read', default=list,
1495 )
1496 coreconfigitem(
1497 b'web', b'baseurl', default=None,
1498 )
1499 coreconfigitem(
1500 b'web', b'cacerts', default=None,
1501 )
1502 coreconfigitem(
1503 b'web', b'certificate', default=None,
1504 )
1505 coreconfigitem(
1506 b'web', b'collapse', default=False,
1507 )
1508 coreconfigitem(
1509 b'web', b'csp', default=None,
1510 )
1511 coreconfigitem(
1512 b'web', b'deny_read', default=list,
1513 )
1514 coreconfigitem(
1515 b'web', b'descend', default=True,
1516 )
1517 coreconfigitem(
1518 b'web', b'description', default=b"",
1519 )
1520 coreconfigitem(
1521 b'web', b'encoding', default=lambda: encoding.encoding,
1522 )
1523 coreconfigitem(
1524 b'web', b'errorlog', default=b'-',
1525 )
1526 coreconfigitem(
1527 b'web', b'ipv6', default=False,
1528 )
1529 coreconfigitem(
1530 b'web', b'maxchanges', default=10,
1531 )
1532 coreconfigitem(
1533 b'web', b'maxfiles', default=10,
1534 )
1535 coreconfigitem(
1536 b'web', b'maxshortchanges', default=60,
1537 )
1538 coreconfigitem(
1539 b'web', b'motd', default=b'',
1540 )
1541 coreconfigitem(
1542 b'web', b'name', default=dynamicdefault,
1543 )
1544 coreconfigitem(
1545 b'web', b'port', default=8000,
1546 )
1547 coreconfigitem(
1548 b'web', b'prefix', default=b'',
1549 )
1550 coreconfigitem(
1551 b'web', b'push_ssl', default=True,
1552 )
1553 coreconfigitem(
1554 b'web', b'refreshinterval', default=20,
1555 )
1556 coreconfigitem(
1557 b'web', b'server-header', default=None,
1558 )
1559 coreconfigitem(
1560 b'web', b'static', default=None,
1561 )
1562 coreconfigitem(
1563 b'web', b'staticurl', default=None,
1564 )
1565 coreconfigitem(
1566 b'web', b'stripes', default=1,
1567 )
1568 coreconfigitem(
1569 b'web', b'style', default=b'paper',
1570 )
1571 coreconfigitem(
1572 b'web', b'templates', default=None,
1573 )
1574 coreconfigitem(
1575 b'web', b'view', default=b'served', experimental=True,
1576 )
1577 coreconfigitem(
1578 b'worker', b'backgroundclose', default=dynamicdefault,
2227 b'ui',
2228 b'verbose',
2229 default=False,
2230 )
2231 coreconfigitem(
2232 b'verify',
2233 b'skipflags',
2234 default=None,
2235 )
2236 coreconfigitem(
2237 b'web',
2238 b'allowbz2',
2239 default=False,
2240 )
2241 coreconfigitem(
2242 b'web',
2243 b'allowgz',
2244 default=False,
2245 )
2246 coreconfigitem(
2247 b'web',
2248 b'allow-pull',
2249 alias=[(b'web', b'allowpull')],
2250 default=True,
2251 )
2252 coreconfigitem(
2253 b'web',
2254 b'allow-push',
2255 alias=[(b'web', b'allow_push')],
2256 default=list,
2257 )
2258 coreconfigitem(
2259 b'web',
2260 b'allowzip',
2261 default=False,
2262 )
2263 coreconfigitem(
2264 b'web',
2265 b'archivesubrepos',
2266 default=False,
2267 )
2268 coreconfigitem(
2269 b'web',
2270 b'cache',
2271 default=True,
2272 )
2273 coreconfigitem(
2274 b'web',
2275 b'comparisoncontext',
2276 default=5,
2277 )
2278 coreconfigitem(
2279 b'web',
2280 b'contact',
2281 default=None,
2282 )
2283 coreconfigitem(
2284 b'web',
2285 b'deny_push',
2286 default=list,
2287 )
2288 coreconfigitem(
2289 b'web',
2290 b'guessmime',
2291 default=False,
2292 )
2293 coreconfigitem(
2294 b'web',
2295 b'hidden',
2296 default=False,
2297 )
2298 coreconfigitem(
2299 b'web',
2300 b'labels',
2301 default=list,
2302 )
2303 coreconfigitem(
2304 b'web',
2305 b'logoimg',
2306 default=b'hglogo.png',
2307 )
2308 coreconfigitem(
2309 b'web',
2310 b'logourl',
2311 default=b'https://mercurial-scm.org/',
2312 )
2313 coreconfigitem(
2314 b'web',
2315 b'accesslog',
2316 default=b'-',
2317 )
2318 coreconfigitem(
2319 b'web',
2320 b'address',
2321 default=b'',
2322 )
2323 coreconfigitem(
2324 b'web',
2325 b'allow-archive',
2326 alias=[(b'web', b'allow_archive')],
2327 default=list,
2328 )
2329 coreconfigitem(
2330 b'web',
2331 b'allow_read',
2332 default=list,
2333 )
2334 coreconfigitem(
2335 b'web',
2336 b'baseurl',
2337 default=None,
2338 )
2339 coreconfigitem(
2340 b'web',
2341 b'cacerts',
2342 default=None,
2343 )
2344 coreconfigitem(
2345 b'web',
2346 b'certificate',
2347 default=None,
2348 )
2349 coreconfigitem(
2350 b'web',
2351 b'collapse',
2352 default=False,
2353 )
2354 coreconfigitem(
2355 b'web',
2356 b'csp',
2357 default=None,
2358 )
2359 coreconfigitem(
2360 b'web',
2361 b'deny_read',
2362 default=list,
2363 )
2364 coreconfigitem(
2365 b'web',
2366 b'descend',
2367 default=True,
2368 )
2369 coreconfigitem(
2370 b'web',
2371 b'description',
2372 default=b"",
2373 )
2374 coreconfigitem(
2375 b'web',
2376 b'encoding',
2377 default=lambda: encoding.encoding,
2378 )
2379 coreconfigitem(
2380 b'web',
2381 b'errorlog',
2382 default=b'-',
2383 )
2384 coreconfigitem(
2385 b'web',
2386 b'ipv6',
2387 default=False,
2388 )
2389 coreconfigitem(
2390 b'web',
2391 b'maxchanges',
2392 default=10,
2393 )
2394 coreconfigitem(
2395 b'web',
2396 b'maxfiles',
2397 default=10,
2398 )
2399 coreconfigitem(
2400 b'web',
2401 b'maxshortchanges',
2402 default=60,
2403 )
2404 coreconfigitem(
2405 b'web',
2406 b'motd',
2407 default=b'',
2408 )
2409 coreconfigitem(
2410 b'web',
2411 b'name',
2412 default=dynamicdefault,
2413 )
2414 coreconfigitem(
2415 b'web',
2416 b'port',
2417 default=8000,
2418 )
2419 coreconfigitem(
2420 b'web',
2421 b'prefix',
2422 default=b'',
2423 )
2424 coreconfigitem(
2425 b'web',
2426 b'push_ssl',
2427 default=True,
2428 )
2429 coreconfigitem(
2430 b'web',
2431 b'refreshinterval',
2432 default=20,
2433 )
2434 coreconfigitem(
2435 b'web',
2436 b'server-header',
2437 default=None,
2438 )
2439 coreconfigitem(
2440 b'web',
2441 b'static',
2442 default=None,
2443 )
2444 coreconfigitem(
2445 b'web',
2446 b'staticurl',
2447 default=None,
2448 )
2449 coreconfigitem(
2450 b'web',
2451 b'stripes',
2452 default=1,
2453 )
2454 coreconfigitem(
2455 b'web',
2456 b'style',
2457 default=b'paper',
2458 )
2459 coreconfigitem(
2460 b'web',
2461 b'templates',
2462 default=None,
2463 )
2464 coreconfigitem(
2465 b'web',
2466 b'view',
2467 default=b'served',
2468 experimental=True,
2469 )
2470 coreconfigitem(
2471 b'worker',
2472 b'backgroundclose',
2473 default=dynamicdefault,
1579 2474 )
1580 2475 # Windows defaults to a limit of 512 open files. A buffer of 128
1581 2476 # should give us enough headway.
1582 2477 coreconfigitem(
1583 b'worker', b'backgroundclosemaxqueue', default=384,
1584 )
1585 coreconfigitem(
1586 b'worker', b'backgroundcloseminfilecount', default=2048,
1587 )
1588 coreconfigitem(
1589 b'worker', b'backgroundclosethreadcount', default=4,
1590 )
1591 coreconfigitem(
1592 b'worker', b'enabled', default=True,
1593 )
1594 coreconfigitem(
1595 b'worker', b'numcpus', default=None,
2478 b'worker',
2479 b'backgroundclosemaxqueue',
2480 default=384,
2481 )
2482 coreconfigitem(
2483 b'worker',
2484 b'backgroundcloseminfilecount',
2485 default=2048,
2486 )
2487 coreconfigitem(
2488 b'worker',
2489 b'backgroundclosethreadcount',
2490 default=4,
2491 )
2492 coreconfigitem(
2493 b'worker',
2494 b'enabled',
2495 default=True,
2496 )
2497 coreconfigitem(
2498 b'worker',
2499 b'numcpus',
2500 default=None,
1596 2501 )
1597 2502
1598 2503 # Rebase-related configuration moved to core because other extensions are
1599 2504 # doing strange things. For example, shelve imports the extension to reuse
1600 2505 # some bits without formally loading it.
1601 2506 coreconfigitem(
1602 b'commands', b'rebase.requiredest', default=False,
1603 )
1604 coreconfigitem(
1605 b'experimental', b'rebaseskipobsolete', default=True,
1606 )
1607 coreconfigitem(
1608 b'rebase', b'singletransaction', default=False,
1609 )
1610 coreconfigitem(
1611 b'rebase', b'experimental.inmemory', default=False,
1612 )
2507 b'commands',
2508 b'rebase.requiredest',
2509 default=False,
2510 )
2511 coreconfigitem(
2512 b'experimental',
2513 b'rebaseskipobsolete',
2514 default=True,
2515 )
2516 coreconfigitem(
2517 b'rebase',
2518 b'singletransaction',
2519 default=False,
2520 )
2521 coreconfigitem(
2522 b'rebase',
2523 b'experimental.inmemory',
2524 default=False,
2525 )
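
As an illustrative aside (a sketch, not part of this change): the registrations above are what back the typed ui.config* accessors, which fall back to the registered default when the user sets nothing. The ui instance below is assumed.

    from mercurial import ui as uimod

    u = uimod.ui.load()
    u.configint(b'web', b'port')         # -> 8000, the registered default
    u.config(b'web', b'style')           # -> b'paper'
    u.configbool(b'worker', b'enabled')  # -> True
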
@@ -316,9 +316,9 b' class basectx(object):'
316 316 return subrepo.nullsubrepo(self, path, pctx)
317 317
318 318 def workingsub(self, path):
319 '''return a subrepo for the stored revision, or wdir if this is a wdir
319 """return a subrepo for the stored revision, or wdir if this is a wdir
320 320 context.
321 '''
321 """
322 322 return subrepo.subrepo(self, path, allowwdir=True)
323 323
324 324 def match(
@@ -1054,8 +1054,7 b' class basefilectx(object):'
1054 1054 return lkr
1055 1055
1056 1056 def isintroducedafter(self, changelogrev):
1057 """True if a filectx has been introduced after a given floor revision
1058 """
1057 """True if a filectx has been introduced after a given floor revision"""
1059 1058 if self.linkrev() >= changelogrev:
1060 1059 return True
1061 1060 introrev = self._introrev(stoprev=changelogrev)
@@ -1232,7 +1231,7 b' class basefilectx(object):'
1232 1231
1233 1232 class filectx(basefilectx):
1234 1233 """A filecontext object makes access to data related to a particular
1235 filerevision convenient."""
1234 filerevision convenient."""
1236 1235
1237 1236 def __init__(
1238 1237 self,
@@ -1244,15 +1243,16 b' class filectx(basefilectx):'
1244 1243 changectx=None,
1245 1244 ):
1246 1245 """changeid must be a revision number, if specified.
1247 fileid can be a file revision or node."""
1246 fileid can be a file revision or node."""
1248 1247 self._repo = repo
1249 1248 self._path = path
1250 1249
1251 1250 assert (
1252 1251 changeid is not None or fileid is not None or changectx is not None
1253 ), (
1254 b"bad args: changeid=%r, fileid=%r, changectx=%r"
1255 % (changeid, fileid, changectx,)
1252 ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
1253 changeid,
1254 fileid,
1255 changectx,
1256 1256 )
1257 1257
1258 1258 if filelog is not None:
@@ -1289,8 +1289,8 b' class filectx(basefilectx):'
1289 1289 return self._repo.unfiltered()[self._changeid]
1290 1290
1291 1291 def filectx(self, fileid, changeid=None):
1292 '''opens an arbitrary revision of the file without
1293 opening a new filelog'''
1292 """opens an arbitrary revision of the file without
1293 opening a new filelog"""
1294 1294 return filectx(
1295 1295 self._repo,
1296 1296 self._path,
@@ -2101,7 +2101,7 b' class committablefilectx(basefilectx):'
2101 2101
2102 2102 class workingfilectx(committablefilectx):
2103 2103 """A workingfilectx object makes access to data related to a particular
2104 file in the working directory convenient."""
2104 file in the working directory convenient."""
2105 2105
2106 2106 def __init__(self, repo, path, filelog=None, workingctx=None):
2107 2107 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
@@ -2702,8 +2702,7 b' class workingcommitctx(workingctx):'
2702 2702
2703 2703 @propertycache
2704 2704 def _changedset(self):
2705 """Return the set of files changed in this context
2706 """
2705 """Return the set of files changed in this context"""
2707 2706 changed = set(self._status.modified)
2708 2707 changed.update(self._status.added)
2709 2708 changed.update(self._status.removed)
@@ -2877,8 +2876,7 b' class memctx(committablectx):'
2877 2876
2878 2877 @propertycache
2879 2878 def _status(self):
2880 """Calculate exact status from ``files`` specified at construction
2881 """
2879 """Calculate exact status from ``files`` specified at construction"""
2882 2880 man1 = self.p1().manifest()
2883 2881 p2 = self._parents[1]
2884 2882 # "1 < len(self._parents)" can't be used for checking
@@ -702,7 +702,7 b' def mergecopies(repo, c1, c2, base):'
702 702
703 703
704 704 def _isfullcopytraceable(repo, c1, base):
705 """ Checks whether base, source and destination are all non-public branches,
705 """Checks whether base, source and destination are all non-public branches,
706 706 and, if so, uses the full copytrace algorithm for increased capabilities,
707 707 since it will be fast enough.
708 708
@@ -770,14 +770,16 b' class branch_copies(object):'
770 770 self.movewithdir = {} if movewithdir is None else movewithdir
771 771
772 772 def __repr__(self):
773 return (
774 '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>'
775 % (self.copy, self.renamedelete, self.dirmove, self.movewithdir,)
773 return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % (
774 self.copy,
775 self.renamedelete,
776 self.dirmove,
777 self.movewithdir,
776 778 )
777 779
778 780
779 781 def _fullcopytracing(repo, c1, c2, base):
780 """ The full copytracing algorithm which finds all the new files that were
782 """The full copytracing algorithm which finds all the new files that were
781 783 added from merge base up to the top commit and for each file it checks if
782 784 this file was copied from another file.
783 785
@@ -967,7 +969,7 b' def _dir_renames(repo, ctx, copy, fullco'
967 969
968 970
969 971 def _heuristicscopytracing(repo, c1, c2, base):
970 """ Fast copytracing using filename heuristics
972 """Fast copytracing using filename heuristics
971 973
973 975 Assumes that moves or renames are of the following two types:
973 975
@@ -1000,7 +1000,7 b' class curseschunkselector(object):'
1000 1000
1001 1001 def toggleallbetween(self):
1002 1002 """toggle applied on or off for all items in range [lastapplied,
1003 current]. """
1003 current]."""
1004 1004 if (
1005 1005 not self.lastapplieditem
1006 1006 or self.currentselecteditem == self.lastapplieditem
@@ -682,7 +682,7 b' def _decoratelines(text, fctx):'
682 682
683 683
684 684 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
685 r'''
685 r"""
686 686 Given parent and child fctxes and annotate data for parents, for all lines
687 687 in either parent that match the child, annotate the child with the parent's
688 688 data.
@@ -691,7 +691,7 b' def _annotatepair(parents, childfctx, ch'
691 691 annotate data as well such that child is never blamed for any lines.
692 692
693 693 See test-annotate.py for unit tests.
694 '''
694 """
695 695 pblocks = [
696 696 (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
697 697 for parent in parents
@@ -425,7 +425,7 b' def dagtext('
425 425 usedots=False,
426 426 maxlinewidth=70,
427 427 ):
428 '''generates lines of a textual representation for a dag event stream
428 """generates lines of a textual representation for a dag event stream
429 429
430 430 events should generate what parsedag() does, so:
431 431
@@ -501,7 +501,7 b' def dagtext('
501 501 >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
502 502 '+1 :f +1 :p2 *f */p2'
503 503
504 '''
504 """
505 505 return b"\n".join(
506 506 dagtextlines(
507 507 dag,
@@ -1062,11 +1062,14 b' def debugdiscovery(ui, repo, remoteurl=b'
1062 1062
1063 1063
1064 1064 @command(
1065 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1065 b'debugdownload',
1066 [
1067 (b'o', b'output', b'', _(b'path')),
1068 ],
1069 optionalrepo=True,
1066 1070 )
1067 1071 def debugdownload(ui, repo, url, output=None, **opts):
1068 """download a resource using Mercurial logic and config
1069 """
1072 """download a resource using Mercurial logic and config"""
1070 1073 fh = urlmod.open(ui, url, output)
1071 1074
1072 1075 dest = ui
@@ -1510,10 +1513,10 b' def debugindexstats(ui, repo):'
1510 1513
1511 1514 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1512 1515 def debuginstall(ui, **opts):
1513 '''test Mercurial installation
1516 """test Mercurial installation
1514 1517
1515 1518 Returns 0 on success.
1516 '''
1519 """
1517 1520 opts = pycompat.byteskwargs(opts)
1518 1521
1519 1522 problems = 0
@@ -2173,8 +2176,7 b' def debugnamecomplete(ui, repo, *args):'
2173 2176 ],
2174 2177 )
2175 2178 def debugnodemap(ui, repo, **opts):
2176 """write and inspect the on-disk nodemap
2177 """
2179 """write and inspect the on-disk nodemap"""
2178 2180 if opts['dump_new']:
2179 2181 unfi = repo.unfiltered()
2180 2182 cl = unfi.changelog
@@ -2402,13 +2404,13 b' def debugp1copies(ui, repo, **opts):'
2402 2404 _(b'FILESPEC...'),
2403 2405 )
2404 2406 def debugpathcomplete(ui, repo, *specs, **opts):
2405 '''complete part or all of a tracked path
2407 """complete part or all of a tracked path
2406 2408
2407 2409 This command supports shells that offer path name completion. It
2408 2410 currently completes only files already known to the dirstate.
2409 2411
2410 2412 Completion extends only to the next path segment unless
2411 --full is specified, in which case entire paths are used.'''
2413 --full is specified, in which case entire paths are used."""
2412 2414
2413 2415 def complete(path, acceptable):
2414 2416 dirstate = repo.dirstate
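
A hypothetical shell-completion hook would invoke the command documented above like so:

    hg debugpathcomplete --full sr

expanding "sr" into complete tracked paths rather than stopping at the next path segment.
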
@@ -2587,13 +2589,13 b' def debugpickmergetool(ui, repo, *pats, '
2587 2589
2588 2590 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2589 2591 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2590 '''access the pushkey key/value protocol
2592 """access the pushkey key/value protocol
2591 2593
2592 2594 With two args, list the keys in the given namespace.
2593 2595
2594 2596 With five args, set a key to new if it currently is set to old.
2595 2597 Reports success or failure.
2596 '''
2598 """
2597 2599
2598 2600 target = hg.peer(ui, {}, repopath)
2599 2601 if keyinfo:
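
The two documented calling forms, shown with a hypothetical repository URL:

    hg debugpushkey https://example.com/repo namespaces
    hg debugpushkey https://example.com/repo bookmarks mybook OLDNODE NEWNODE

(the first lists the keys in a namespace; the second sets a key only if it currently holds the old value).
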
@@ -3432,7 +3434,7 b' def debugsidedata(ui, repo, file_, rev=N'
3432 3434
3433 3435 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3434 3436 def debugssl(ui, repo, source=None, **opts):
3435 '''test a secure connection to a server
3437 """test a secure connection to a server
3436 3438
3437 3439 This builds the certificate chain for the server on Windows, installing the
3438 3440 missing intermediates and trusted root via Windows Update if necessary. It
@@ -3443,7 +3445,7 b' def debugssl(ui, repo, source=None, **op'
3443 3445
3444 3446 If the update succeeds, retry the original operation. Otherwise, the cause
3445 3447 of the SSL error is likely another issue.
3446 '''
3448 """
3447 3449 if not pycompat.iswindows:
3448 3450 raise error.Abort(
3449 3451 _(b'certificate chain building is only possible on Windows')
@@ -3785,7 +3787,9 b' def debugtemplate(ui, repo, tmpl, **opts'
3785 3787
3786 3788 @command(
3787 3789 b'debuguigetpass',
3788 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3790 [
3791 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3792 ],
3789 3793 _(b'[-p TEXT]'),
3790 3794 norepo=True,
3791 3795 )
@@ -3801,7 +3805,9 b" def debuguigetpass(ui, prompt=b''):"
3801 3805
3802 3806 @command(
3803 3807 b'debuguiprompt',
3804 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3808 [
3809 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3810 ],
3805 3811 _(b'[-p TEXT]'),
3806 3812 norepo=True,
3807 3813 )
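
For orientation, the option tuples being reflowed above follow the (shortopt, longopt, default, help[, valuename]) shape consumed by the @command decorator; a hypothetical extension command using it might look like this sketch:

    from mercurial import registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command(
        b'hello',
        [
            (b'g', b'greeting', b'hi', b'greeting text', b'TEXT'),
        ],
        b'[-g TEXT]',
        norepo=True,
    )
    def hello(ui, **opts):
        ui.write(b'%s\n' % opts.get('greeting', b'hi'))
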
@@ -4314,7 +4320,10 b' def debugwireproto(ui, repo, path=None, '
4314 4320 {
4315 4321 'loggingfh': ui,
4316 4322 'loggingname': b's',
4317 'loggingopts': {'logdata': True, 'logdataapis': False,},
4323 'loggingopts': {
4324 'logdata': True,
4325 'logdataapis': False,
4326 },
4318 4327 }
4319 4328 )
4320 4329
@@ -43,14 +43,14 b' def difffeatureopts('
43 43 formatchanging=False,
44 44 configprefix=b'',
45 45 ):
46 '''return diffopts with only opted-in features parsed
46 """return diffopts with only opted-in features parsed
47 47
48 48 Features:
49 49 - git: git-style diffs
50 50 - whitespace: whitespace options like ignoreblanklines and ignorews
51 51 - formatchanging: options that will likely break or cause correctness issues
52 52 with most diff parsers
53 '''
53 """
54 54
55 55 def get(key, name=None, getter=ui.configbool, forceplain=None):
56 56 if opts:
@@ -74,12 +74,12 b' def _getfsnow(vfs):'
74 74 @interfaceutil.implementer(intdirstate.idirstate)
75 75 class dirstate(object):
76 76 def __init__(self, opener, ui, root, validate, sparsematchfn):
77 '''Create a new dirstate object.
77 """Create a new dirstate object.
78 78
79 79 opener is an open()-like callable that can be used to open the
80 80 dirstate file; root is the root of the directory tracked by
81 81 the dirstate.
82 '''
82 """
83 83 self._opener = opener
84 84 self._validate = validate
85 85 self._root = root
@@ -112,12 +112,12 b' class dirstate(object):'
112 112
113 113 @contextlib.contextmanager
114 114 def parentchange(self):
115 '''Context manager for handling dirstate parents.
115 """Context manager for handling dirstate parents.
116 116
117 117 If an exception occurs in the scope of the context manager,
118 118 the incoherent dirstate won't be written when wlock is
119 119 released.
120 '''
120 """
121 121 self._parentwriters += 1
122 122 yield
123 123 # Typically we want the "undo" step of a context manager in a
@@ -128,9 +128,9 b' class dirstate(object):'
128 128 self._parentwriters -= 1
129 129
130 130 def pendingparentchange(self):
131 '''Returns true if the dirstate is in the middle of a set of changes
131 """Returns true if the dirstate is in the middle of a set of changes
132 132 that modify the dirstate parent.
133 '''
133 """
134 134 return self._parentwriters > 0
135 135
136 136 @propertycache
@@ -247,12 +247,12 b' class dirstate(object):'
247 247 return encoding.getcwd()
248 248
249 249 def getcwd(self):
250 '''Return the path from which a canonical path is calculated.
250 """Return the path from which a canonical path is calculated.
251 251
252 252 This path should be used to resolve file patterns or to convert
253 253 canonical paths back to file paths for display. It shouldn't be
254 254 used to get real file paths. Use vfs functions instead.
255 '''
255 """
256 256 cwd = self._cwd
257 257 if cwd == self._root:
258 258 return b''
@@ -275,7 +275,7 b' class dirstate(object):'
275 275 return path
276 276
277 277 def __getitem__(self, key):
278 '''Return the current state of key (a filename) in the dirstate.
278 """Return the current state of key (a filename) in the dirstate.
279 279
280 280 States are:
281 281 n normal
@@ -283,7 +283,7 b' class dirstate(object):'
283 283 r marked for removal
284 284 a marked for addition
285 285 ? not tracked
286 '''
286 """
287 287 return self._map.get(key, (b"?",))[0]
288 288
289 289 def __contains__(self, key):
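
A minimal sketch of querying the state characters documented in __getitem__ above, assuming `repo` is an open localrepository and b'foo' a hypothetical path:

    state = repo.dirstate[b'foo']
    if state == b'a':
        print('foo is marked for addition')
    elif state == b'?':
        print('foo is not tracked')
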
@@ -370,11 +370,11 b' class dirstate(object):'
370 370 raise
371 371
372 372 def invalidate(self):
373 '''Causes the next access to reread the dirstate.
373 """Causes the next access to reread the dirstate.
374 374
375 375 This is different from localrepo.invalidatedirstate() because it always
376 376 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
377 check whether the dirstate has changed before rereading it.'''
377 check whether the dirstate has changed before rereading it."""
378 378
379 379 for a in ("_map", "_branch", "_ignore"):
380 380 if a in self.__dict__:
@@ -426,7 +426,7 b' class dirstate(object):'
426 426 self._map.addfile(f, oldstate, state, mode, size, mtime)
427 427
428 428 def normal(self, f, parentfiledata=None):
429 '''Mark a file normal and clean.
429 """Mark a file normal and clean.
430 430
431 431 parentfiledata: (mode, size, mtime) of the clean file
432 432
@@ -434,7 +434,7 b' class dirstate(object):'
434 434 size), as or close as possible from the point where we
435 435 determined the file was clean, to limit the risk of the
436 436 file having been changed by an external process between the
437 moment where the file was determined to be clean and now.'''
437 moment where the file was determined to be clean and now."""
438 438 if parentfiledata:
439 439 (mode, size, mtime) = parentfiledata
440 440 else:
@@ -581,7 +581,7 b' class dirstate(object):'
581 581 return folded
582 582
583 583 def normalize(self, path, isknown=False, ignoremissing=False):
584 '''
584 """
585 585 normalize the case of a pathname when on a casefolding filesystem
586 586
587 587 isknown specifies whether the filename came from walking the
@@ -596,7 +596,7 b' class dirstate(object):'
596 596 - version of name already stored in the dirstate
597 597 - version of name stored on disk
598 598 - version provided via command arguments
599 '''
599 """
600 600
601 601 if self._checkcase:
602 602 return self._normalize(path, isknown, ignoremissing)
@@ -643,11 +643,11 b' class dirstate(object):'
643 643 self._dirty = True
644 644
645 645 def identity(self):
646 '''Return identity of dirstate itself to detect changing in storage
646 """Return identity of dirstate itself to detect changing in storage
647 647
648 648 If identity of previous dirstate is equal to this, writing
649 649 changes based on the former dirstate out can keep consistency.
650 '''
650 """
651 651 return self._map.identity
652 652
653 653 def write(self, tr):
@@ -769,14 +769,14 b' class dirstate(object):'
769 769 return (None, -1, b"")
770 770
771 771 def _walkexplicit(self, match, subrepos):
772 '''Get stat data about the files explicitly specified by match.
772 """Get stat data about the files explicitly specified by match.
773 773
774 774 Return a triple (results, dirsfound, dirsnotfound).
775 775 - results is a mapping from filename to stat result. It also contains
776 776 listings mapping subrepos and .hg to None.
777 777 - dirsfound is a list of files found to be directories.
778 778 - dirsnotfound is a list of files that the dirstate thinks are
779 directories and that were not found.'''
779 directories and that were not found."""
780 780
781 781 def badtype(mode):
782 782 kind = _(b'unknown')
@@ -904,7 +904,7 b' class dirstate(object):'
904 904 return results, dirsfound, dirsnotfound
905 905
906 906 def walk(self, match, subrepos, unknown, ignored, full=True):
907 '''
907 """
908 908 Walk recursively through the directory tree, finding all files
909 909 matched by match.
910 910
@@ -913,7 +913,7 b' class dirstate(object):'
913 913 Return a dict mapping filename to stat-like object (either
914 914 mercurial.osutil.stat instance or return value of os.stat()).
915 915
916 '''
916 """
917 917 # full is a flag that extensions that hook into walk can use -- this
918 918 # implementation doesn't use it at all. This satisfies the contract
919 919 # because we only guarantee a "maybe".
@@ -1168,7 +1168,7 b' class dirstate(object):'
1168 1168 return (lookup, status)
1169 1169
1170 1170 def status(self, match, subrepos, ignored, clean, unknown):
1171 '''Determine the status of the working copy relative to the
1171 """Determine the status of the working copy relative to the
1172 1172 dirstate and return a pair of (unsure, status), where status is of type
1173 1173 scmutil.status and:
1174 1174
@@ -1182,7 +1182,7 b' class dirstate(object):'
1182 1182 status.clean:
1183 1183 files that have definitely not been modified since the
1184 1184 dirstate was written
1185 '''
1185 """
1186 1186 listignored, listclean, listunknown = ignored, clean, unknown
1187 1187 lookup, modified, added, unknown, ignored = [], [], [], [], []
1188 1188 removed, deleted, clean = [], [], []
@@ -1305,9 +1305,9 b' class dirstate(object):'
1305 1305 return (lookup, status)
1306 1306
1307 1307 def matches(self, match):
1308 '''
1308 """
1309 1309 return files in the dirstate (in whatever state) filtered by match
1310 '''
1310 """
1311 1311 dmap = self._map
1312 1312 if rustmod is not None:
1313 1313 dmap = self._map._rustmap
@@ -17,7 +17,7 b' from . import ('
17 17
18 18
19 19 class dirstateguard(util.transactional):
20 '''Restore dirstate at unexpected failure.
20 """Restore dirstate at unexpected failure.
21 21
22 22 At the construction, this class does:
23 23
@@ -28,7 +28,7 b' class dirstateguard(util.transactional):'
28 28 is invoked before ``close()``.
29 29
30 30 This just removes the backup file at ``close()`` before ``release()``.
31 '''
31 """
32 32
33 33 def __init__(self, repo, name):
34 34 self._repo = repo
@@ -75,7 +75,7 b' def findcommonincoming(repo, remote, hea'
75 75
76 76
77 77 class outgoing(object):
78 '''Represents the result of a findcommonoutgoing() call.
78 """Represents the result of a findcommonoutgoing() call.
79 79
80 80 Members:
81 81
@@ -94,7 +94,7 b' class outgoing(object):'
94 94 remotely.
95 95
96 96 Some members are computed on demand from the heads, unless provided upfront
97 by discovery.'''
97 by discovery."""
98 98
99 99 def __init__(
100 100 self, repo, commonheads=None, ancestorsof=None, missingroots=None
@@ -157,7 +157,7 b' class outgoing(object):'
157 157 def findcommonoutgoing(
158 158 repo, other, onlyheads=None, force=False, commoninc=None, portable=False
159 159 ):
160 '''Return an outgoing instance to identify the nodes present in repo but
160 """Return an outgoing instance to identify the nodes present in repo but
161 161 not in other.
162 162
163 163 If onlyheads is given, only nodes ancestral to nodes in onlyheads
@@ -168,7 +168,7 b' def findcommonoutgoing('
168 168 findcommonincoming(repo, other, force) to avoid recomputing it here.
169 169
170 170 If portable is given, compute more conservative common and ancestorsof,
171 to make bundles created from the instance more portable.'''
171 to make bundles created from the instance more portable."""
172 172 # declare an empty outgoing object to be filled later
173 173 og = outgoing(repo, None, None)
174 174
@@ -332,7 +332,10 b' def _nowarnheads(pushop):'
332 332
333 333 with remote.commandexecutor() as e:
334 334 remotebookmarks = e.callcommand(
335 b'listkeys', {b'namespace': b'bookmarks',}
335 b'listkeys',
336 {
337 b'namespace': b'bookmarks',
338 },
336 339 ).result()
337 340
338 341 bookmarkedheads = set()
@@ -470,7 +473,10 b' def checkheads(pushop):'
470 473 if branch not in (b'default', None):
471 474 errormsg = _(
472 475 b"push creates new remote head %s on branch '%s'"
473 ) % (short(dhs[0]), branch,)
476 ) % (
477 short(dhs[0]),
478 branch,
479 )
474 480 elif repo[dhs[0]].bookmarks():
475 481 errormsg = _(
476 482 b"push creates new remote head %s "
@@ -519,10 +519,10 b' def aliasargs(fn, givenargs):'
519 519
520 520
521 521 def aliasinterpolate(name, args, cmd):
522 '''interpolate args into cmd for shell aliases
522 """interpolate args into cmd for shell aliases
523 523
524 524 This also handles $0, $@ and "$@".
525 '''
525 """
526 526 # util.interpolate can't deal with "$@" (with quotes) because it's only
527 527 # built to match prefix + patterns.
528 528 replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
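
A hypothetical hgrc shell alias makes the interpolation concrete:

    [alias]
    latest = !hg log -l $1 --template '{desc|firstline}\n'

Running `hg latest 5` substitutes $1 -> 5, while $@ and "$@" expand to all of the alias's arguments, the quoted form keeping each argument a single shell word.
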
@@ -630,12 +630,18 b' class cmdalias(object):'
630 630 except error.UnknownCommand:
631 631 self.badalias = _(
632 632 b"alias '%s' resolves to unknown command '%s'"
633 ) % (self.name, cmd,)
633 ) % (
634 self.name,
635 cmd,
636 )
634 637 self.unknowncmd = True
635 638 except error.AmbiguousCommand:
636 639 self.badalias = _(
637 640 b"alias '%s' resolves to ambiguous command '%s'"
638 ) % (self.name, cmd,)
641 ) % (
642 self.name,
643 cmd,
644 )
639 645
640 646 def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
641 647 # confine strings to be passed to i18n.gettext()
@@ -113,8 +113,8 b" fallbackencoding = b'ISO-8859-1'"
113 113
114 114
115 115 class localstr(bytes):
116 '''This class allows strings that are unmodified to be
117 round-tripped to the local encoding and back'''
116 """This class allows strings that are unmodified to be
117 round-tripped to the local encoding and back"""
118 118
119 119 def __new__(cls, u, l):
120 120 s = bytes.__new__(cls, l)
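
A sketch of the round-trip this class enables; tolocal/fromlocal are the public entry points, and the byte string is an arbitrary example:

    from mercurial import encoding

    u = b'caf\xc3\xa9'            # UTF-8 input
    l = encoding.tolocal(u)       # may come back as a localstr wrapper
    assert encoding.fromlocal(l) == u
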
@@ -329,8 +329,8 b' def ucolwidth(d):'
329 329
330 330 def getcols(s, start, c):
331 331 # type: (bytes, int, int) -> bytes
332 '''Use colwidth to find a c-column substring of s starting at byte
333 index start'''
332 """Use colwidth to find a c-column substring of s starting at byte
333 index start"""
334 334 for x in pycompat.xrange(start + c, len(s)):
335 335 t = s[start:x]
336 336 if colwidth(t) == c:
@@ -487,7 +487,7 b' def upperfallback(s):'
487 487
488 488
489 489 class normcasespecs(object):
490 '''what a platform's normcase does to ASCII strings
490 """what a platform's normcase does to ASCII strings
491 491
492 492 This is specified per platform, and should be consistent with what normcase
493 493 on that platform actually does.
@@ -496,7 +496,7 b' class normcasespecs(object):'
496 496 upper: normcase uppercases ASCII strings
497 497 other: the fallback function should always be called
498 498
499 This should be kept in sync with normcase_spec in util.h.'''
499 This should be kept in sync with normcase_spec in util.h."""
500 500
501 501 lower = -1
502 502 upper = 1
@@ -505,7 +505,7 b' class normcasespecs(object):'
505 505
506 506 def jsonescape(s, paranoid=False):
507 507 # type: (Any, Any) -> Any
508 '''returns a string suitable for JSON
508 """returns a string suitable for JSON
509 509
510 510 JSON is problematic for us because it doesn't support non-Unicode
511 511 bytes. To deal with this, we take the following approach:
@@ -547,7 +547,7 b' def jsonescape(s, paranoid=False):'
547 547 'non-BMP: \\\\ud834\\\\udd1e'
548 548 >>> jsonescape(b'<foo@example.org>', paranoid=True)
549 549 '\\\\u003cfoo@example.org\\\\u003e'
550 '''
550 """
551 551
552 552 u8chars = toutf8b(s)
553 553 try:
@@ -569,11 +569,11 b' else:'
569 569
570 570 def getutf8char(s, pos):
571 571 # type: (bytes, int) -> bytes
572 '''get the next full utf-8 character in the given string, starting at pos
572 """get the next full utf-8 character in the given string, starting at pos
573 573
574 574 Raises a UnicodeError if the given location does not start a valid
575 575 utf-8 character.
576 '''
576 """
577 577
578 578 # find how many bytes to attempt decoding from first nibble
579 579 l = _utf8len[ord(s[pos : pos + 1]) >> 4]
@@ -588,7 +588,7 b' def getutf8char(s, pos):'
588 588
589 589 def toutf8b(s):
590 590 # type: (bytes) -> bytes
591 '''convert a local, possibly-binary string into UTF-8b
591 """convert a local, possibly-binary string into UTF-8b
592 592
593 593 This is intended as a generic method to preserve data when working
594 594 with schemes like JSON and XML that have no provision for
@@ -616,7 +616,7 b' def toutf8b(s):'
616 616 arbitrary bytes into an internal Unicode format that can be
617 617 re-encoded back into the original. Here we are exposing the
618 618 internal surrogate encoding as a UTF-8 string.)
619 '''
619 """
620 620
621 621 if isinstance(s, localstr):
622 622 # assume that the original UTF-8 sequence would never contain
@@ -657,7 +657,7 b' def toutf8b(s):'
657 657
658 658 def fromutf8b(s):
659 659 # type: (bytes) -> bytes
660 '''Given a UTF-8b string, return a local, possibly-binary string.
660 """Given a UTF-8b string, return a local, possibly-binary string.
661 661
662 662 return the original binary string. This
663 663 is a round-trip process for strings like filenames, but metadata
@@ -677,7 +677,7 b' def fromutf8b(s):'
677 677 True
678 678 >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
679 679 True
680 '''
680 """
681 681
682 682 if isasciistr(s):
683 683 return s
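
A sketch of the UTF-8b round-trip described above, using an arbitrary non-UTF-8 byte string:

    from mercurial import encoding

    raw = b'\xff\x80name'                 # not valid UTF-8
    wire = encoding.toutf8b(raw)          # safe to pass through JSON/XML layers
    assert encoding.fromutf8b(wire) == raw
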
@@ -394,8 +394,7 b' class UnsupportedMergeRecords(Abort):'
394 394
395 395
396 396 class UnknownVersion(Abort):
397 """generic exception for aborting from an encounter with an unknown version
398 """
397 """generic exception for aborting from an encounter with an unknown version"""
399 398
400 399 def __init__(self, msg, hint=None, version=None):
401 400 self.version = version
@@ -378,14 +378,14 b' def push('
378 378 publish=False,
379 379 opargs=None,
380 380 ):
381 '''Push outgoing changesets (limited by revs) from a local
381 """Push outgoing changesets (limited by revs) from a local
382 382 repository to remote. Return an integer:
383 383 - None means nothing to push
384 384 - 0 means HTTP error
385 385 - 1 means we pushed and remote head count is unchanged *or*
386 386 we have outgoing changesets but refused to push
387 387 - other values as described by addchangegroup()
388 '''
388 """
389 389 if opargs is None:
390 390 opargs = {}
391 391 pushop = pushoperation(
@@ -1510,8 +1510,8 b' def _fullpullbundle2(repo, pullop):'
1510 1510
1511 1511
1512 1512 def add_confirm_callback(repo, pullop):
1513 """ adds a finalize callback to the transaction which can be used to show
1514 stats to the user and confirm the pull before committing the transaction """
1513 """adds a finalize callback to the transaction which can be used to show
1514 stats to the user and confirm the pull before committing the transaction"""
1515 1515
1516 1516 tr = pullop.trmanager.transaction()
1517 1517 scmutil.registersummarycallback(
@@ -1892,7 +1892,11 b' def _pullchangeset(pullop):'
1892 1892 elif pullop.heads is None:
1893 1893 with pullop.remote.commandexecutor() as e:
1894 1894 cg = e.callcommand(
1895 b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
1895 b'changegroup',
1896 {
1897 b'nodes': pullop.fetch,
1898 b'source': b'pull',
1899 },
1896 1900 ).result()
1897 1901
1898 1902 elif not pullop.remote.capable(b'changegroupsubset'):
@@ -89,7 +89,10 b' def pull(pullop):'
89 89 continue
90 90
91 91 phases.advanceboundary(
92 repo, tr, phasenumber, csetres[b'nodesbyphase'][phase],
92 repo,
93 tr,
94 phasenumber,
95 csetres[b'nodesbyphase'][phase],
93 96 )
94 97
95 98 # Write bookmark updates.
@@ -189,7 +192,10 b' def _checkuserawstorefiledata(pullop):'
189 192 def _fetchrawstorefiles(repo, remote):
190 193 with remote.commandexecutor() as e:
191 194 objs = e.callcommand(
192 b'rawstorefiledata', {b'files': [b'changelog', b'manifestlog'],}
195 b'rawstorefiledata',
196 {
197 b'files': [b'changelog', b'manifestlog'],
198 },
193 199 ).result()
194 200
195 201 # First object is a summary of files data that follows.
@@ -746,7 +752,10 b' def _fetchfilesfromcsets('
746 752 with remote.commandexecutor() as e:
747 753 args = {
748 754 b'revisions': [
749 {b'type': b'changesetexplicit', b'nodes': batch,}
755 {
756 b'type': b'changesetexplicit',
757 b'nodes': batch,
758 }
750 759 ],
751 760 b'fields': fields,
752 761 b'haveparents': haveparents,
@@ -457,7 +457,7 b' def _loadextra(ui, newindex, extraloader'
457 457
458 458
459 459 def afterloaded(extension, callback):
460 '''Run the specified function after a named extension is loaded.
460 """Run the specified function after a named extension is loaded.
461 461
462 462 If the named extension is already loaded, the callback will be called
463 463 immediately.
@@ -467,7 +467,7 b' def afterloaded(extension, callback):'
467 467
468 468 The callback receives the named argument ``loaded``, which is a boolean
469 469 indicating whether the dependent extension actually loaded.
470 '''
470 """
471 471
472 472 if extension in _extensions:
473 473 # Report loaded as False if the extension is disabled
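
A sketch of the callback contract, as hypothetical extension setup code:

    from mercurial import extensions

    def _rebaseloaded(loaded):
        if loaded:
            pass  # safe to reach into the rebase extension here

    def extsetup(ui):
        extensions.afterloaded(b'rebase', _rebaseloaded)
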
@@ -500,12 +500,12 b' def populateui(ui):'
500 500
501 501
502 502 def bind(func, *args):
503 '''Partial function application
503 """Partial function application
504 504
505 Returns a new function that is the partial application of args and kwargs
506 to func. For example,
505 Returns a new function that is the partial application of args and kwargs
506 to func. For example,
507 507
508 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
508 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)"""
509 509 assert callable(func)
510 510
511 511 def closure(*a, **kw):
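
The docstring's contract restated as a runnable sketch (f is a throwaway example function):

    from mercurial import extensions

    def f(a, b, bar=None):
        return (a, b, bar)

    g = extensions.bind(f, 1)
    assert g(2, bar=3) == f(1, 2, bar=3)
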
@@ -618,7 +618,7 b' class wrappedfunction(object):'
618 618
619 619
620 620 def wrapfunction(container, funcname, wrapper):
621 '''Wrap the function named funcname in container
621 """Wrap the function named funcname in container
622 622
623 623 Replace the funcname member in the given container with the specified
624 624 wrapper. The container is typically a module, class, or instance.
@@ -649,7 +649,7 b' def wrapfunction(container, funcname, wr'
649 649 work. Since you cannot control what other extensions are loaded by
650 650 your end users, you should play nicely with others by using the
651 651 subclass trick.
652 '''
652 """
653 653 assert callable(wrapper)
654 654
655 655 origfn = getattr(container, funcname)
@@ -668,7 +668,7 b' def wrapfunction(container, funcname, wr'
668 668
669 669
670 670 def unwrapfunction(container, funcname, wrapper=None):
671 '''undo wrapfunction
671 """undo wrapfunction
672 672
672 672 If wrapper is None, undo the last wrap. Otherwise, remove the wrapper
674 674 from the chain of wrappers.
@@ -676,7 +676,7 b' def unwrapfunction(container, funcname, '
676 676 Return the removed wrapper.
677 677 Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
678 678 wrapper is not None but is not found in the wrapper chain.
679 '''
679 """
680 680 chain = getwrapperchain(container, funcname)
681 681 origfn = chain.pop()
682 682 if wrapper is None:
@@ -689,13 +689,13 b' def unwrapfunction(container, funcname, '
689 689
690 690
691 691 def getwrapperchain(container, funcname):
692 '''get a chain of wrappers of a function
692 """get a chain of wrappers of a function
693 693
694 694 Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
695 695
696 696 The wrapper functions are the ones passed to wrapfunction, whose first
697 697 argument is origfunc.
698 '''
698 """
699 699 result = []
700 700 fn = getattr(container, funcname)
701 701 while fn:
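
A sketch of the wrap/unwrap pairing documented above; the wrapper itself is hypothetical, but receiving the original function first is the convention these docstrings describe:

    from mercurial import extensions, util

    def loggingmakedirs(orig, *args, **kwargs):
        # delegate to the wrapped function unchanged
        return orig(*args, **kwargs)

    extensions.wrapfunction(util, 'makedirs', loggingmakedirs)
    # ... later, undo exactly this wrapper:
    extensions.unwrapfunction(util, 'makedirs', loggingmakedirs)
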
@@ -744,11 +744,11 b' def _disabledpaths():'
744 744
745 745
746 746 def _moduledoc(file):
747 '''return the top-level python documentation for the given file
747 """return the top-level python documentation for the given file
748 748
749 749 Loosely inspired by pydoc.source_synopsis(), but rewritten to
750 750 handle triple quotes and to return the whole text instead of just
751 the synopsis'''
751 the synopsis"""
752 752 result = []
753 753
754 754 line = file.readline()
@@ -883,8 +883,8 b' def _finddisabledcmd(ui, cmd, name, path'
883 883
884 884
885 885 def disabledcmd(ui, cmd, strict=False):
886 '''find cmd from disabled extensions without importing.
887 returns (cmdname, extname, doc)'''
886 """find cmd from disabled extensions without importing.
887 returns (cmdname, extname, doc)"""
888 888
889 889 paths = _disabledpaths()
890 890 if not paths:
@@ -1232,8 +1232,7 b' def filemerge(repo, wctx, mynode, orig, '
1232 1232
1233 1233
1234 1234 def loadinternalmerge(ui, extname, registrarobj):
1235 """Load internal merge tool from specified registrarobj
1236 """
1235 """Load internal merge tool from specified registrarobj"""
1237 1236 for name, func in pycompat.iteritems(registrarobj._table):
1238 1237 fullname = b':' + name
1239 1238 internals[fullname] = func
@@ -122,8 +122,7 b' predicate = registrar.filesetpredicate(s'
122 122
123 123 @predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
124 124 def modified(mctx, x):
125 """File that is modified according to :hg:`status`.
126 """
125 """File that is modified according to :hg:`status`."""
127 126 # i18n: "modified" is a keyword
128 127 getargs(x, 0, 0, _(b"modified takes no arguments"))
129 128 s = set(mctx.status().modified)
@@ -132,8 +131,7 b' def modified(mctx, x):'
132 131
133 132 @predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
134 133 def added(mctx, x):
135 """File that is added according to :hg:`status`.
136 """
134 """File that is added according to :hg:`status`."""
137 135 # i18n: "added" is a keyword
138 136 getargs(x, 0, 0, _(b"added takes no arguments"))
139 137 s = set(mctx.status().added)
@@ -142,8 +140,7 b' def added(mctx, x):'
142 140
143 141 @predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
144 142 def removed(mctx, x):
145 """File that is removed according to :hg:`status`.
146 """
143 """File that is removed according to :hg:`status`."""
147 144 # i18n: "removed" is a keyword
148 145 getargs(x, 0, 0, _(b"removed takes no arguments"))
149 146 s = set(mctx.status().removed)
@@ -152,8 +149,7 b' def removed(mctx, x):'
152 149
153 150 @predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
154 151 def deleted(mctx, x):
155 """Alias for ``missing()``.
156 """
152 """Alias for ``missing()``."""
157 153 # i18n: "deleted" is a keyword
158 154 getargs(x, 0, 0, _(b"deleted takes no arguments"))
159 155 s = set(mctx.status().deleted)
@@ -162,8 +158,7 b' def deleted(mctx, x):'
162 158
163 159 @predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
164 160 def missing(mctx, x):
165 """File that is missing according to :hg:`status`.
166 """
161 """File that is missing according to :hg:`status`."""
167 162 # i18n: "missing" is a keyword
168 163 getargs(x, 0, 0, _(b"missing takes no arguments"))
169 164 s = set(mctx.status().deleted)
@@ -190,8 +185,7 b' def ignored(mctx, x):'
190 185
191 186 @predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
192 187 def clean(mctx, x):
193 """File that is clean according to :hg:`status`.
194 """
188 """File that is clean according to :hg:`status`."""
195 189 # i18n: "clean" is a keyword
196 190 getargs(x, 0, 0, _(b"clean takes no arguments"))
197 191 s = set(mctx.status().clean)
@@ -208,8 +202,7 b' def tracked(mctx, x):'
208 202
209 203 @predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
210 204 def binary(mctx, x):
211 """File that appears to be binary (contains NUL bytes).
212 """
205 """File that appears to be binary (contains NUL bytes)."""
213 206 # i18n: "binary" is a keyword
214 207 getargs(x, 0, 0, _(b"binary takes no arguments"))
215 208 return mctx.fpredicate(
@@ -219,8 +212,7 b' def binary(mctx, x):'
219 212
220 213 @predicate(b'exec()')
221 214 def exec_(mctx, x):
222 """File that is marked as executable.
223 """
215 """File that is marked as executable."""
224 216 # i18n: "exec" is a keyword
225 217 getargs(x, 0, 0, _(b"exec takes no arguments"))
226 218 ctx = mctx.ctx
@@ -229,8 +221,7 b' def exec_(mctx, x):'
229 221
230 222 @predicate(b'symlink()')
231 223 def symlink(mctx, x):
232 """File that is marked as a symlink.
233 """
224 """File that is marked as a symlink."""
234 225 # i18n: "symlink" is a keyword
235 226 getargs(x, 0, 0, _(b"symlink takes no arguments"))
236 227 ctx = mctx.ctx
@@ -239,8 +230,7 b' def symlink(mctx, x):'
239 230
240 231 @predicate(b'resolved()', weight=_WEIGHT_STATUS)
241 232 def resolved(mctx, x):
242 """File that is marked resolved according to :hg:`resolve -l`.
243 """
233 """File that is marked resolved according to :hg:`resolve -l`."""
244 234 # i18n: "resolved" is a keyword
245 235 getargs(x, 0, 0, _(b"resolved takes no arguments"))
246 236 if mctx.ctx.rev() is not None:
@@ -253,8 +243,7 b' def resolved(mctx, x):'
253 243
254 244 @predicate(b'unresolved()', weight=_WEIGHT_STATUS)
255 245 def unresolved(mctx, x):
256 """File that is marked unresolved according to :hg:`resolve -l`.
257 """
246 """File that is marked unresolved according to :hg:`resolve -l`."""
258 247 # i18n: "unresolved" is a keyword
259 248 getargs(x, 0, 0, _(b"unresolved takes no arguments"))
260 249 if mctx.ctx.rev() is not None:
@@ -267,8 +256,7 b' def unresolved(mctx, x):'
267 256
268 257 @predicate(b'hgignore()', weight=_WEIGHT_STATUS)
269 258 def hgignore(mctx, x):
270 """File that matches the active .hgignore pattern.
271 """
259 """File that matches the active .hgignore pattern."""
272 260 # i18n: "hgignore" is a keyword
273 261 getargs(x, 0, 0, _(b"hgignore takes no arguments"))
274 262 return mctx.ctx.repo().dirstate._ignore
@@ -288,8 +276,7 b' def portable(mctx, x):'
288 276
289 277 @predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
290 278 def grep(mctx, x):
291 """File contains the given regular expression.
292 """
279 """File contains the given regular expression."""
293 280 try:
294 281 # i18n: "grep" is a keyword
295 282 r = re.compile(getstring(x, _(b"grep requires a pattern")))
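
These predicates power `set:` expressions on the command line; two hypothetical invocations:

    hg status 'set:modified() and grep("TODO")'
    hg forget 'set:hgignore()'
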
@@ -414,8 +401,7 b' def eol(mctx, x):'
414 401
415 402 @predicate(b'copied()')
416 403 def copied(mctx, x):
417 """File that is recorded as being copied.
418 """
404 """File that is recorded as being copied."""
419 405 # i18n: "copied" is a keyword
420 406 getargs(x, 0, 0, _(b"copied takes no arguments"))
421 407
@@ -476,8 +462,7 b' def status(mctx, x):'
476 462
477 463 @predicate(b'subrepo([pattern])')
478 464 def subrepo(mctx, x):
479 """Subrepositories whose paths match the given pattern.
480 """
465 """Subrepositories whose paths match the given pattern."""
481 466 # i18n: "subrepo" is a keyword
482 467 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
483 468 ctx = mctx.ctx
@@ -628,8 +613,7 b' def match(ctx, cwd, expr, badfn=None):'
628 613
629 614
630 615 def loadpredicate(ui, extname, registrarobj):
631 """Load fileset predicates from specified registrarobj
632 """
616 """Load fileset predicates from specified registrarobj"""
633 617 for name, func in pycompat.iteritems(registrarobj._table):
634 618 symbols[name] = func
635 619
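Every hunk in this run is the same mechanical edit: black 20.8b1 now formats docstrings, so a one-line docstring whose closing quotes sat on their own line is collapsed onto a single line. A minimal before/after sketch (function name hypothetical):

    # before the upgrade
    def sketch():
        """File that does something.
        """

    # after black 20.8b1: the closing quotes are pulled up when the text fits
    def sketch():
        """File that does something."""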
@@ -698,10 +698,10 b' def help_('
698 698 fullname=None,
699 699 **opts
700 700 ):
701 '''
701 """
702 702 Generate the help for 'name' as unformatted restructured text. If
703 703 'name' is None, describe the commands available.
704 '''
704 """
705 705
706 706 opts = pycompat.byteskwargs(opts)
707 707
@@ -243,7 +243,7 b' def peer(uiorrepo, opts, path, create=Fa'
243 243
244 244
245 245 def defaultdest(source):
246 '''return default destination of clone if none is given
246 """return default destination of clone if none is given
247 247
248 248 >>> defaultdest(b'foo')
249 249 'foo'
@@ -257,7 +257,7 b' def defaultdest(source):'
257 257 ''
258 258 >>> defaultdest(b'http://example.org/foo/')
259 259 'foo'
260 '''
260 """
261 261 path = util.url(source).path
262 262 if not path:
263 263 return b''
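The defaultdest hunk shows the other docstring change in this upgrade: '''-quoted docstrings are rewritten to """ quotes. Ordinary single-quoted strings stay untouched throughout these hunks (Mercurial's black setup skips string normalization); only docstrings are affected. Sketch (hypothetical function):

    # before
    def sketch():
        '''one docstring'''

    # after: docstring quotes are normalized, other strings keep their quotes
    def sketch():
        """one docstring"""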
@@ -333,7 +333,7 b' def share('
333 333
334 334
335 335 def _prependsourcehgrc(repo):
336 """ copies the source repo config and prepend it in current repo .hg/hgrc
336 """copies the source repo config and prepend it in current repo .hg/hgrc
337 337 on unshare. This is only done if the share was perfomed using share safe
338 338 method where we share config of source in shares"""
339 339 srcvfs = vfsmod.vfs(repo.sharedpath)
@@ -443,10 +443,10 b' def _postshareupdate(repo, update, check'
443 443
444 444
445 445 def copystore(ui, srcrepo, destpath):
446 '''copy files from store of srcrepo in destpath
446 """copy files from store of srcrepo in destpath
447 447
448 448 returns destlock
449 '''
449 """
450 450 destlock = None
451 451 try:
452 452 hardlink = None
@@ -517,7 +517,12 b' def clonewithshare('
517 517 for r in rev:
518 518 with srcpeer.commandexecutor() as e:
519 519 remoterevs.append(
520 e.callcommand(b'lookup', {b'key': r,}).result()
520 e.callcommand(
521 b'lookup',
522 {
523 b'key': r,
524 },
525 ).result()
521 526 )
522 527 revs = remoterevs
523 528
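The exploded lookup call above is 20.8b1's "magic trailing comma": a trailing comma already present inside a bracket pair now forces black to keep that pair one element per line instead of packing it. The rule in isolation (values hypothetical):

    xs = [1, 2, 3,]  # trailing comma present
    # black 20.8b1 rewrites it as:
    xs = [
        1,
        2,
        3,
    ]

    ys = [1, 2, 3]  # no trailing comma: stays on one line if it fits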
@@ -751,7 +756,10 b' def clone('
751 756 try:
752 757 with srcpeer.commandexecutor() as e:
753 758 rootnode = e.callcommand(
754 b'lookup', {b'key': b'0',}
759 b'lookup',
760 {
761 b'key': b'0',
762 },
755 763 ).result()
756 764
757 765 if rootnode != node.nullid:
@@ -900,7 +908,12 b' def clone('
900 908 for rev in revs:
901 909 with srcpeer.commandexecutor() as e:
902 910 remoterevs.append(
903 e.callcommand(b'lookup', {b'key': rev,}).result()
911 e.callcommand(
912 b'lookup',
913 {
914 b'key': rev,
915 },
916 ).result()
904 917 )
905 918 revs = remoterevs
906 919
@@ -974,7 +987,10 b' def clone('
974 987 if update is not True:
975 988 with srcpeer.commandexecutor() as e:
976 989 checkout = e.callcommand(
977 b'lookup', {b'key': update,}
990 b'lookup',
991 {
992 b'key': update,
993 },
978 994 ).result()
979 995
980 996 uprev = None
@@ -1176,7 +1192,10 b' def updatetotally(ui, repo, checkout, br'
1176 1192
1177 1193
1178 1194 def merge(
1179 ctx, force=False, remind=True, labels=None,
1195 ctx,
1196 force=False,
1197 remind=True,
1198 labels=None,
1180 1199 ):
1181 1200 """Branch merge with node, resolving changes. Return true if any
1182 1201 unresolved conflicts."""
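The same trailing-comma rule applies to parameter lists, which is why merge() gains a one-parameter-per-line signature above. Sketch:

    # the comma after the last parameter keeps the signature exploded
    def merge(
        ctx,
        force=False,
        remind=True,
        labels=None,
    ):
        ...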
@@ -27,7 +27,7 b' from . import ('
27 27
28 28
29 29 def hgweb(config, name=None, baseui=None):
30 '''create an hgweb wsgi object
30 """create an hgweb wsgi object
31 31
32 32 config can be one of:
33 33 - repo object (single repo view)
@@ -35,7 +35,7 b' def hgweb(config, name=None, baseui=None'
35 35 - path to config file (multi-repo view)
36 36 - dict of virtual:real pairs (multi-repo view)
37 37 - list of virtual:real tuples (multi-repo view)
38 '''
38 """
39 39
40 40 if isinstance(config, pycompat.unicode):
41 41 raise error.ProgrammingError(
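Per the docstring, config may be a repo object, a repo or config-file path, or virtual:real mappings, and the guard at the end of this hunk rejects unicode values. A minimal WSGI script under those assumptions (repository path hypothetical):

    # hgweb.wsgi sketch: serve one repository; the path is bytes,
    # since hgweb() raises ProgrammingError for unicode config
    from mercurial import demandimport
    demandimport.enable()
    from mercurial.hgweb import hgweb

    application = hgweb(b'/srv/repos/myrepo')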
@@ -51,9 +51,9 b' def ismember(ui, username, userlist):'
51 51
52 52
53 53 def checkauthz(hgweb, req, op):
54 '''Check permission for operation based on request data (including
54 """Check permission for operation based on request data (including
55 55 authentication info). Return if op allowed, else raise an ErrorResponse
56 exception.'''
56 exception."""
57 57
58 58 user = req.remoteuser
59 59
@@ -86,12 +86,12 b' def _stylemap(styles, path=None):'
86 86
87 87
88 88 def makebreadcrumb(url, prefix=b''):
89 '''Return a 'URL breadcrumb' list
89 """Return a 'URL breadcrumb' list
90 90
91 91 A 'URL breadcrumb' is a list of URL-name pairs,
92 92 corresponding to each of the path items on a URL.
93 93 This can be used to create path navigation entries.
94 '''
94 """
95 95 if url.endswith(b'/'):
96 96 url = url[:-1]
97 97 if prefix:
@@ -622,8 +622,8 b' class wsgiresponse(object):'
622 622
623 623
624 624 def wsgiapplication(app_maker):
625 '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
626 can and should now be used as a WSGI application.'''
625 """For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
626 can and should now be used as a WSGI application."""
627 627 application = app_maker()
628 628
629 629 def run_wsgi(env, respond):
@@ -491,11 +491,11 b' def commonentry(repo, ctx):'
491 491
492 492
493 493 def changelistentry(web, ctx):
494 '''Obtain a dictionary to be used for entries in a changelist.
494 """Obtain a dictionary to be used for entries in a changelist.
495 495
496 496 This function is called when producing items for the "entries" list passed
497 497 to the "shortlog" and "changelog" templates.
498 '''
498 """
499 499 repo = web.repo
500 500 rev = ctx.rev()
501 501 n = scmutil.binnode(ctx)
@@ -30,14 +30,14 b' from .utils import ('
30 30
31 31
32 32 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
33 '''call python hook. hook is callable object, looked up as
33 """call python hook. hook is callable object, looked up as
34 34 name in python module. if callable returns "true", hook
35 35 fails, else passes. if hook raises exception, treated as
36 36 hook failure. exception propagates if throw is "true".
37 37
38 38 reason for "true" meaning "hook failed" is so that
39 39 unmodified commands (e.g. mercurial.commands.update) can
40 be run as hooks without wrappers to convert return values.'''
40 be run as hooks without wrappers to convert return values."""
41 41
42 42 if callable(funcname):
43 43 obj = funcname
@@ -766,7 +766,10 b' class httpv2executor(object):'
766 766 % _(b', ').join(sorted(permissions))
767 767 )
768 768
769 permission = {b'push': b'rw', b'pull': b'ro',}[permissions.pop()]
769 permission = {
770 b'push': b'rw',
771 b'pull': b'ro',
772 }[permissions.pop()]
770 773
771 774 handler, resp = sendv2request(
772 775 self._ui,
@@ -942,7 +945,10 b' class httpv2peer(object):'
942 945 # Integer priority for the service. If we could choose from multiple
943 946 # services, we choose the one with the highest priority.
944 947 API_PEERS = {
945 wireprototypes.HTTP_WIREPROTO_V2: {b'init': httpv2peer, b'priority': 50,},
948 wireprototypes.HTTP_WIREPROTO_V2: {
949 b'init': httpv2peer,
950 b'priority': 50,
951 },
946 952 }
947 953
948 954
@@ -9,12 +9,12 b' from . import util as interfaceutil'
9 9
10 10 class idirstate(interfaceutil.Interface):
11 11 def __init__(opener, ui, root, validate, sparsematchfn):
12 '''Create a new dirstate object.
12 """Create a new dirstate object.
13 13
14 14 opener is an open()-like callable that can be used to open the
15 15 dirstate file; root is the root of the directory tracked by
16 16 the dirstate.
17 '''
17 """
18 18
19 19 # TODO: all these private methods and attributes should be made
20 20 # public or removed from the interface.
@@ -31,17 +31,17 b' class idirstate(interfaceutil.Interface)'
31 31
32 32 @contextlib.contextmanager
33 33 def parentchange():
34 '''Context manager for handling dirstate parents.
34 """Context manager for handling dirstate parents.
35 35
36 36 If an exception occurs in the scope of the context manager,
37 37 the incoherent dirstate won't be written when wlock is
38 38 released.
39 '''
39 """
40 40
41 41 def pendingparentchange():
42 '''Returns true if the dirstate is in the middle of a set of changes
42 """Returns true if the dirstate is in the middle of a set of changes
43 43 that modify the dirstate parent.
44 '''
44 """
45 45
46 46 def hasdir(d):
47 47 pass
@@ -50,18 +50,18 b' class idirstate(interfaceutil.Interface)'
50 50 pass
51 51
52 52 def getcwd():
53 '''Return the path from which a canonical path is calculated.
53 """Return the path from which a canonical path is calculated.
54 54
55 55 This path should be used to resolve file patterns or to convert
56 56 canonical paths back to file paths for display. It shouldn't be
57 57 used to get real file paths. Use vfs functions instead.
58 '''
58 """
59 59
60 60 def pathto(f, cwd=None):
61 61 pass
62 62
63 63 def __getitem__(key):
64 '''Return the current state of key (a filename) in the dirstate.
64 """Return the current state of key (a filename) in the dirstate.
65 65
66 66 States are:
67 67 n normal
@@ -69,7 +69,7 b' class idirstate(interfaceutil.Interface)'
69 69 r marked for removal
70 70 a marked for addition
71 71 ? not tracked
72 '''
72 """
73 73
74 74 def __contains__(key):
75 75 """Check if bytestring `key` is known to the dirstate."""
@@ -111,11 +111,11 b' class idirstate(interfaceutil.Interface)'
111 111 pass
112 112
113 113 def invalidate():
114 '''Causes the next access to reread the dirstate.
114 """Causes the next access to reread the dirstate.
115 115
116 116 This is different from localrepo.invalidatedirstate() because it always
117 117 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
118 check whether the dirstate has changed before rereading it.'''
118 check whether the dirstate has changed before rereading it."""
119 119
120 120 def copy(source, dest):
121 121 """Mark dest as a copy of source. Unmark dest if source is None."""
@@ -127,7 +127,7 b' class idirstate(interfaceutil.Interface)'
127 127 pass
128 128
129 129 def normal(f, parentfiledata=None):
130 '''Mark a file normal and clean.
130 """Mark a file normal and clean.
131 131
132 132 parentfiledata: (mode, size, mtime) of the clean file
133 133
@@ -135,7 +135,7 b' class idirstate(interfaceutil.Interface)'
135 135 size), as or close as possible from the point where we
136 136 determined the file was clean, to limit the risk of the
137 137 file having been changed by an external process between the
138 moment where the file was determined to be clean and now.'''
138 moment where the file was determined to be clean and now."""
139 139 pass
140 140
141 141 def normallookup(f):
@@ -157,7 +157,7 b' class idirstate(interfaceutil.Interface)'
157 157 '''Drop a file from the dirstate'''
158 158
159 159 def normalize(path, isknown=False, ignoremissing=False):
160 '''
160 """
161 161 normalize the case of a pathname when on a casefolding filesystem
162 162
163 163 isknown specifies whether the filename came from walking the
@@ -172,7 +172,7 b' class idirstate(interfaceutil.Interface)'
172 172 - version of name already stored in the dirstate
173 173 - version of name stored on disk
174 174 - version provided via command arguments
175 '''
175 """
176 176
177 177 def clear():
178 178 pass
@@ -181,11 +181,11 b' class idirstate(interfaceutil.Interface)'
181 181 pass
182 182
183 183 def identity():
184 '''Return identity of dirstate itself to detect changes in storage
184 """Return identity of dirstate itself to detect changes in storage
185 185
186 186 If identity of previous dirstate is equal to this, writing
187 187 changes based on the former dirstate out can keep consistency.
188 '''
188 """
189 189
190 190 def write(tr):
191 191 pass
@@ -201,7 +201,7 b' class idirstate(interfaceutil.Interface)'
201 201 """
202 202
203 203 def walk(match, subrepos, unknown, ignored, full=True):
204 '''
204 """
205 205 Walk recursively through the directory tree, finding all files
206 206 matched by match.
207 207
@@ -210,10 +210,10 b' class idirstate(interfaceutil.Interface)'
210 210 Return a dict mapping filename to stat-like object (either
211 211 mercurial.osutil.stat instance or return value of os.stat()).
212 212
213 '''
213 """
214 214
215 215 def status(match, subrepos, ignored, clean, unknown):
216 '''Determine the status of the working copy relative to the
216 """Determine the status of the working copy relative to the
217 217 dirstate and return a pair of (unsure, status), where status is of type
218 218 scmutil.status and:
219 219
@@ -227,12 +227,12 b' class idirstate(interfaceutil.Interface)'
227 227 status.clean:
228 228 files that have definitely not been modified since the
229 229 dirstate was written
230 '''
230 """
231 231
232 232 def matches(match):
233 '''
233 """
234 234 return files in the dirstate (in whatever state) filtered by match
235 '''
235 """
236 236
237 237 def savebackup(tr, backupname):
238 238 '''Save current dirstate into backup file'''
@@ -617,7 +617,7 b' class ifiledata(interfaceutil.Interface)'
617 617 """
618 618
619 619 def revision(node, raw=False):
620 """"Obtain fulltext data for a node.
620 """ "Obtain fulltext data for a node.
621 621
622 622 By default, any storage transformations are applied before the data
623 623 is returned. If ``raw`` is True, non-raw storage transformations
@@ -628,8 +628,7 b' class ifiledata(interfaceutil.Interface)'
628 628 """
629 629
630 630 def rawdata(node):
631 """Obtain raw data for a node.
632 """
631 """Obtain raw data for a node."""
633 632
634 633 def read(node):
635 634 """Resolve file fulltext data.
@@ -112,7 +112,7 b' class ConnectionManager(object):'
112 112 """
113 113 The connection manager must be able to:
114 114 * keep track of all existing
115 """
115 """
116 116
117 117 def __init__(self):
118 118 self._lock = threading.Lock()
@@ -675,8 +675,7 b' def safesend(self, str):'
675 675
676 676
677 677 def wrapgetresponse(cls):
678 """Wraps getresponse in cls with a broken-pipe sane version.
679 """
678 """Wraps getresponse in cls with a broken-pipe sane version."""
680 679
681 680 def safegetresponse(self):
682 681 # In safesend() we might set the _broken_pipe_resp
@@ -96,8 +96,7 b' urlreq = util.urlreq'
96 96
97 97
98 98 class _basefilecache(scmutil.filecache):
99 """All filecache usage on repo are done for logic that should be unfiltered
100 """
99 """All filecache usage on repo are done for logic that should be unfiltered"""
101 100
102 101 def __get__(self, repo, type=None):
103 102 if repo is None:
@@ -400,8 +399,8 b' class localpeer(repository.peer):'
400 399
401 400 @interfaceutil.implementer(repository.ipeerlegacycommands)
402 401 class locallegacypeer(localpeer):
403 '''peer extension which implements legacy methods too; used for tests with
404 restricted capabilities'''
402 """peer extension which implements legacy methods too; used for tests with
403 restricted capabilities"""
405 404
406 405 def __init__(self, repo):
407 406 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
@@ -440,7 +439,7 b' featuresetupfuncs = set()'
440 439
441 440
442 441 def _getsharedvfs(hgvfs, requirements):
443 """ returns the vfs object pointing to root of shared source
442 """returns the vfs object pointing to root of shared source
444 443 repo for a shared repository
445 444
446 445 hgvfs is vfs pointing at .hg/ of current repo (shared one)
@@ -465,7 +464,7 b' def _getsharedvfs(hgvfs, requirements):'
465 464
466 465
467 466 def _readrequires(vfs, allowmissing):
468 """ reads the require file present at root of this vfs
467 """reads the require file present at root of this vfs
469 468 and returns a set of requirements
470 469
471 470 If allowmissing is True, we suppress ENOENT if raised"""
@@ -1756,7 +1755,7 b' class localrepository(object):'
1756 1755 return iter(self.changelog)
1757 1756
1758 1757 def revs(self, expr, *args):
1759 '''Find revisions matching a revset.
1758 """Find revisions matching a revset.
1760 1759
1761 1760 The revset is specified as a string ``expr`` that may contain
1762 1761 %-formatting to escape certain types. See ``revsetlang.formatspec``.
@@ -1767,30 +1766,30 b' class localrepository(object):'
1767 1766
1768 1767 Returns a smartset.abstractsmartset, which is a list-like interface
1769 1768 that contains integer revisions.
1770 '''
1769 """
1771 1770 tree = revsetlang.spectree(expr, *args)
1772 1771 return revset.makematcher(tree)(self)
1773 1772
1774 1773 def set(self, expr, *args):
1775 '''Find revisions matching a revset and emit changectx instances.
1774 """Find revisions matching a revset and emit changectx instances.
1776 1775
1777 1776 This is a convenience wrapper around ``revs()`` that iterates the
1778 1777 result and is a generator of changectx instances.
1779 1778
1780 1779 Revset aliases from the configuration are not expanded. To expand
1781 1780 user aliases, consider calling ``scmutil.revrange()``.
1782 '''
1781 """
1783 1782 for r in self.revs(expr, *args):
1784 1783 yield self[r]
1785 1784
1786 1785 def anyrevs(self, specs, user=False, localalias=None):
1787 '''Find revisions matching one of the given revsets.
1786 """Find revisions matching one of the given revsets.
1788 1787
1789 1788 Revset aliases from the configuration are not expanded by default. To
1790 1789 expand user aliases, specify ``user=True``. To provide some local
1791 1790 definitions overriding user aliases, set ``localalias`` to
1792 1791 ``{name: definitionstring}``.
1793 '''
1792 """
1794 1793 if specs == [b'null']:
1795 1794 return revset.baseset([nullrev])
1796 1795 if specs == [b'.']:
@@ -1822,8 +1821,8 b' class localrepository(object):'
1822 1821
1823 1822 @filteredpropertycache
1824 1823 def _tagscache(self):
1825 '''Returns a tagscache object that contains various tag-related
1826 caches.'''
1824 """Returns a tagscache object that contains various tag-related
1825 caches."""
1827 1826
1828 1827 # This simplifies its cache management by having one decorated
1829 1828 # function (this one) and the rest simply fetch things from it.
@@ -1861,12 +1860,12 b' class localrepository(object):'
1861 1860 return t
1862 1861
1863 1862 def _findtags(self):
1864 '''Do the hard work of finding tags. Return a pair of dicts
1863 """Do the hard work of finding tags. Return a pair of dicts
1865 1864 (tags, tagtypes) where tags maps tag name to node, and tagtypes
1866 1865 maps tag name to a string like \'global\' or \'local\'.
1867 1866 Subclasses or extensions are free to add their own tags, but
1868 1867 should be aware that the returned dicts will be retained for the
1869 duration of the localrepo object.'''
1868 duration of the localrepo object."""
1870 1869
1871 1870 # XXX what tagtype should subclasses/extensions use? Currently
1872 1871 # mq and bookmarks add tags, but do not set the tagtype at all.
@@ -1897,13 +1896,13 b' class localrepository(object):'
1897 1896 return (tags, tagtypes)
1898 1897
1899 1898 def tagtype(self, tagname):
1900 '''
1899 """
1901 1900 return the type of the given tag. result can be:
1902 1901
1903 1902 'local' : a local tag
1904 1903 'global' : a global tag
1905 1904 None : tag does not exist
1906 '''
1905 """
1907 1906
1908 1907 return self._tagscache.tagtypes.get(tagname)
1909 1908
@@ -1933,8 +1932,8 b' class localrepository(object):'
1933 1932 return self._bookmarks.names(node)
1934 1933
1935 1934 def branchmap(self):
1936 '''returns a dictionary {branch: [branchheads]} with branchheads
1937 ordered by increasing revision number'''
1935 """returns a dictionary {branch: [branchheads]} with branchheads
1936 ordered by increasing revision number"""
1938 1937 return self._branchcaches[self]
1939 1938
1940 1939 @unfilteredmethod
@@ -1944,13 +1943,13 b' class localrepository(object):'
1944 1943 return self._revbranchcache
1945 1944
1946 1945 def branchtip(self, branch, ignoremissing=False):
1947 '''return the tip node for a given branch
1946 """return the tip node for a given branch
1948 1947
1949 1948 If ignoremissing is True, then this method will not raise an error.
1950 1949 This is helpful for callers that only expect None for a missing branch
1951 1950 (e.g. namespace).
1952 1951
1953 '''
1952 """
1954 1953 try:
1955 1954 return self.branchmap().branchtip(branch)
1956 1955 except KeyError:
@@ -2014,7 +2013,7 b' class localrepository(object):'
2014 2013
2015 2014 def filectx(self, path, changeid=None, fileid=None, changectx=None):
2016 2015 """changeid must be a changeset revision, if specified.
2017 fileid can be a file revision or node."""
2016 fileid can be a file revision or node."""
2018 2017 return context.filectx(
2019 2018 self, path, changeid, fileid, changectx=changectx
2020 2019 )
@@ -2311,8 +2310,7 b' class localrepository(object):'
2311 2310 tr.addfinalize(b'flush-fncache', self.store.write)
2312 2311
2313 2312 def txnclosehook(tr2):
2314 """To be run if transaction is successful, will schedule a hook run
2315 """
2313 """To be run if transaction is successful, will schedule a hook run"""
2316 2314 # Don't reference tr2 in hook() so we don't hold a reference.
2317 2315 # This reduces memory consumption when there are multiple
2318 2316 # transactions per lock. This can likely go away if issue5045
@@ -2362,8 +2360,7 b' class localrepository(object):'
2362 2360 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2363 2361
2364 2362 def txnaborthook(tr2):
2365 """To be run if transaction is aborted
2366 """
2363 """To be run if transaction is aborted"""
2367 2364 reporef().hook(
2368 2365 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2369 2366 )
@@ -2620,14 +2617,14 b' class localrepository(object):'
2620 2617 self._quick_access_changeid_invalidate()
2621 2618
2622 2619 def invalidatedirstate(self):
2623 '''Invalidates the dirstate, causing the next call to dirstate
2620 """Invalidates the dirstate, causing the next call to dirstate
2624 2621 to check if it was modified since the last time it was read,
2625 2622 rereading it if it has.
2626 2623
2627 2624 This is different from dirstate.invalidate() in that it doesn't always
2628 2625 reread the dirstate. Use dirstate.invalidate() if you want to
2629 2626 explicitly read the dirstate again (i.e. restoring it to a previous
2630 known good state).'''
2627 known good state)."""
2631 2628 if hasunfilteredcache(self, 'dirstate'):
2632 2629 for k in self.dirstate._filecache:
2633 2630 try:
@@ -2637,13 +2634,13 b' class localrepository(object):'
2637 2634 delattr(self.unfiltered(), 'dirstate')
2638 2635
2639 2636 def invalidate(self, clearfilecache=False):
2640 '''Invalidates both store and non-store parts other than dirstate
2637 """Invalidates both store and non-store parts other than dirstate
2641 2638
2642 2639 If a transaction is running, invalidation of store is omitted,
2643 2640 because discarding in-memory changes might cause inconsistency
2644 2641 (e.g. incomplete fncache causes unintentional failure, but
2645 2642 redundant one doesn't).
2646 '''
2643 """
2647 2644 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2648 2645 for k in list(self._filecache.keys()):
2649 2646 # dirstate is invalidated separately in invalidatedirstate()
@@ -2673,8 +2670,8 b' class localrepository(object):'
2673 2670 self.store.invalidatecaches()
2674 2671
2675 2672 def invalidateall(self):
2676 '''Fully invalidates both store and non-store parts, causing the
2677 subsequent operation to reread any outside changes.'''
2673 """Fully invalidates both store and non-store parts, causing the
2674 subsequent operation to reread any outside changes."""
2678 2675 # extension should hook this to invalidate its caches
2679 2676 self.invalidate()
2680 2677 self.invalidatedirstate()
@@ -2689,7 +2686,13 b' class localrepository(object):'
2689 2686 ce.refresh()
2690 2687
2691 2688 def _lock(
2692 self, vfs, lockname, wait, releasefn, acquirefn, desc,
2689 self,
2690 vfs,
2691 lockname,
2692 wait,
2693 releasefn,
2694 acquirefn,
2695 desc,
2693 2696 ):
2694 2697 timeout = 0
2695 2698 warntimeout = 0
@@ -2726,12 +2729,12 b' class localrepository(object):'
2726 2729 callback(True)
2727 2730
2728 2731 def lock(self, wait=True):
2729 '''Lock the repository store (.hg/store) and return a weak reference
2732 """Lock the repository store (.hg/store) and return a weak reference
2730 2733 to the lock. Use this before modifying the store (e.g. committing or
2731 2734 stripping). If you are opening a transaction, get a lock as well.)
2732 2735
2733 2736 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2734 'wlock' first to avoid a dead-lock hazard.'''
2737 'wlock' first to avoid a dead-lock hazard."""
2735 2738 l = self._currentlock(self._lockref)
2736 2739 if l is not None:
2737 2740 l.lock()
@@ -2749,13 +2752,13 b' class localrepository(object):'
2749 2752 return l
2750 2753
2751 2754 def wlock(self, wait=True):
2752 '''Lock the non-store parts of the repository (everything under
2755 """Lock the non-store parts of the repository (everything under
2753 2756 .hg except .hg/store) and return a weak reference to the lock.
2754 2757
2755 2758 Use this before modifying files in .hg.
2756 2759
2757 2760 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2758 'wlock' first to avoid a dead-lock hazard.'''
2761 'wlock' first to avoid a dead-lock hazard."""
2759 2762 l = self._wlockref and self._wlockref()
2760 2763 if l is not None and l.held:
2761 2764 l.lock()
@@ -2963,7 +2966,7 b' class localrepository(object):'
2963 2966
2964 2967 @unfilteredmethod
2965 2968 def destroying(self):
2966 '''Inform the repository that nodes are about to be destroyed.
2969 """Inform the repository that nodes are about to be destroyed.
2967 2970 Intended for use by strip and rollback, so there's a common
2968 2971 place for anything that has to be done before destroying history.
2969 2972
@@ -2972,7 +2975,7 b' class localrepository(object):'
2972 2975 destroyed is imminent, the repo will be invalidated causing those
2973 2976 changes to stay in memory (waiting for the next unlock), or vanish
2974 2977 completely.
2975 '''
2978 """
2976 2979 # When using the same lock to commit and strip, the phasecache is left
2977 2980 # dirty after committing. Then when we strip, the repo is invalidated,
2978 2981 # causing those changes to disappear.
@@ -2981,10 +2984,10 b' class localrepository(object):'
2981 2984
2982 2985 @unfilteredmethod
2983 2986 def destroyed(self):
2984 '''Inform the repository that nodes have been destroyed.
2987 """Inform the repository that nodes have been destroyed.
2985 2988 Intended for use by strip and rollback, so there's a common
2986 2989 place for anything that has to be done after destroying history.
2987 '''
2990 """
2988 2991 # When one tries to:
2989 2992 # 1) destroy nodes thus calling this method (e.g. strip)
2990 2993 # 2) use phasecache somewhere (e.g. commit)
@@ -3067,13 +3070,13 b' class localrepository(object):'
3067 3070 return sorted(heads, key=self.changelog.rev, reverse=True)
3068 3071
3069 3072 def branchheads(self, branch=None, start=None, closed=False):
3070 '''return a (possibly filtered) list of heads for the given branch
3073 """return a (possibly filtered) list of heads for the given branch
3071 3074
3072 3075 Heads are returned in topological order, from newest to oldest.
3073 3076 If branch is None, use the dirstate branch.
3074 3077 If start is not None, return only heads reachable from start.
3075 3078 If closed is True, return heads that are marked as closed as well.
3076 '''
3079 """
3077 3080 if branch is None:
3078 3081 branch = self[None].branch()
3079 3082 branches = self.branchmap()
@@ -3352,10 +3355,10 b' def newreporequirements(ui, createopts):'
3352 3355
3353 3356
3354 3357 def checkrequirementscompat(ui, requirements):
3355 """ Checks compatibility of repository requirements enabled and disabled.
3358 """Checks compatibility of repository requirements enabled and disabled.
3356 3359
3357 3360 Returns a set of requirements which need to be dropped because dependent
3358 requirements are not enabled. Also warns users about it """
3361 requirements are not enabled. Also warns users about it"""
3359 3362
3360 3363 dropped = set()
3361 3364
@@ -175,14 +175,14 b' def trylock(ui, vfs, lockname, timeout, '
175 175
176 176
177 177 class lock(object):
178 '''An advisory lock held by one process to control access to a set
178 """An advisory lock held by one process to control access to a set
179 179 of files. Non-cooperating processes or incorrectly written scripts
180 180 can ignore Mercurial's locking scheme and stomp all over the
181 181 repository, so don't do that.
182 182
183 183 Typically used via localrepository.lock() to lock the repository
184 184 store (.hg/store/) or localrepository.wlock() to lock everything
185 else under .hg/.'''
185 else under .hg/."""
186 186
187 187 # lock is symlink on platforms that support it, file on others.
188 188
@@ -417,8 +417,7 b' class changesetprinter(object):'
417 417 )
418 418
419 419 def _exthook(self, ctx):
420 '''empty method used by extensions as a hook point
421 '''
420 """empty method used by extensions as a hook point"""
422 421
423 422 def _showpatch(self, ctx, graphwidth=0):
424 423 if self._includestat:
@@ -509,13 +508,13 b' class changesetformatter(changesetprinte'
509 508
510 509
511 510 class changesettemplater(changesetprinter):
512 '''format changeset information.
511 """format changeset information.
513 512
514 513 Note: there are a variety of convenience functions to build a
515 514 changesettemplater for common cases. See functions such as:
516 515 maketemplater, changesetdisplayer, buildcommittemplate, or other
517 516 functions that use changeset_templater.
518 '''
517 """
519 518
520 519 # Arguments before "buffered" used to be positional. Consider not
521 520 # adding/removing arguments before "buffered" to not break callers.
@@ -141,7 +141,10 b' def pullremotenames(localrepo, remoterep'
141 141
142 142 with remoterepo.commandexecutor() as e:
143 143 bookmarks = e.callcommand(
144 b'listkeys', {b'namespace': b'bookmarks',}
144 b'listkeys',
145 {
146 b'namespace': b'bookmarks',
147 },
145 148 ).result()
146 149
147 150 # on a push, we don't want to keep obsolete heads since
@@ -44,10 +44,10 b' if pycompat.TYPE_CHECKING:'
44 44
45 45
46 46 class STARTTLS(smtplib.SMTP):
47 '''Derived class to verify the peer certificate for STARTTLS.
47 """Derived class to verify the peer certificate for STARTTLS.
48 48
49 49 This class allows passing any keyword arguments to SSL socket creation.
50 '''
50 """
51 51
52 52 def __init__(self, ui, host=None, **kwargs):
53 53 smtplib.SMTP.__init__(self, **kwargs)
@@ -76,10 +76,10 b' class STARTTLS(smtplib.SMTP):'
76 76
77 77
78 78 class SMTPS(smtplib.SMTP):
79 '''Derived class to verify the peer certificate for SMTPS.
79 """Derived class to verify the peer certificate for SMTPS.
80 80
81 81 This class allows passing any keyword arguments to SSL socket creation.
82 '''
82 """
83 83
84 84 def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
85 85 self.keyfile = keyfile
@@ -221,8 +221,8 b' def _mbox(mbox, sender, recipients, msg)'
221 221
222 222
223 223 def connect(ui, mbox=None):
224 '''make a mail connection. return a function to send mail.
225 call as sendmail(sender, list-of-recipients, msg).'''
224 """make a mail connection. return a function to send mail.
225 call as sendmail(sender, list-of-recipients, msg)."""
226 226 if mbox:
227 227 open(mbox, b'wb').close()
228 228 return lambda s, r, m: _mbox(mbox, s, r, m)
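A sketch of the documented calling convention; everything except connect() itself (the ui object, the addresses, and the flattened message bytes) is an assumption:

    from mercurial import mail

    sendmail = mail.connect(ui)  # returns a send function
    sendmail(
        b'me@example.org',       # sender
        [b'you@example.org'],    # list of recipients
        msg,                     # the message to deliver
    )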
@@ -267,11 +267,11 b' def codec2iana(cs):'
267 267
268 268 def mimetextpatch(s, subtype='plain', display=False):
269 269 # type: (bytes, str, bool) -> email.message.Message
270 '''Return MIME message suitable for a patch.
270 """Return MIME message suitable for a patch.
271 271 Charset will be detected by first trying to decode as us-ascii, then utf-8,
272 272 and finally the global encodings. If all those fail, fall back to
273 273 ISO-8859-1, an encoding that allows all byte sequences.
274 Transfer encodings will be used if necessary.'''
274 Transfer encodings will be used if necessary."""
275 275
276 276 cs = [
277 277 'us-ascii',
@@ -293,9 +293,9 b" def mimetextpatch(s, subtype='plain', di"
293 293
294 294 def mimetextqp(body, subtype, charset):
295 295 # type: (bytes, str, str) -> email.message.Message
296 '''Return MIME message.
296 """Return MIME message.
297 297 Quoted-printable transfer encoding will be used if necessary.
298 '''
298 """
299 299 cs = email.charset.Charset(charset)
300 300 msg = email.message.Message()
301 301 msg.set_type('text/' + subtype)
@@ -337,11 +337,11 b' def _charsets(ui):'
337 337
338 338 def _encode(ui, s, charsets):
339 339 # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
340 '''Returns (converted) string, charset tuple.
340 """Returns (converted) string, charset tuple.
341 341 Finds out best charset by cycling through sendcharsets in descending
342 342 order. Tries both encoding and fallbackencoding for input. Only as a
343 343 last resort, send as-is in fake ascii.
344 Caveat: Do not use for mail parts containing patches!'''
344 Caveat: Do not use for mail parts containing patches!"""
345 345 sendcharsets = charsets or _charsets(ui)
346 346 if not isinstance(s, bytes):
347 347 # We have unicode data, which we need to try and encode to
@@ -427,9 +427,9 b' def addressencode(ui, address, charsets='
427 427
428 428 def addrlistencode(ui, addrs, charsets=None, display=False):
429 429 # type: (Any, List[bytes], List[str], bool) -> List[str]
430 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
430 """Turns a list of addresses into a list of RFC-2047 compliant headers.
431 431 A single element of the input list may contain multiple addresses, but the output
432 always has one address per item'''
432 always has one address per item"""
433 433 straddrs = []
434 434 for a in addrs:
435 435 assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
@@ -447,8 +447,8 b' def addrlistencode(ui, addrs, charsets=N'
447 447
448 448 def mimeencode(ui, s, charsets=None, display=False):
449 449 # type: (Any, bytes, List[str], bool) -> email.message.Message
450 '''creates mime text object, encodes it if needed, and sets
451 charset and transfer-encoding accordingly.'''
450 """creates mime text object, encodes it if needed, and sets
451 charset and transfer-encoding accordingly."""
452 452 cs = 'us-ascii'
453 453 if not display:
454 454 s, cs = _encode(ui, s, charsets)
@@ -528,8 +528,8 b' class manifestdict(object):'
528 528 return dir in self._dirs
529 529
530 530 def _filesfastpath(self, match):
531 '''Checks whether we can correctly and quickly iterate over matcher
532 files instead of over manifest files.'''
531 """Checks whether we can correctly and quickly iterate over matcher
532 files instead of over manifest files."""
533 533 files = match.files()
534 534 return len(files) < 100 and (
535 535 match.isexact()
@@ -537,13 +537,13 b' class manifestdict(object):'
537 537 )
538 538
539 539 def walk(self, match):
540 '''Generates matching file names.
540 """Generates matching file names.
541 541
542 542 Equivalent to manifest.matches(match).iterkeys(), but without creating
543 543 an entirely new manifest.
544 544
545 545 It also reports nonexistent files by marking them bad with match.bad().
546 '''
546 """
547 547 if match.always():
548 548 for f in iter(self):
549 549 yield f
@@ -591,7 +591,7 b' class manifestdict(object):'
591 591 return m
592 592
593 593 def diff(self, m2, match=None, clean=False):
594 '''Finds changes between the current manifest and m2.
594 """Finds changes between the current manifest and m2.
595 595
596 596 Args:
597 597 m2: the manifest to which this manifest should be compared.
@@ -604,7 +604,7 b' class manifestdict(object):'
604 604 in the current/other manifest. Where the file does not exist,
605 605 the nodeid will be None and the flags will be the empty
606 606 string.
607 '''
607 """
608 608 if match:
609 609 m1 = self._matches(match)
610 610 m2 = m2._matches(match)
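A sketch of consuming the documented return value, assuming ctx1 and ctx2 are changectx objects whose manifests are compared:

    # each changed path maps to ((n1, fl1), (n2, fl2)); a None nodeid
    # means the file is absent on that side, per the docstring above
    m1, m2 = ctx1.manifest(), ctx2.manifest()
    for path, ((n1, fl1), (n2, fl2)) in m1.diff(m2).items():
        if n1 is None:
            kind = 'added'
        elif n2 is None:
            kind = 'removed'
        else:
            kind = 'modified'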
@@ -703,14 +703,14 b' class manifestdict(object):'
703 703
704 704
705 705 def _msearch(m, s, lo=0, hi=None):
706 '''return a tuple (start, end) that says where to find s within m.
706 """return a tuple (start, end) that says where to find s within m.
707 707
708 708 If the string is found, m[start:end] is the line containing
709 709 that string. If start == end the string was not found and
710 710 they indicate the proper sorted insertion point.
711 711
712 712 m should be a buffer, a memoryview or a byte string.
713 s is a byte string'''
713 s is a byte string"""
714 714
715 715 def advance(i, c):
716 716 while i < lenm and m[i : i + 1] != c:
@@ -909,14 +909,14 b' class treemanifest(object):'
909 909 )
910 910
911 911 def dir(self):
912 '''The directory that this tree manifest represents, including a
913 trailing '/'. Empty string for the repo root directory.'''
912 """The directory that this tree manifest represents, including a
913 trailing '/'. Empty string for the repo root directory."""
914 914 return self._dir
915 915
916 916 def node(self):
917 '''The node of this instance. nullid for unsaved instances. Should
917 """The node of this instance. nullid for unsaved instances. Should
918 918 be updated when the instance is read or written from a revlog.
919 '''
919 """
920 920 assert not self._dirty
921 921 return self._node
922 922
@@ -1157,10 +1157,10 b' class treemanifest(object):'
1157 1157 return dirslash in self._dirs or dirslash in self._lazydirs
1158 1158
1159 1159 def walk(self, match):
1160 '''Generates matching file names.
1160 """Generates matching file names.
1161 1161
1162 1162 It also reports nonexistent files by marking them bad with match.bad().
1163 '''
1163 """
1164 1164 if match.always():
1165 1165 for f in iter(self):
1166 1166 yield f
@@ -1202,8 +1202,7 b' class treemanifest(object):'
1202 1202 yield f
1203 1203
1204 1204 def _matches(self, match):
1205 '''recursively generate a new manifest filtered by the match argument.
1206 '''
1205 """recursively generate a new manifest filtered by the match argument."""
1207 1206 if match.always():
1208 1207 return self.copy()
1209 1208 return self._matches_inner(match)
@@ -1253,7 +1252,7 b' class treemanifest(object):'
1253 1252 raise FastdeltaUnavailable()
1254 1253
1255 1254 def diff(self, m2, match=None, clean=False):
1256 '''Finds changes between the current manifest and m2.
1255 """Finds changes between the current manifest and m2.
1257 1256
1258 1257 Args:
1259 1258 m2: the manifest to which this manifest should be compared.
@@ -1266,7 +1265,7 b' class treemanifest(object):'
1266 1265 in the current/other manifest. Where the file does not exist,
1267 1266 the nodeid will be None and the flags will be the empty
1268 1267 string.
1269 '''
1268 """
1270 1269 if match and not match.always():
1271 1270 m1 = self._matches(match)
1272 1271 m2 = m2._matches(match)
@@ -1546,9 +1545,9 b' class FastdeltaUnavailable(Exception):'
1546 1545
1547 1546 @interfaceutil.implementer(repository.imanifeststorage)
1548 1547 class manifestrevlog(object):
1549 '''A revlog that stores manifest texts. This is responsible for caching the
1548 """A revlog that stores manifest texts. This is responsible for caching the
1550 1549 full-text manifest contents.
1551 '''
1550 """
1552 1551
1553 1552 def __init__(
1554 1553 self,
@@ -2077,12 +2076,12 b' class manifestctx(object):'
2077 2076 return self._data
2078 2077
2079 2078 def readfast(self, shallow=False):
2080 '''Calls either readdelta or read, based on which would be less work.
2079 """Calls either readdelta or read, based on which would be less work.
2081 2080 readdelta is called if the delta is against the p1, and therefore can be
2082 2081 read quickly.
2083 2082
2084 2083 If `shallow` is True, nothing changes since this is a flat manifest.
2085 '''
2084 """
2086 2085 store = self._storage()
2087 2086 r = store.rev(self._node)
2088 2087 deltaparent = store.deltaparent(r)
@@ -2091,12 +2090,12 b' class manifestctx(object):'
2091 2090 return self.read()
2092 2091
2093 2092 def readdelta(self, shallow=False):
2094 '''Returns a manifest containing just the entries that are present
2093 """Returns a manifest containing just the entries that are present
2095 2094 in this manifest, but not in its p1 manifest. This is efficient to read
2096 2095 if the revlog delta is already p1.
2097 2096
2098 2097 Changing the value of `shallow` has no effect on flat manifests.
2099 '''
2098 """
2100 2099 store = self._storage()
2101 2100 r = store.rev(self._node)
2102 2101 d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
@@ -2208,7 +2207,7 b' class treemanifestctx(object):'
2208 2207 return self._storage().parents(self._node)
2209 2208
2210 2209 def readdelta(self, shallow=False):
2211 '''Returns a manifest containing just the entries that are present
2210 """Returns a manifest containing just the entries that are present
2212 2211 in this manifest, but not in its p1 manifest. This is efficient to read
2213 2212 if the revlog delta is already p1.
2214 2213
@@ -2217,7 +2216,7 b' class treemanifestctx(object):'
2217 2216 subdirectory entry will be reported as it appears in the manifest, i.e.
2218 2217 the subdirectory will be reported among files and distinguished only by
2219 2218 its 't' flag.
2220 '''
2219 """
2221 2220 store = self._storage()
2222 2221 if shallow:
2223 2222 r = store.rev(self._node)
@@ -2237,13 +2236,13 b' class treemanifestctx(object):'
2237 2236 return md
2238 2237
2239 2238 def readfast(self, shallow=False):
2240 '''Calls either readdelta or read, based on which would be less work.
2239 """Calls either readdelta or read, based on which would be less work.
2241 2240 readdelta is called if the delta is against the p1, and therefore can be
2242 2241 read quickly.
2243 2242
2244 2243 If `shallow` is True, it only returns the entries from this manifest,
2245 2244 and not any submanifests.
2246 '''
2245 """
2247 2246 store = self._storage()
2248 2247 r = store.rev(self._node)
2249 2248 deltaparent = store.deltaparent(r)
@@ -47,8 +47,8 b' propertycache = util.propertycache'
47 47
48 48
49 49 def _rematcher(regex):
50 '''compile the regexp with the best available regexp engine and return a
51 matcher function'''
50 """compile the regexp with the best available regexp engine and return a
51 matcher function"""
52 52 m = util.re.compile(regex)
53 53 try:
54 54 # slightly faster, provided by facebook's re2 bindings
@@ -82,8 +82,8 b' def _expandsets(cwd, kindpats, ctx=None,'
82 82
83 83
84 84 def _expandsubinclude(kindpats, root):
85 '''Returns the list of subinclude matcher args and the kindpats without the
86 subincludes in it.'''
85 """Returns the list of subinclude matcher args and the kindpats without the
86 subincludes in it."""
87 87 relmatchers = []
88 88 other = []
89 89
@@ -107,7 +107,7 b' def _expandsubinclude(kindpats, root):'
107 107
108 108
109 109 def _kindpatsalwaysmatch(kindpats):
110 """"Checks whether the kindspats match everything, as e.g.
110 """ "Checks whether the kindspats match everything, as e.g.
111 111 'relpath:.' does.
112 112 """
113 113 for kind, pat, source in kindpats:
@@ -117,11 +117,21 b' def _kindpatsalwaysmatch(kindpats):'
117 117
118 118
119 119 def _buildkindpatsmatcher(
120 matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
120 matchercls,
121 root,
122 cwd,
123 kindpats,
124 ctx=None,
125 listsubrepos=False,
126 badfn=None,
121 127 ):
122 128 matchers = []
123 129 fms, kindpats = _expandsets(
124 cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
130 cwd,
131 kindpats,
132 ctx=ctx,
133 listsubrepos=listsubrepos,
134 badfn=badfn,
125 135 )
126 136 if kindpats:
127 137 m = matchercls(root, kindpats, badfn=badfn)
@@ -321,8 +331,8 b' def badmatch(match, badfn):'
321 331
322 332
323 333 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
324 '''Convert 'kind:pat' from the patterns list to tuples with kind and
325 normalized and rooted patterns and with listfiles expanded.'''
334 """Convert 'kind:pat' from the patterns list to tuples with kind and
335 normalized and rooted patterns and with listfiles expanded."""
326 336 kindpats = []
327 337 for kind, pat in [_patsplit(p, default) for p in patterns]:
328 338 if kind in cwdrelativepatternkinds:
@@ -383,8 +393,8 b' class basematcher(object):'
383 393 # Callbacks related to how the matcher is used by dirstate.walk.
384 394 # Subscribers to these events must monkeypatch the matcher object.
385 395 def bad(self, f, msg):
386 '''Callback from dirstate.walk for each explicit file that can't be
387 found/accessed, with an error message.'''
396 """Callback from dirstate.walk for each explicit file that can't be
397 found/accessed, with an error message."""
388 398
389 399 # If an traversedir is set, it will be called when a directory discovered
390 400 # by recursive traversal is visited.
@@ -395,11 +405,11 b' class basematcher(object):'
395 405 return []
396 406
397 407 def files(self):
398 '''Explicitly listed files or patterns or roots:
408 """Explicitly listed files or patterns or roots:
399 409 if no patterns or .always(): empty list,
400 410 if exact: list exact files,
401 411 if not .anypats(): list all files and dirs,
402 else: optimal roots'''
412 else: optimal roots"""
403 413 return self._files
404 414
405 415 @propertycache
@@ -414,18 +424,18 b' class basematcher(object):'
414 424 return False
415 425
416 426 def visitdir(self, dir):
417 '''Decides whether a directory should be visited based on whether it
427 """Decides whether a directory should be visited based on whether it
418 428 has potential matches in it or one of its subdirectories. This is
419 429 based on the match's primary, included, and excluded patterns.
420 430
421 431 Returns the string 'all' if the given directory and all subdirectories
422 432 should be visited. Otherwise returns True or False indicating whether
423 433 the given directory should be visited.
424 '''
434 """
425 435 return True
426 436
427 437 def visitchildrenset(self, dir):
428 '''Decides whether a directory should be visited based on whether it
438 """Decides whether a directory should be visited based on whether it
429 439 has potential matches in it or one of its subdirectories, and
430 440 potentially lists which subdirectories of that directory should be
431 441 visited. This is based on the match's primary, included, and excluded
@@ -464,27 +474,27 b' class basematcher(object):'
464 474 indicating that there are no files in this dir to investigate (or
465 475 equivalently that if there are files to investigate in 'dir' that it
466 476 will always return 'this').
467 '''
477 """
468 478 return b'this'
469 479
470 480 def always(self):
471 '''Matcher will match everything and .files() will be empty --
472 optimization might be possible.'''
481 """Matcher will match everything and .files() will be empty --
482 optimization might be possible."""
473 483 return False
474 484
475 485 def isexact(self):
476 '''Matcher will match exactly the list of files in .files() --
477 optimization might be possible.'''
486 """Matcher will match exactly the list of files in .files() --
487 optimization might be possible."""
478 488 return False
479 489
480 490 def prefix(self):
481 '''Matcher will match the paths in .files() recursively --
482 optimization might be possible.'''
491 """Matcher will match the paths in .files() recursively --
492 optimization might be possible."""
483 493 return False
484 494
485 495 def anypats(self):
486 '''None of .always(), .isexact(), and .prefix() is true --
487 optimizations will be difficult.'''
496 """None of .always(), .isexact(), and .prefix() is true --
497 optimizations will be difficult."""
488 498 return not self.always() and not self.isexact() and not self.prefix()
489 499
490 500
@@ -734,7 +744,7 b' class includematcher(basematcher):'
734 744
735 745
736 746 class exactmatcher(basematcher):
737 r'''Matches the input files exactly. They are interpreted as paths, not
747 r"""Matches the input files exactly. They are interpreted as paths, not
738 748 patterns (so no kind-prefixes).
739 749
740 750 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
@@ -752,7 +762,7 b' class exactmatcher(basematcher):'
752 762 False
753 763 >>> m(br're:.*\.c$')
754 764 True
755 '''
765 """
756 766
757 767 def __init__(self, files, badfn=None):
758 768 super(exactmatcher, self).__init__(badfn)
@@ -799,11 +809,11 b' class exactmatcher(basematcher):'
799 809
800 810
801 811 class differencematcher(basematcher):
802 '''Composes two matchers by matching if the first matches and the second
812 """Composes two matchers by matching if the first matches and the second
803 813 does not.
804 814
805 815 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
806 '''
816 """
807 817
808 818 def __init__(self, m1, m2):
809 819 super(differencematcher, self).__init__()
@@ -868,10 +878,10 b' class differencematcher(basematcher):'
868 878
869 879
870 880 def intersectmatchers(m1, m2):
871 '''Composes two matchers by matching if both of them match.
881 """Composes two matchers by matching if both of them match.
872 882
873 883 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
874 '''
884 """
875 885 if m1 is None or m2 is None:
876 886 return m1 or m2
877 887 if m1.always():
@@ -1166,7 +1176,7 b' class unionmatcher(basematcher):'
1166 1176
1167 1177
1168 1178 def patkind(pattern, default=None):
1169 r'''If pattern is 'kind:pat' with a known kind, return kind.
1179 r"""If pattern is 'kind:pat' with a known kind, return kind.
1170 1180
1171 1181 >>> patkind(br're:.*\.c$')
1172 1182 're'
@@ -1177,7 +1187,7 b' def patkind(pattern, default=None):'
1177 1187 >>> patkind(b'main.py')
1178 1188 >>> patkind(b'main.py', default=b're')
1179 1189 're'
1180 '''
1190 """
1181 1191 return _patsplit(pattern, default)[0]
1182 1192
1183 1193
@@ -1192,7 +1202,7 b' def _patsplit(pattern, default):'
1192 1202
1193 1203
1194 1204 def _globre(pat):
1195 r'''Convert an extended glob string to a regexp string.
1205 r"""Convert an extended glob string to a regexp string.
1196 1206
1197 1207 >>> from . import pycompat
1198 1208 >>> def bprint(s):
@@ -1213,7 +1223,7 b' def _globre(pat):'
1213 1223 (?:a|b)
1214 1224 >>> bprint(_globre(br'.\*\?'))
1215 1225 \.\*\?
1216 '''
1226 """
1217 1227 i, n = 0, len(pat)
1218 1228 res = b''
1219 1229 group = 0
@@ -1276,9 +1286,9 b' def _globre(pat):'
1276 1286
1277 1287
1278 1288 def _regex(kind, pat, globsuffix):
1279 '''Convert a (normalized) pattern of any kind into a
1289 """Convert a (normalized) pattern of any kind into a
1280 1290 regular expression.
1281 globsuffix is appended to the regexp of globs.'''
1291 globsuffix is appended to the regexp of globs."""
1282 1292 if not pat and kind in (b'glob', b'relpath'):
1283 1293 return b''
1284 1294 if kind == b're':
@@ -1312,8 +1322,8 b' def _regex(kind, pat, globsuffix):'
1312 1322
1313 1323
1314 1324 def _buildmatch(kindpats, globsuffix, root):
1315 '''Return regexp string and a matcher function for kindpats.
1316 globsuffix is appended to the regexp of globs.'''
1325 """Return regexp string and a matcher function for kindpats.
1326 globsuffix is appended to the regexp of globs."""
1317 1327 matchfuncs = []
1318 1328
1319 1329 subincludes, kindpats = _expandsubinclude(kindpats, root)
@@ -1422,13 +1432,13 b' def _buildregexmatch(kindpats, globsuffi'
1422 1432
1423 1433
1424 1434 def _patternrootsanddirs(kindpats):
1425 '''Returns roots and directories corresponding to each pattern.
1435 """Returns roots and directories corresponding to each pattern.
1426 1436
1427 1437 This calculates the roots and directories exactly matching the patterns and
1428 1438 returns a tuple of (roots, dirs) for each. It does not return other
1429 1439 directories which may also need to be considered, like the parent
1430 1440 directories.
1431 '''
1441 """
1432 1442 r = []
1433 1443 d = []
1434 1444 for kind, pat, source in kindpats:
@@ -1459,7 +1469,7 b' def _roots(kindpats):'
1459 1469
1460 1470
1461 1471 def _rootsdirsandparents(kindpats):
1462 '''Returns roots and exact directories from patterns.
1472 """Returns roots and exact directories from patterns.
1463 1473
1464 1474 `roots` are directories to match recursively, `dirs` should
1465 1475 be matched non-recursively, and `parents` are the implicitly required
@@ -1486,7 +1496,7 b' def _rootsdirsandparents(kindpats):'
1486 1496 ... (b'relre', b'rr', b'')])
1487 1497 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1488 1498 (['', '', ''], []) ['']
1489 '''
1499 """
1490 1500 r, d = _patternrootsanddirs(kindpats)
1491 1501
1492 1502 p = set()
@@ -1503,13 +1513,13 b' def _rootsdirsandparents(kindpats):'
1503 1513
1504 1514
1505 1515 def _explicitfiles(kindpats):
1506 '''Returns the potential explicit filenames from the patterns.
1516 """Returns the potential explicit filenames from the patterns.
1507 1517
1508 1518 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1509 1519 ['foo/bar']
1510 1520 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1511 1521 []
1512 '''
1522 """
1513 1523 # Keep only the pattern kinds where one can specify filenames (vs only
1514 1524 # directory names).
1515 1525 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
@@ -1528,7 +1538,7 b' def _prefix(kindpats):'
1528 1538
1529 1539
1530 1540 def readpatternfile(filepath, warn, sourceinfo=False):
1531 '''parse a pattern file, returning a list of
1541 """parse a pattern file, returning a list of
1532 1542 patterns. These patterns should be given to compile()
1533 1543 to be validated and converted into a match function.
1534 1544
@@ -1549,7 +1559,7 b' def readpatternfile(filepath, warn, sour'
1549 1559 if sourceinfo is set, returns a list of tuples:
1550 1560 (pattern, lineno, originalline).
1551 1561 This is useful to debug ignore patterns.
1552 '''
1562 """
1553 1563
1554 1564 syntaxes = {
1555 1565 b're': b'relre:',
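An example of the kind of file readpatternfile() parses, in the familiar .hgignore form (entries hypothetical); syntax: lines switch the default pattern kind for the lines that follow:

    syntax: glob
    *.pyc
    build/

    syntax: re
    ^dist/.*\.tmp$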
@@ -39,7 +39,7 b' splitnewlines = bdiff.splitnewlines'
39 39
40 40 # TODO: this looks like it could be an attrs, which might help pytype
41 41 class diffopts(object):
42 '''context is the number of context lines
42 """context is the number of context lines
43 43 text treats all files as text
44 44 showfunc enables diff -p output
45 45 git enables the git extended patch format
@@ -50,7 +50,7 b' class diffopts(object):'
50 50 ignorewsamount ignores changes in the amount of whitespace
51 51 ignoreblanklines ignores changes whose lines are all blank
52 52 upgrade generates git diffs to avoid data loss
53 '''
53 """
54 54
55 55 _HAS_DYNAMIC_ATTRIBUTES = True
56 56
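A sketch of constructing the options object; the keyword arguments correspond to the attributes listed in the docstring (values hypothetical):

    from mercurial import mdiff

    opts = mdiff.diffopts(context=5, git=True, showfunc=True)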
@@ -217,7 +217,10 b' def _checkunknownfiles(repo, wctx, mctx,'
217 217 if config == b'warn':
218 218 warnconflicts.add(f)
219 219 mresult.addfile(
220 f, mergestatemod.ACTION_GET, (fl2, True), b'remote created',
220 f,
221 mergestatemod.ACTION_GET,
222 (fl2, True),
223 b'remote created',
221 224 )
222 225
223 226 for f in sorted(abortconflicts):
@@ -281,7 +284,10 b' def _forgetremoved(wctx, mctx, branchmer'
281 284 for f in wctx.removed():
282 285 if f not in mctx:
283 286 mresult.addfile(
284 f, mergestatemod.ACTION_FORGET, None, b"forget removed",
287 f,
288 mergestatemod.ACTION_FORGET,
289 None,
290 b"forget removed",
285 291 )
286 292
287 293
@@ -544,10 +550,10 b' def _filternarrowactions(narrowmatch, br'
544 550
545 551
546 552 class mergeresult(object):
547 '''An object representing result of merging manifests.
553 """An object representing result of merging manifests.
548 554
549 555 It has information about what actions need to be performed on dirstate,
550 mapping of divergent renames and other such cases.'''
556 mapping of divergent renames and other such cases."""
551 557
552 558 def __init__(self):
553 559 """
@@ -572,7 +578,7 b' class mergeresult(object):'
572 578 self._renamedelete = renamedelete
573 579
574 580 def addfile(self, filename, action, data, message):
575 """ adds a new file to the mergeresult object
581 """adds a new file to the mergeresult object
576 582
577 583 filename: file which we are adding
578 584 action: one of mergestatemod.ACTION_*
@@ -589,15 +595,15 b' class mergeresult(object):'
589 595 self._actionmapping[action][filename] = (data, message)
590 596
591 597 def getfile(self, filename, default_return=None):
592 """ returns (action, args, msg) about this file
598 """returns (action, args, msg) about this file
593 599
594 returns default_return if the file is not present """
600 returns default_return if the file is not present"""
595 601 if filename in self._filemapping:
596 602 return self._filemapping[filename]
597 603 return default_return
598 604
599 605 def files(self, actions=None):
600 """ returns files on which provided action needs to perfromed
606 """returns files on which provided action needs to perfromed
601 607
602 608 If actions is None, all files are returned
603 609 """
@@ -613,14 +619,14 b' class mergeresult(object):'
613 619 yield f
614 620
615 621 def removefile(self, filename):
616 """ removes a file from the mergeresult object as the file might
617 not merging anymore """
622 """removes a file from the mergeresult object as the file might
623 not merging anymore"""
618 624 action, data, message = self._filemapping[filename]
619 625 del self._filemapping[filename]
620 626 del self._actionmapping[action][filename]
621 627
622 628 def getactions(self, actions, sort=False):
623 """ get list of files which are marked with these actions
629 """get list of files which are marked with these actions
624 630 if sort is true, files for each action are sorted and then added
625 631
626 632 Returns a list of tuple of form (filename, data, message)
@@ -637,10 +643,10 b' class mergeresult(object):'
637 643 yield f, args, msg
638 644
639 645 def len(self, actions=None):
640 """ returns number of files which needs actions
646 """returns number of files which needs actions
641 647
642 648 if actions is passed, total of number of files in that action
643 only is returned """
649 only is returned"""
644 650
645 651 if actions is None:
646 652 return len(self._filemapping)
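A hypothetical usage sketch tying together the mergeresult methods documented above; the import paths and the ACTION_GET constant are assumed from mercurial's layout, not re-verified:

    from mercurial import merge, mergestate as mergestatemod

    mresult = merge.mergeresult()
    mresult.addfile(
        b'a.txt', mergestatemod.ACTION_GET, (b'', True), b'remote created'
    )
    # getfile() returns the stored (action, args, msg) triple; len() with no
    # arguments counts every tracked file.
    action, args, msg = mresult.getfile(b'a.txt')
    assert mresult.len() == 1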
@@ -656,8 +662,8 b' class mergeresult(object):'
656 662 yield key, val
657 663
658 664 def addcommitinfo(self, filename, key, value):
659 """ adds key-value information about filename which will be required
660 while committing this merge """
665 """adds key-value information about filename which will be required
666 while committing this merge"""
661 667 self._commitinfo[filename][key] = value
662 668
663 669 @property
@@ -674,8 +680,8 b' class mergeresult(object):'
674 680
675 681 @property
676 682 def actionsdict(self):
677 """ returns a dictionary of actions to be perfomed with action as key
678 and a list of files and related arguments as values """
683 """returns a dictionary of actions to be perfomed with action as key
684 and a list of files and related arguments as values"""
679 685 res = collections.defaultdict(list)
680 686 for a, d in pycompat.iteritems(self._actionmapping):
681 687 for f, (args, msg) in pycompat.iteritems(d):
@@ -689,8 +695,8 b' class mergeresult(object):'
689 695 self._actionmapping[act][f] = data, msg
690 696
691 697 def hasconflicts(self):
692 """ tells whether this merge resulted in some actions which can
693 result in conflicts or not """
698 """tells whether this merge resulted in some actions which can
699 result in conflicts or not"""
694 700 for a in self._actionmapping.keys():
695 701 if (
696 702 a
@@ -839,7 +845,10 b' def manifestmerge('
839 845 nol = b'l' not in fl1 + fl2 + fla
840 846 if n2 == a and fl2 == fla:
841 847 mresult.addfile(
842 f, mergestatemod.ACTION_KEEP, (), b'remote unchanged',
848 f,
849 mergestatemod.ACTION_KEEP,
850 (),
851 b'remote unchanged',
843 852 )
844 853 elif n1 == a and fl1 == fla: # local unchanged - use remote
845 854 if n1 == n2: # optimization: keep local content
@@ -936,11 +945,17 b' def manifestmerge('
936 945 # This file was locally added. We should forget it instead of
937 946 # deleting it.
938 947 mresult.addfile(
939 f, mergestatemod.ACTION_FORGET, None, b'remote deleted',
948 f,
949 mergestatemod.ACTION_FORGET,
950 None,
951 b'remote deleted',
940 952 )
941 953 else:
942 954 mresult.addfile(
943 f, mergestatemod.ACTION_REMOVE, None, b'other deleted',
955 f,
956 mergestatemod.ACTION_REMOVE,
957 None,
958 b'other deleted',
944 959 )
945 960 if branchmerge:
946 961 # the file must be absent after merging,
@@ -1086,7 +1101,7 b' def manifestmerge('
1086 1101
1087 1102 def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
1088 1103 """Resolves false conflicts where the nodeid changed but the content
1089 remained the same."""
1104 remained the same."""
1090 1105 # We force a copy of actions.items() because we're going to mutate
1091 1106 # actions as we resolve trivial conflicts.
1092 1107 for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
@@ -1423,7 +1438,13 b' def _prefetchfiles(repo, ctx, mresult):'
1423 1438 prefetch = scmutil.prefetchfiles
1424 1439 matchfiles = scmutil.matchfiles
1425 1440 prefetch(
1426 repo, [(ctx.rev(), matchfiles(repo, files),)],
1441 repo,
1442 [
1443 (
1444 ctx.rev(),
1445 matchfiles(repo, files),
1446 )
1447 ],
1427 1448 )
1428 1449
1429 1450
@@ -1444,7 +1465,13 b' class updateresult(object):'
1444 1465
1445 1466
1446 1467 def applyupdates(
1447 repo, mresult, wctx, mctx, overwrite, wantfiledata, labels=None,
1468 repo,
1469 mresult,
1470 wctx,
1471 mctx,
1472 overwrite,
1473 wantfiledata,
1474 labels=None,
1448 1475 ):
1449 1476 """apply the merge action list to the working directory
1450 1477
@@ -1734,7 +1761,8 b' def _advertisefsmonitor(repo, num_gets, '
1734 1761 if dirstate.rustmod is not None:
1735 1762 # When using rust status, fsmonitor becomes necessary at higher sizes
1736 1763 fsmonitorthreshold = repo.ui.configint(
1737 b'fsmonitor', b'warn_update_file_count_rust',
1764 b'fsmonitor',
1765 b'warn_update_file_count_rust',
1738 1766 )
1739 1767
1740 1768 try:
@@ -2001,7 +2029,10 b' def _update('
2001 2029 0,
2002 2030 ):
2003 2031 mresult.addfile(
2004 f, mergestatemod.ACTION_REMOVE, None, b'prompt delete',
2032 f,
2033 mergestatemod.ACTION_REMOVE,
2034 None,
2035 b'prompt delete',
2005 2036 )
2006 2037 elif f in p1:
2007 2038 mresult.addfile(
@@ -2012,7 +2043,10 b' def _update('
2012 2043 )
2013 2044 else:
2014 2045 mresult.addfile(
2015 f, mergestatemod.ACTION_ADD, None, b'prompt keep',
2046 f,
2047 mergestatemod.ACTION_ADD,
2048 None,
2049 b'prompt keep',
2016 2050 )
2017 2051 elif m == mergestatemod.ACTION_DELETED_CHANGED:
2018 2052 f1, f2, fa, move, anc = args
@@ -2089,7 +2123,13 b' def _update('
2089 2123
2090 2124 wantfiledata = updatedirstate and not branchmerge
2091 2125 stats, getfiledata = applyupdates(
2092 repo, mresult, wc, p2, overwrite, wantfiledata, labels=labels,
2126 repo,
2127 mresult,
2128 wc,
2129 p2,
2130 overwrite,
2131 wantfiledata,
2132 labels=labels,
2093 2133 )
2094 2134
2095 2135 if updatedirstate:
@@ -132,7 +132,7 b' NO_OP_ACTIONS = ('
132 132
133 133
134 134 class _mergestate_base(object):
135 '''track 3-way merge state of individual files
135 """track 3-way merge state of individual files
136 136
137 137 The merge state is stored on disk when needed. Two files are used: one with
138 138 an old format (version 1), and one with a new format (version 2). Version 2
@@ -164,7 +164,7 b' class _mergestate_base(object):'
164 164
165 165 The resolve command transitions between 'u' and 'r' for conflicts and
166 166 'pu' and 'pr' for path conflicts.
167 '''
167 """
168 168
169 169 def __init__(self, repo):
170 170 """Initialize the merge state.
@@ -275,8 +275,8 b' class _mergestate_base(object):'
275 275 self._dirty = True
276 276
277 277 def addcommitinfo(self, path, data):
278 """ stores information which is required at commit
279 into _stateextras """
278 """stores information which is required at commit
279 into _stateextras"""
280 280 self._stateextras[path].update(data)
281 281 self._dirty = True
282 282
@@ -254,8 +254,7 b' def compute_all_files_changes(ctx):'
254 254
255 255
256 256 def _process_root(ctx):
257 """compute the appropriate changed files for a changeset with no parents
258 """
257 """compute the appropriate changed files for a changeset with no parents"""
259 258 # Simple, there was nothing before it, so everything is added.
260 259 md = ChangingFiles()
261 260 manifest = ctx.manifest()
@@ -265,8 +264,7 b' def _process_root(ctx):'
265 264
266 265
267 266 def _process_linear(parent_ctx, children_ctx, parent=1):
268 """compute the appropriate changed files for a changeset with a single parent
269 """
267 """compute the appropriate changed files for a changeset with a single parent"""
270 268 md = ChangingFiles()
271 269 parent_manifest = parent_ctx.manifest()
272 270 children_manifest = children_ctx.manifest()
@@ -515,8 +513,7 b' def _missing_from_all_ancestors(mas, fil'
515 513
516 514
517 515 def computechangesetfilesadded(ctx):
518 """return the list of files added in a changeset
519 """
516 """return the list of files added in a changeset"""
520 517 added = []
521 518 for f in ctx.files():
522 519 if not any(f in p for p in ctx.parents()):
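The containment test above is simple enough to demo with plain sets standing in for parent contexts; a self-contained sketch:

    parents = [{b'a'}, {b'a', b'b'}]  # files present in each parent
    files = [b'a', b'c']              # files touched by the changeset
    added = [f for f in files if not any(f in p for p in parents)]
    assert added == [b'c']            # only b'c' is absent from all parents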
@@ -580,8 +577,7 b' def get_removal_filter(ctx, x=None):'
580 577
581 578
582 579 def computechangesetfilesremoved(ctx):
583 """return the list of files removed in a changeset
584 """
580 """return the list of files removed in a changeset"""
585 581 removed = []
586 582 for f in ctx.files():
587 583 if f not in ctx:
@@ -593,8 +589,7 b' def computechangesetfilesremoved(ctx):'
593 589
594 590
595 591 def computechangesetfilesmerged(ctx):
596 """return the list of files merged in a changeset
597 """
592 """return the list of files merged in a changeset"""
598 593 merged = []
599 594 if len(ctx.parents()) < 2:
600 595 return merged
@@ -52,7 +52,7 b' def subsubsubsubsection(s):'
52 52
53 53
54 54 def replace(text, substs):
55 '''
55 """
56 56 Apply a list of (find, replace) pairs to a text.
57 57
58 58 >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
@@ -63,7 +63,7 b' def replace(text, substs):'
63 63 >>> encoding.encoding = b'shiftjis'
64 64 >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
65 65 '\\x81\\\\'
66 '''
66 """
67 67
68 68 # some character encodings (cp932 for Japanese, at least) use
69 69 # ASCII characters other than control/alphabet/digit as a part of
@@ -322,10 +322,10 b' def prunecontainers(blocks, keep):'
322 322
323 323
324 324 def findtables(blocks):
325 '''Find simple tables
325 """Find simple tables
326 326
327 Only simple one-line table elements are supported
328 '''
327 Only simple one-line table elements are supported
328 """
329 329
330 330 for block in blocks:
331 331 # Searching for a block that looks like this:
@@ -432,7 +432,11 b' def addmargins(blocks):'
432 432 while i < len(blocks):
433 433 if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
434 434 b'type'
435 ] in (b'bullet', b'option', b'field',):
435 ] in (
436 b'bullet',
437 b'option',
438 b'field',
439 ):
436 440 i += 1
437 441 elif not blocks[i - 1][b'lines']:
438 442 # no lines in previous block, do not separate
@@ -226,7 +226,7 b' def clearwcbackup(repo, backupname):'
226 226
227 227
228 228 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
229 r""" Restricts the patterns according to repo settings,
229 r"""Restricts the patterns according to repo settings,
230 230 results in a logical AND operation
231 231
232 232 :param req_includes: requested includes
@@ -998,8 +998,7 b' def _computephasedivergentset(repo):'
998 998
999 999 @cachefor(b'contentdivergent')
1000 1000 def _computecontentdivergentset(repo):
1001 """the set of rev that compete to be the final successors of some revision.
1002 """
1001 """the set of rev that compete to be the final successors of some revision."""
1003 1002 divergent = set()
1004 1003 obsstore = repo.obsstore
1005 1004 newermap = {}
@@ -381,7 +381,7 b' METABLACKLIST = ['
381 381
382 382
383 383 def metanotblacklisted(metaitem):
384 """ Check that the key of a meta item (extrakey, extravalue) does not
384 """Check that the key of a meta item (extrakey, extravalue) does not
385 385 match at least one of the blacklist patterns
386 386 """
387 387 metakey = metaitem[0]
@@ -439,7 +439,7 b' def _cmpdiff(leftctx, rightctx):'
439 439
440 440
441 441 def geteffectflag(source, successors):
442 """ From an obs-marker relation, compute what changed between the
442 """From an obs-marker relation, compute what changed between the
443 443 predecessor and the successor.
444 444 """
445 445 effects = 0
@@ -816,7 +816,7 b' def successorsandmarkers(repo, ctx):'
816 816
817 817
818 818 def _getobsfate(successorssets):
819 """ Compute a changeset obsolescence fate based on its successorssets.
819 """Compute a changeset obsolescence fate based on its successorssets.
820 820 Successors can be the tipmost ones or the immediate ones. This function's
821 821 return values are not meant to be shown directly to users; they are meant
822 822 to be used by internal functions only.
@@ -843,7 +843,7 b' def _getobsfate(successorssets):'
843 843
844 844
845 845 def obsfateverb(successorset, markers):
846 """ Return the verb summarizing the successorset and potentially using
846 """Return the verb summarizing the successorset and potentially using
847 847 information from the markers
848 848 """
849 849 if not successorset:
@@ -856,14 +856,12 b' def obsfateverb(successorset, markers):'
856 856
857 857
858 858 def markersdates(markers):
859 """returns the list of dates for a list of markers
860 """
859 """returns the list of dates for a list of markers"""
861 860 return [m[4] for m in markers]
862 861
863 862
864 863 def markersusers(markers):
865 """ Returns a sorted list of markers users without duplicates
866 """
864 """Returns a sorted list of markers users without duplicates"""
867 865 markersmeta = [dict(m[3]) for m in markers]
868 866 users = {
869 867 encoding.tolocal(meta[b'user'])
@@ -875,8 +873,7 b' def markersusers(markers):'
875 873
876 874
877 875 def markersoperations(markers):
878 """ Returns a sorted list of markers operations without duplicates
879 """
876 """Returns a sorted list of markers operations without duplicates"""
880 877 markersmeta = [dict(m[3]) for m in markers]
881 878 operations = {
882 879 meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
@@ -886,7 +883,7 b' def markersoperations(markers):'
886 883
887 884
888 885 def obsfateprinter(ui, repo, successors, markers, formatctx):
889 """ Build a obsfate string for a single successorset using all obsfate
886 """Build a obsfate string for a single successorset using all obsfate
890 887 related function defined in obsutil
891 888 """
892 889 quiet = ui.quiet
@@ -950,8 +947,7 b' filteredmsgtable = {'
950 947
951 948
952 949 def _getfilteredreason(repo, changeid, ctx):
953 """return a human-friendly string on why a obsolete changeset is hidden
954 """
950 """return a human-friendly string on why a obsolete changeset is hidden"""
955 951 successors = successorssets(repo, ctx.node())
956 952 fate = _getobsfate(successors)
957 953
@@ -406,8 +406,7 b' def matchtree(pattern, tree, placeholder'
406 406
407 407
408 408 def parseerrordetail(inst):
409 """Compose error message from specified ParseError object
410 """
409 """Compose error message from specified ParseError object"""
411 410 if inst.location is not None:
412 411 return _(b'at %d: %s') % (inst.location, inst.message)
413 412 else:
@@ -200,7 +200,7 b' patchheadermap = ['
200 200
201 201 @contextlib.contextmanager
202 202 def extract(ui, fileobj):
203 '''extract patch from data read from fileobj.
203 """extract patch from data read from fileobj.
204 204
205 205 patch can be a normal patch or contained in an email message.
206 206
@@ -214,7 +214,7 b' def extract(ui, fileobj):'
214 214 - p1,
215 215 - p2.
216 216 Any item can be missing from the dictionary. If filename is missing,
217 fileobj did not contain a patch. Caller must unlink filename when done.'''
217 fileobj did not contain a patch. Caller must unlink filename when done."""
218 218
219 219 fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
220 220 tmpfp = os.fdopen(fd, 'wb')
@@ -905,8 +905,7 b' class patchfile(object):'
905 905
906 906
907 907 class header(object):
908 """patch header
909 """
908 """patch header"""
910 909
911 910 diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
912 911 diff_re = re.compile(b'diff -r .* (.*)$')
@@ -1854,7 +1853,7 b' def parsepatch(originalchunks, maxcontex'
1854 1853
1855 1854
1856 1855 def pathtransform(path, strip, prefix):
1857 '''turn a path from a patch into a path suitable for the repository
1856 """turn a path from a patch into a path suitable for the repository
1858 1857
1859 1858 prefix, if not empty, is expected to be normalized with a / at the end.
1860 1859
@@ -1873,7 +1872,7 b' def pathtransform(path, strip, prefix):'
1873 1872 >>> pathtransform(b'a/b/c', 3, b'')
1874 1873 Traceback (most recent call last):
1875 1874 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1876 '''
1875 """
1877 1876 pathlen = len(path)
1878 1877 i = 0
1879 1878 if strip == 0:
@@ -2503,7 +2502,7 b' def diff('
2503 2502 copysourcematch=None,
2504 2503 hunksfilterfn=None,
2505 2504 ):
2506 '''yields diff of changes to files between two nodes, or node and
2505 """yields diff of changes to files between two nodes, or node and
2507 2506 working directory.
2508 2507
2509 2508 if node1 is None, use first dirstate parent instead.
@@ -2531,7 +2530,7 b' def diff('
2531 2530
2532 2531 hunksfilterfn, if not None, should be a function taking a filectx and
2533 2532 hunks generator that may yield filtered hunks.
2534 '''
2533 """
2535 2534 if not node1 and not node2:
2536 2535 node1 = repo.dirstate.p1()
2537 2536
@@ -2886,10 +2885,10 b' def diffui(*args, **kw):'
2886 2885
2887 2886
2888 2887 def _filepairs(modified, added, removed, copy, opts):
2889 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2888 """generates tuples (f1, f2, copyop), where f1 is the name of the file
2890 2889 before and f2 is the name after. For added files, f1 will be None,
2891 2890 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2892 or 'rename' (the latter two only if opts.git is set).'''
2891 or 'rename' (the latter two only if opts.git is set)."""
2893 2892 gone = set()
2894 2893
2895 2894 copyto = {v: k for k, v in copy.items()}
@@ -2948,13 +2947,13 b' def trydiff('
2948 2947 losedatafn,
2949 2948 pathfn,
2950 2949 ):
2951 '''given input data, generate a diff and yield it in blocks
2950 """given input data, generate a diff and yield it in blocks
2952 2951
2953 2952 If generating a diff would lose data like flags or binary data and
2954 2953 losedatafn is not None, it will be called.
2955 2954
2956 2955 pathfn is applied to every path in the diff output.
2957 '''
2956 """
2958 2957
2959 2958 if opts.noprefix:
2960 2959 aprefix = bprefix = b''
@@ -3079,7 +3078,7 b' def trydiff('
3079 3078
3080 3079
3081 3080 def diffcontent(data1, data2, header, binary, opts):
3082 """ diffs two versions of a file.
3081 """diffs two versions of a file.
3083 3082
3084 3083 data1 and data2 are tuples containing:
3085 3084
@@ -3241,9 +3240,9 b' def diffstat(lines, width=80):'
3241 3240
3242 3241
3243 3242 def diffstatui(*args, **kw):
3244 '''like diffstat(), but yields 2-tuples of (output, label) for
3243 """like diffstat(), but yields 2-tuples of (output, label) for
3245 3244 ui.write()
3246 '''
3245 """
3247 3246
3248 3247 for line in diffstat(*args, **kw).splitlines():
3249 3248 if line and line[-1] in b'+-':
@@ -24,7 +24,7 b' def _lowerclean(s):'
24 24
25 25
26 26 class pathauditor(object):
27 '''ensure that a filesystem path contains no banned components.
27 """ensure that a filesystem path contains no banned components.
28 28 the following properties of a path are checked:
29 29
30 30 - ends with a directory separator
@@ -44,7 +44,7 b' class pathauditor(object):'
44 44 If 'cached' is set to True, audited paths and sub-directories are cached.
45 45 Be careful to not keep the cache of unmanaged directories for long because
46 46 audited paths may be replaced with symlinks.
47 '''
47 """
48 48
49 49 def __init__(self, root, callback=None, realfs=True, cached=False):
50 50 self.audited = set()
@@ -59,8 +59,8 b' class pathauditor(object):'
59 59 self.normcase = lambda x: x
60 60
61 61 def __call__(self, path, mode=None):
62 '''Check the relative path.
63 path may contain a pattern (e.g. foodir/**.txt)'''
62 """Check the relative path.
63 path may contain a pattern (e.g. foodir/**.txt)"""
64 64
65 65 path = util.localpath(path)
66 66 normpath = self.normcase(path)
@@ -164,7 +164,7 b' class pathauditor(object):'
164 164
165 165
166 166 def canonpath(root, cwd, myname, auditor=None):
167 '''return the canonical path of myname, given cwd and root
167 """return the canonical path of myname, given cwd and root
168 168
169 169 >>> def check(root, cwd, myname):
170 170 ... a = pathauditor(root, realfs=False)
@@ -204,7 +204,7 b' def canonpath(root, cwd, myname, auditor'
204 204 'filename'
205 205 >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
206 206 'subdir/filename'
207 '''
207 """
208 208 if util.endswithsep(root):
209 209 rootsep = root
210 210 else:
@@ -266,7 +266,7 b' def canonpath(root, cwd, myname, auditor'
266 266
267 267
268 268 def normasprefix(path):
269 '''normalize the specified path as path prefix
269 """normalize the specified path as path prefix
270 270
271 271 Returned value can be used safely for "p.startswith(prefix)",
272 272 "p[len(prefix):]", and so on.
@@ -280,7 +280,7 b' def normasprefix(path):'
280 280 '/foo/bar/'
281 281 >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
282 282 '/'
283 '''
283 """
284 284 d, p = os.path.splitdrive(path)
285 285 if len(p) != len(pycompat.ossep):
286 286 return path + pycompat.ossep
@@ -300,9 +300,9 b' class dirs(object):'
300 300 '''a multiset of directory names from a set of file paths'''
301 301
302 302 def __init__(self, map, skip=None):
303 '''
303 """
304 304 a dict map indicates a dirstate while a list indicates a manifest
305 '''
305 """
306 306 self._dirs = {}
307 307 addpath = self.addpath
308 308 if isinstance(map, dict) and skip is not None:
@@ -76,7 +76,7 b' else:'
76 76
77 77
78 78 def split(p):
79 '''Same as posixpath.split, but faster
79 """Same as posixpath.split, but faster
80 80
81 81 >>> import posixpath
82 82 >>> for f in [b'/absolute/path/to/file',
@@ -88,7 +88,7 b' def split(p):'
88 88 ... b'///multiple_leading_separators_at_root',
89 89 ... b'']:
90 90 ... assert split(f) == posixpath.split(f), f
91 '''
91 """
92 92 ht = p.rsplit(b'/', 1)
93 93 if len(ht) == 1:
94 94 return b'', p
@@ -183,9 +183,9 b' def setflags(f, l, x):'
183 183
184 184
185 185 def copymode(src, dst, mode=None, enforcewritable=False):
186 '''Copy the file mode from the file at path src to dst.
186 """Copy the file mode from the file at path src to dst.
187 187 If src doesn't exist, we're using mode instead. If mode is None, we're
188 using umask.'''
188 using umask."""
189 189 try:
190 190 st_mode = os.lstat(src).st_mode & 0o777
191 191 except OSError as inst:
@@ -359,24 +359,24 b' def checklink(path):'
359 359
360 360
361 361 def checkosfilename(path):
362 '''Check that the base-relative path is a valid filename on this platform.
363 Returns None if the path is ok, or a UI string describing the problem.'''
362 """Check that the base-relative path is a valid filename on this platform.
363 Returns None if the path is ok, or a UI string describing the problem."""
364 364 return None # on posix platforms, every path is ok
365 365
366 366
367 367 def getfsmountpoint(dirpath):
368 '''Get the filesystem mount point from a directory (best-effort)
368 """Get the filesystem mount point from a directory (best-effort)
369 369
370 370 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
371 '''
371 """
372 372 return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
373 373
374 374
375 375 def getfstype(dirpath):
376 '''Get the filesystem type name from a directory (best-effort)
376 """Get the filesystem type name from a directory (best-effort)
377 377
378 378 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
379 '''
379 """
380 380 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
381 381
382 382
@@ -419,7 +419,7 b' normcasefallback = normcase'
419 419 if pycompat.isdarwin:
420 420
421 421 def normcase(path):
422 '''
422 """
423 423 Normalize a filename for OS X-compatible comparison:
424 424 - escape-encode invalid characters
425 425 - decompose to NFD
@@ -434,7 +434,7 b' if pycompat.isdarwin:'
434 434 'e\\xcc\\x81'
435 435 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
436 436 '%b8%ca%c3\\xca\\xbe%c8.jpg'
437 '''
437 """
438 438
439 439 try:
440 440 return encoding.asciilower(path) # exception for non-ASCII
@@ -475,7 +475,12 b" if pycompat.sysplatform == b'cygwin':"
475 475
476 476 # default mount points
477 477 cygwinmountpoints = sorted(
478 [b"/usr/bin", b"/usr/lib", b"/cygdrive",], reverse=True
478 [
479 b"/usr/bin",
480 b"/usr/lib",
481 b"/cygdrive",
482 ],
483 reverse=True,
479 484 )
480 485
481 486 # use upper-ing as normcase as same as NTFS workaround
@@ -553,10 +558,10 b' def isowner(st):'
553 558
554 559
555 560 def findexe(command):
556 '''Find executable for command searching like which does.
561 """Find executable for command searching like which does.
557 562 If command is a basename then PATH is searched for command.
558 563 PATH isn't searched if command is an absolute or relative path.
559 If command isn't found None is returned.'''
564 If command isn't found None is returned."""
560 565 if pycompat.sysplatform == b'OpenVMS':
561 566 return command
562 567
@@ -587,8 +592,8 b' def setsignalhandler():'
587 592
588 593
589 594 def statfiles(files):
590 '''Stat each file in files. Yield each stat, or None if a file does not
591 exist or has a type we don't care about.'''
595 """Stat each file in files. Yield each stat, or None if a file does not
596 exist or has a type we don't care about."""
592 597 lstat = os.lstat
593 598 getkind = stat.S_IFMT
594 599 for nf in files:
@@ -251,7 +251,7 b' class progbar(object):'
251 251 return False
252 252
253 253 def _calibrateestimate(self, topic, now, pos):
254 '''Adjust starttimes and startvals for topic so ETA works better
254 """Adjust starttimes and startvals for topic so ETA works better
255 255
256 256 If progress is non-linear (e.g. gets much slower in the last minute),
257 257 it's more friendly to only use a recent time span for ETA and speed
@@ -260,7 +260,7 b' class progbar(object):'
260 260 [======================================> ]
261 261 ^^^^^^^
262 262 estimateinterval, only use this for estimation
263 '''
263 """
264 264 interval = self.estimateinterval
265 265 if interval <= 0:
266 266 return
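An illustrative sketch of the arithmetic behind the calibrated ETA described above (a simplification, not the actual _calibrateestimate code): once starttimes/startvals have been rewound to the recent window, remaining time is remaining work divided by recent speed.

    def eta_seconds(now, starttime, startval, pos, total):
        # starttime/startval are assumed already clamped to the recent
        # estimateinterval window, as the docstring describes.
        elapsed = now - starttime
        done = pos - startval
        if elapsed <= 0 or done <= 0:
            return None
        return (total - pos) * elapsed / done

    assert eta_seconds(now=10.0, starttime=0.0, startval=0, pos=50, total=100) == 10.0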
@@ -21,17 +21,17 b' def isasciistr(s):'
21 21
22 22
23 23 def asciilower(s):
24 '''convert a string to lowercase if ASCII
24 """convert a string to lowercase if ASCII
25 25
26 Raises UnicodeDecodeError if non-ASCII characters are found.'''
26 Raises UnicodeDecodeError if non-ASCII characters are found."""
27 27 s.decode('ascii')
28 28 return s.lower()
29 29
30 30
31 31 def asciiupper(s):
32 '''convert a string to uppercase if ASCII
32 """convert a string to uppercase if ASCII
33 33
34 Raises UnicodeDecodeError if non-ASCII characters are found.'''
34 Raises UnicodeDecodeError if non-ASCII characters are found."""
35 35 s.decode('ascii')
36 36 return s.upper()
37 37
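The pure-Python fallbacks above are small enough to exercise directly; a self-contained sketch of the ASCII-only contract:

    def asciilower(s):
        s.decode('ascii')  # raises UnicodeDecodeError on non-ASCII bytes
        return s.lower()

    assert asciilower(b'Foo') == b'foo'
    try:
        asciilower(b'caf\xc3\xa9')
    except UnicodeDecodeError:
        pass  # non-ASCII input is rejected, as the docstring promises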
@@ -15,8 +15,7 b' stringio = pycompat.bytesio'
15 15
16 16
17 17 class mpatchError(Exception):
18 """error raised when a delta cannot be decoded
19 """
18 """error raised when a delta cannot be decoded"""
20 19
21 20
22 21 # This attempts to apply a series of patches in time proportional to
@@ -39,7 +39,7 b' def _mode_to_kind(mode):'
39 39
40 40
41 41 def listdir(path, stat=False, skip=None):
42 '''listdir(path, stat=False) -> list_of_tuples
42 """listdir(path, stat=False) -> list_of_tuples
43 43
44 44 Return a sorted list containing information about the entries
45 45 in the directory.
@@ -51,7 +51,7 b' def listdir(path, stat=False, skip=None)'
51 51 Otherwise, each element is a 2-tuple:
52 52
53 53 (name, type)
54 '''
54 """
55 55 result = []
56 56 prefix = path
57 57 if not prefix.endswith(pycompat.ossep):
@@ -222,7 +222,7 b' else:'
222 222 )
223 223
224 224 class posixfile(object):
225 '''a file object aiming for POSIX-like semantics
225 """a file object aiming for POSIX-like semantics
226 226
227 227 CPython's open() returns a file that was opened *without* setting the
228 228 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
@@ -231,7 +231,7 b' else:'
231 231 renamed and deleted while they are held open.
232 232 Note that if a file opened with posixfile is unlinked, the file
233 233 remains but cannot be opened again or be recreated under the same name,
234 until all reading processes have closed the file.'''
234 until all reading processes have closed the file."""
235 235
236 236 def __init__(self, name, mode=b'r', bufsize=-1):
237 237 if b'b' in mode:
@@ -290,11 +290,11 b' else:'
290 290 return getattr(self._file, name)
291 291
292 292 def __setattr__(self, name, value):
293 '''mimics the read-only attributes of Python file objects
293 """mimics the read-only attributes of Python file objects
294 294 by raising 'TypeError: readonly attribute' if someone tries:
295 295 f = posixfile('foo.txt')
296 296 f.name = 'bla'
297 '''
297 """
298 298 return self._file.__setattr__(name, value)
299 299
300 300 def __enter__(self):
@@ -234,8 +234,7 b' def parse_index2(data, inline):'
234 234
235 235
236 236 def parse_index_devel_nodemap(data, inline):
237 """like parse_index2, but alway return a PersistentNodeMapIndexObject
238 """
237 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
239 238 return PersistentNodeMapIndexObject(data), None
240 239
241 240
@@ -39,13 +39,13 b' def _expandrcpath(path):'
39 39
40 40
41 41 def envrcitems(env=None):
42 '''Return [(section, name, value, source)] config items.
42 """Return [(section, name, value, source)] config items.
43 43
44 44 The config items are extracted from environment variables specified by env,
45 45 used to override systemrc, but not userrc.
46 46
47 47 If env is not provided, encoding.environ will be used.
48 '''
48 """
49 49 if env is None:
50 50 env = encoding.environ
51 51 checklist = [
@@ -73,7 +73,7 b' def default_rc_resources():'
73 73
74 74
75 75 def rccomponents():
76 '''return an ordered [(type, obj)] about where to load configs.
76 """return an ordered [(type, obj)] about where to load configs.
77 77
78 78 respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
79 79 used. if $HGRCPATH is not set, the platform default will be used.
@@ -84,7 +84,7 b' def rccomponents():'
84 84 obj is a string, and is the config file path. if type is 'items', obj is a
85 85 list of (section, name, value, source) that should fill the config directly.
86 86 If type is 'resource', obj is a tuple of (package name, resource name).
87 '''
87 """
88 88 envrc = (b'items', envrcitems())
89 89
90 90 if b'HGRCPATH' in encoding.environ:
@@ -108,9 +108,9 b' def rccomponents():'
108 108
109 109
110 110 def defaultpagerenv():
111 '''return a dict of default environment variables and their values,
111 """return a dict of default environment variables and their values,
112 112 intended to be set before starting a pager.
113 '''
113 """
114 114 return {b'LESS': b'FRX', b'LV': b'-c'}
115 115
116 116
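A sketch of how such defaults are typically applied before spawning a pager; os.environ.setdefault is used so user-set values win (an illustration, not mercurial's own pager code):

    import os

    for k, v in {b'LESS': b'FRX', b'LV': b'-c'}.items():
        os.environ.setdefault(k.decode('ascii'), v.decode('ascii'))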
@@ -95,8 +95,7 b' class _funcregistrarbase(object):'
95 95 self._table.update(registrarbase._table)
96 96
97 97 def _parsefuncdecl(self, decl):
98 """Parse function declaration and return the name of function in it
99 """
98 """Parse function declaration and return the name of function in it"""
100 99 i = decl.find(b'(')
101 100 if i >= 0:
102 101 return decl[:i]
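A self-contained sketch of the declaration parsing above, with a stand-in mirroring the two lines shown in the hunk:

    def _parsefuncdecl(decl):
        i = decl.find(b'(')
        return decl[:i] if i >= 0 else decl

    assert _parsefuncdecl(b'author(string)') == b'author'
    assert _parsefuncdecl(b'all()') == b'all'
    assert _parsefuncdecl(b'tip') == b'tip'  # no parentheses: returned as-is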
@@ -121,8 +120,7 b' class _funcregistrarbase(object):'
121 120 return self._docformat % (decl, doc)
122 121
123 122 def _extrasetup(self, name, func):
124 """Execute extra setup for registered function, if needed
125 """
123 """Execute extra setup for registered function, if needed"""
126 124
127 125
128 126 class command(_funcregistrarbase):
@@ -345,8 +343,7 b' class filesetpredicate(_funcregistrarbas'
345 343
346 344
347 345 class _templateregistrarbase(_funcregistrarbase):
348 """Base of decorator to register functions as template specific one
349 """
346 """Base of decorator to register functions as template specific one"""
350 347
351 348 _docformat = b":%s: %s"
352 349
@@ -48,8 +48,7 b' def hideablerevs(repo):'
48 48
49 49
50 50 def pinnedrevs(repo):
51 """revisions blocking hidden changesets from being filtered
52 """
51 """revisions blocking hidden changesets from being filtered"""
53 52
54 53 cl = repo.changelog
55 54 pinned = set()
@@ -1491,8 +1491,8 b' class revlog(object):'
1491 1491
1492 1492 def lookup(self, id):
1493 1493 """locate a node based on:
1494 - revision number or str(revision number)
1495 - nodeid or subset of hex nodeid
1494 - revision number or str(revision number)
1495 - nodeid or subset of hex nodeid
1496 1496 """
1497 1497 n = self._match(id)
1498 1498 if n is not None:
@@ -1771,8 +1771,7 b' class revlog(object):'
1771 1771 return rev - 1
1772 1772
1773 1773 def issnapshot(self, rev):
1774 """tells whether rev is a snapshot
1775 """
1774 """tells whether rev is a snapshot"""
1776 1775 if not self._sparserevlog:
1777 1776 return self.deltaparent(rev) == nullrev
1778 1777 elif util.safehasattr(self.index, b'issnapshot'):
@@ -2037,8 +2036,7 b' class revlog(object):'
2037 2036 self._chunkclear()
2038 2037
2039 2038 def _nodeduplicatecallback(self, transaction, node):
2040 """called when trying to add a node already stored.
2041 """
2039 """called when trying to add a node already stored."""
2042 2040
2043 2041 def addrevision(
2044 2042 self,
@@ -86,8 +86,7 b' def setup_persistent_nodemap(tr, revlog)'
86 86
87 87
88 88 class _NoTransaction(object):
89 """transaction like object to update the nodemap outside a transaction
90 """
89 """transaction like object to update the nodemap outside a transaction"""
91 90
92 91 def __init__(self):
93 92 self._postclose = {}
@@ -129,8 +128,7 b' def update_persistent_nodemap(revlog):'
129 128
130 129
131 130 def _persist_nodemap(tr, revlog, pending=False):
132 """Write nodemap data on disk for a given revlog
133 """
131 """Write nodemap data on disk for a given revlog"""
134 132 if getattr(revlog, 'filteredrevs', ()):
135 133 raise error.ProgrammingError(
136 134 "cannot persist nodemap of a filtered changelog"
@@ -400,15 +398,13 b' def _other_rawdata_filepath(revlog, dock'
400 398
401 399
402 400 def persistent_data(index):
403 """return the persistent binary form for a nodemap for a given index
404 """
401 """return the persistent binary form for a nodemap for a given index"""
405 402 trie = _build_trie(index)
406 403 return _persist_trie(trie)
407 404
408 405
409 406 def update_persistent_data(index, root, max_idx, last_rev):
410 """return the incremental update for persistent nodemap from a given index
411 """
407 """return the incremental update for persistent nodemap from a given index"""
412 408 changed_block, trie = _update_trie(index, root, last_rev)
413 409 return (
414 410 changed_block * S_BLOCK.size,
@@ -529,8 +529,7 b' def ancestorspec(repo, subset, x, n, ord'
529 529
530 530 @predicate(b'author(string)', safe=True, weight=10)
531 531 def author(repo, subset, x):
532 """Alias for ``user(string)``.
533 """
532 """Alias for ``user(string)``."""
534 533 # i18n: "author" is a keyword
535 534 n = getstring(x, _(b"author requires a string"))
536 535 kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
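Predicates such as author() here are normally reached through repo.revs(); a hedged sketch, assuming mercurial's public layout (the repository path is illustrative):

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui.load(), b'/path/to/repo')
    # Combine predicates exactly as on the `hg log -r` command line.
    for rev in repo.revs(b"limit(author('alice') and merge(), 2)"):
        print(rev)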
@@ -737,8 +736,7 b' def _children(repo, subset, parentset):'
737 736
738 737 @predicate(b'children(set)', safe=True)
739 738 def children(repo, subset, x):
740 """Child changesets of changesets in set.
741 """
739 """Child changesets of changesets in set."""
742 740 s = getset(repo, fullreposet(repo), x)
743 741 cs = _children(repo, subset, s)
744 742 return subset & cs
@@ -746,8 +744,7 b' def children(repo, subset, x):'
746 744
747 745 @predicate(b'closed()', safe=True, weight=10)
748 746 def closed(repo, subset, x):
749 """Changeset is closed.
750 """
747 """Changeset is closed."""
751 748 # i18n: "closed" is a keyword
752 749 getargs(x, 0, 0, _(b"closed takes no arguments"))
753 750 return subset.filter(
@@ -771,8 +768,7 b' def _commonancestorheads(repo, subset, x'
771 768
772 769 @predicate(b'commonancestors(set)', safe=True)
773 770 def commonancestors(repo, subset, x):
774 """Changesets that are ancestors of every changeset in set.
775 """
771 """Changesets that are ancestors of every changeset in set."""
776 772 startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
777 773 if not startrevs:
778 774 return baseset()
@@ -868,8 +864,7 b' def converted(repo, subset, x):'
868 864
869 865 @predicate(b'date(interval)', safe=True, weight=10)
870 866 def date(repo, subset, x):
871 """Changesets within the interval, see :hg:`help dates`.
872 """
867 """Changesets within the interval, see :hg:`help dates`."""
873 868 # i18n: "date" is a keyword
874 869 ds = getstring(x, _(b"date requires a string"))
875 870 dm = dateutil.matchdate(ds)
@@ -1108,8 +1103,7 b' def extdata(repo, subset, x):'
1108 1103
1109 1104 @predicate(b'extinct()', safe=True)
1110 1105 def extinct(repo, subset, x):
1111 """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)
1112 """
1106 """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)"""
1113 1107 # i18n: "extinct" is a keyword
1114 1108 getargs(x, 0, 0, _(b"extinct takes no arguments"))
1115 1109 extincts = obsmod.getrevs(repo, b'extinct')
@@ -1216,8 +1210,7 b' def filelog(repo, subset, x):'
1216 1210
1217 1211 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
1218 1212 def first(repo, subset, x, order):
1219 """An alias for limit().
1220 """
1213 """An alias for limit()."""
1221 1214 return limit(repo, subset, x, order)
1222 1215
1223 1216
@@ -1341,8 +1334,7 b' def followlines(repo, subset, x):'
1341 1334
1342 1335 @predicate(b'all()', safe=True)
1343 1336 def getall(repo, subset, x):
1344 """All changesets, the same as ``0:tip``.
1345 """
1337 """All changesets, the same as ``0:tip``."""
1346 1338 # i18n: "all" is a keyword
1347 1339 getargs(x, 0, 0, _(b"all takes no arguments"))
1348 1340 return subset & spanset(repo) # drop "null" if any
@@ -1480,8 +1472,7 b' def hasfile(repo, subset, x):'
1480 1472
1481 1473 @predicate(b'head()', safe=True)
1482 1474 def head(repo, subset, x):
1483 """Changeset is a named branch head.
1484 """
1475 """Changeset is a named branch head."""
1485 1476 # i18n: "head" is a keyword
1486 1477 getargs(x, 0, 0, _(b"head takes no arguments"))
1487 1478 hs = set()
@@ -1493,8 +1484,7 b' def head(repo, subset, x):'
1493 1484
1494 1485 @predicate(b'heads(set)', safe=True, takeorder=True)
1495 1486 def heads(repo, subset, x, order):
1496 """Members of set with no children in set.
1497 """
1487 """Members of set with no children in set."""
1498 1488 # argument set should never define order
1499 1489 if order == defineorder:
1500 1490 order = followorder
@@ -1515,8 +1505,7 b' def heads(repo, subset, x, order):'
1515 1505
1516 1506 @predicate(b'hidden()', safe=True)
1517 1507 def hidden(repo, subset, x):
1518 """Hidden changesets.
1519 """
1508 """Hidden changesets."""
1520 1509 # i18n: "hidden" is a keyword
1521 1510 getargs(x, 0, 0, _(b"hidden takes no arguments"))
1522 1511 hiddenrevs = repoview.filterrevs(repo, b'visible')
@@ -1546,8 +1535,7 b' def keyword(repo, subset, x):'
1546 1535
1547 1536 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
1548 1537 def limit(repo, subset, x, order):
1549 """First n members of set, defaulting to 1, starting from offset.
1550 """
1538 """First n members of set, defaulting to 1, starting from offset."""
1551 1539 args = getargsdict(x, b'limit', b'set n offset')
1552 1540 if b'set' not in args:
1553 1541 # i18n: "limit" is a keyword
@@ -1571,8 +1559,7 b' def limit(repo, subset, x, order):'
1571 1559
1572 1560 @predicate(b'last(set, [n])', safe=True, takeorder=True)
1573 1561 def last(repo, subset, x, order):
1574 """Last n members of set, defaulting to 1.
1575 """
1562 """Last n members of set, defaulting to 1."""
1576 1563 # i18n: "last" is a keyword
1577 1564 l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
1578 1565 lim = 1
@@ -1592,8 +1579,7 b' def last(repo, subset, x, order):'
1592 1579
1593 1580 @predicate(b'max(set)', safe=True)
1594 1581 def maxrev(repo, subset, x):
1595 """Changeset with highest revision number in set.
1596 """
1582 """Changeset with highest revision number in set."""
1597 1583 os = getset(repo, fullreposet(repo), x)
1598 1584 try:
1599 1585 m = os.max()
@@ -1608,8 +1594,7 b' def maxrev(repo, subset, x):'
1608 1594
1609 1595 @predicate(b'merge()', safe=True)
1610 1596 def merge(repo, subset, x):
1611 """Changeset is a merge changeset.
1612 """
1597 """Changeset is a merge changeset."""
1613 1598 # i18n: "merge" is a keyword
1614 1599 getargs(x, 0, 0, _(b"merge takes no arguments"))
1615 1600 cl = repo.changelog
@@ -1626,8 +1611,7 b' def merge(repo, subset, x):'
1626 1611
1627 1612 @predicate(b'branchpoint()', safe=True)
1628 1613 def branchpoint(repo, subset, x):
1629 """Changesets with more than one child.
1630 """
1614 """Changesets with more than one child."""
1631 1615 # i18n: "branchpoint" is a keyword
1632 1616 getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
1633 1617 cl = repo.changelog
@@ -1648,8 +1632,7 b' def branchpoint(repo, subset, x):'
1648 1632
1649 1633 @predicate(b'min(set)', safe=True)
1650 1634 def minrev(repo, subset, x):
1651 """Changeset with lowest revision number in set.
1652 """
1635 """Changeset with lowest revision number in set."""
1653 1636 os = getset(repo, fullreposet(repo), x)
1654 1637 try:
1655 1638 m = os.min()
@@ -1715,8 +1698,7 b' def named(repo, subset, x):'
1715 1698
1716 1699 @predicate(b'id(string)', safe=True)
1717 1700 def node_(repo, subset, x):
1718 """Revision non-ambiguously specified by the given hex string prefix.
1719 """
1701 """Revision non-ambiguously specified by the given hex string prefix."""
1720 1702 # i18n: "id" is a keyword
1721 1703 l = getargs(x, 1, 1, _(b"id requires one argument"))
1722 1704 # i18n: "id" is a keyword
@@ -1747,8 +1729,7 b' def node_(repo, subset, x):'
1747 1729
1748 1730 @predicate(b'none()', safe=True)
1749 1731 def none(repo, subset, x):
1750 """No changesets.
1751 """
1732 """No changesets."""
1752 1733 # i18n: "none" is a keyword
1753 1734 getargs(x, 0, 0, _(b"none takes no arguments"))
1754 1735 return baseset()
@@ -1869,8 +1850,7 b' def outgoing(repo, subset, x):'
1869 1850
1870 1851 @predicate(b'p1([set])', safe=True)
1871 1852 def p1(repo, subset, x):
1872 """First parent of changesets in set, or the working directory.
1873 """
1853 """First parent of changesets in set, or the working directory."""
1874 1854 if x is None:
1875 1855 p = repo[x].p1().rev()
1876 1856 if p >= 0:
@@ -1892,8 +1872,7 b' def p1(repo, subset, x):'
1892 1872
1893 1873 @predicate(b'p2([set])', safe=True)
1894 1874 def p2(repo, subset, x):
1895 """Second parent of changesets in set, or the working directory.
1896 """
1875 """Second parent of changesets in set, or the working directory."""
1897 1876 if x is None:
1898 1877 ps = repo[x].parents()
1899 1878 try:
@@ -2305,8 +2284,7 b' def matching(repo, subset, x):'
2305 2284
2306 2285 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
2307 2286 def reverse(repo, subset, x, order):
2308 """Reverse order of set.
2309 """
2287 """Reverse order of set."""
2310 2288 l = getset(repo, subset, x, order)
2311 2289 if order == defineorder:
2312 2290 l.reverse()
@@ -2315,8 +2293,7 b' def reverse(repo, subset, x, order):'
2315 2293
2316 2294 @predicate(b'roots(set)', safe=True)
2317 2295 def roots(repo, subset, x):
2318 """Changesets in set with no parent changeset in set.
2319 """
2296 """Changesets in set with no parent changeset in set."""
2320 2297 s = getset(repo, fullreposet(repo), x)
2321 2298 parents = repo.changelog.parentrevs
2322 2299
@@ -2556,8 +2533,7 b' def tagged(repo, subset, x):'
2556 2533
2557 2534 @predicate(b'orphan()', safe=True)
2558 2535 def orphan(repo, subset, x):
2559 """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
2560 """
2536 """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)"""
2561 2537 # i18n: "orphan" is a keyword
2562 2538 getargs(x, 0, 0, _(b"orphan takes no arguments"))
2563 2539 orphan = obsmod.getrevs(repo, b'orphan')
@@ -2566,8 +2542,7 b' def orphan(repo, subset, x):'
2566 2542
2567 2543 @predicate(b'unstable()', safe=True)
2568 2544 def unstable(repo, subset, x):
2569 """Changesets with instabilities. (EXPERIMENTAL)
2570 """
2545 """Changesets with instabilities. (EXPERIMENTAL)"""
2571 2546 # i18n: "unstable" is a keyword
2572 2547 getargs(x, 0, 0, b'unstable takes no arguments')
2573 2548 _unstable = set()
@@ -2781,8 +2756,7 b' def makematcher(tree):'
2781 2756
2782 2757
2783 2758 def loadpredicate(ui, extname, registrarobj):
2784 """Load revset predicates from specified registrarobj
2785 """
2759 """Load revset predicates from specified registrarobj"""
2786 2760 for name, func in pycompat.iteritems(registrarobj._table):
2787 2761 symbols[name] = func
2788 2762 if func._safe:
@@ -83,7 +83,7 b' symbols = {}'
83 83
84 84
85 85 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
86 '''
86 """
87 87 Parse a revset statement into a stream of tokens
88 88
89 89 ``syminitletters`` is the set of valid characters for the initial
@@ -102,7 +102,7 b' def tokenize(program, lookup=None, symin'
102 102 >>> list(tokenize(b"@::"))
103 103 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
104 104
105 '''
105 """
106 106 if not isinstance(program, bytes):
107 107 raise error.ProgrammingError(
108 108 b'revset statement must be bytes, got %r' % program
@@ -621,8 +621,7 b' def expandaliases(tree, aliases, warn=No'
621 621
622 622
623 623 def foldconcat(tree):
624 """Fold elements to be concatenated by `##`
625 """
624 """Fold elements to be concatenated by `##`"""
626 625 if not isinstance(tree, tuple) or tree[0] in (
627 626 b'string',
628 627 b'symbol',
@@ -742,7 +741,7 b' def _formatparamexp(args, t):'
742 741
743 742
744 743 def formatspec(expr, *args):
745 '''
744 """
746 745 This is a convenience function for using revsets internally, and
747 746 escapes arguments appropriately. Aliases are intentionally ignored
748 747 so that intended expression behavior isn't accidentally subverted.
@@ -777,7 +776,7 b' def formatspec(expr, *args):'
777 776 "sort((:), 'desc', 'user')"
778 777 >>> formatspec(b'%ls', [b'a', b"'"])
779 778 "_list('a\\\\x00\\\\'')"
780 '''
779 """
781 780 parsed = _parseargs(expr, args)
782 781 ret = []
783 782 for t, arg in parsed:
@@ -66,11 +66,11 b' termsize = scmplatform.termsize'
66 66
67 67 @attr.s(slots=True, repr=False)
68 68 class status(object):
69 '''Struct with a list of files per status.
69 """Struct with a list of files per status.
70 70
71 71 The 'deleted', 'unknown' and 'ignored' properties are only
72 72 relevant to the working copy.
73 '''
73 """
74 74
75 75 modified = attr.ib(default=attr.Factory(list))
76 76 added = attr.ib(default=attr.Factory(list))
@@ -123,9 +123,9 b' def itersubrepos(ctx1, ctx2):'
123 123
124 124
125 125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull; excluded is None or a list of
126 """Report no changes for push/pull; excluded is None or a list of
127 127 nodes excluded from the push/pull.
128 '''
128 """
129 129 secretlist = []
130 130 if excluded:
131 131 for n in excluded:
@@ -335,8 +335,8 b' def checkportable(ui, f):'
335 335
336 336
337 337 def checkportabilityalert(ui):
338 '''check if the user's config requests nothing, a warning, or abort for
339 non-portable filenames'''
338 """check if the user's config requests nothing, a warning, or abort for
339 non-portable filenames"""
340 340 val = ui.config(b'ui', b'portablefilenames')
341 341 lval = val.lower()
342 342 bval = stringutil.parsebool(val)
@@ -402,8 +402,8 b' def filteredhash(repo, maxrev):'
402 402
403 403
404 404 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
405 '''yield every hg repository under path, always recursively.
406 The recurse flag will only control recursion into repo working dirs'''
405 """yield every hg repository under path, always recursively.
406 The recurse flag will only control recursion into repo working dirs"""
407 407
408 408 def errhandler(err):
409 409 if err.filename == path:
@@ -793,7 +793,7 b' def increasingwindows(windowsize=8, size'
793 793
794 794
795 795 def walkchangerevs(repo, revs, makefilematcher, prepare):
796 '''Iterate over files and the revs in a "windowed" way.
796 """Iterate over files and the revs in a "windowed" way.
797 797
798 798 Callers most commonly need to iterate backwards over the history
799 799 in which they are interested. Doing so has awful (quadratic-looking)
@@ -805,7 +805,7 b' def walkchangerevs(repo, revs, makefilem'
805 805
806 806 This function returns an iterator yielding contexts. Before
807 807 yielding each context, the iterator will first call the prepare
808 function on each context in the window in forward order.'''
808 function on each context in the window in forward order."""
809 809
810 810 if not revs:
811 811 return []
@@ -897,17 +897,17 b' def subdiruipathfn(subpath, uipathfn):'
897 897
898 898
899 899 def anypats(pats, opts):
900 '''Checks if any patterns, including --include and --exclude were given.
900 """Checks if any patterns, including --include and --exclude were given.
901 901
902 902 Some commands (e.g. addremove) use this condition for deciding whether to
903 903 print absolute or relative paths.
904 '''
904 """
905 905 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
906 906
907 907
908 908 def expandpats(pats):
909 '''Expand bare globs when running on windows.
910 On posix we assume it has already been done by sh.'''
909 """Expand bare globs when running on windows.
910 On posix we assume it has already been done by sh."""
911 911 if not util.expandglobs:
912 912 return list(pats)
913 913 ret = []
@@ -928,9 +928,9 b' def expandpats(pats):'
928 928 def matchandpats(
929 929 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
930 930 ):
931 '''Return a matcher and the patterns that were used.
931 """Return a matcher and the patterns that were used.
932 932 The matcher will warn about bad matches, unless an alternate badfn callback
933 is provided.'''
933 is provided."""
934 934 if opts is None:
935 935 opts = {}
936 936 if not globbed and default == b'relpath':
@@ -1001,7 +1001,7 b' def getorigvfs(ui, repo):'
1001 1001
1002 1002
1003 1003 def backuppath(ui, repo, filepath):
1004 '''customize where working copy backup files (.orig files) are created
1004 """customize where working copy backup files (.orig files) are created
1005 1005
1006 1006 Fetch user defined path from config file: [ui] origbackuppath = <path>
1007 1007 Fall back to default (filepath with .orig suffix) if not specified
@@ -1009,7 +1009,7 b' def backuppath(ui, repo, filepath):'
1009 1009 filepath is repo-relative
1010 1010
1011 1011 Returns an absolute path
1012 '''
1012 """
1013 1013 origvfs = getorigvfs(ui, repo)
1014 1014 if origvfs is None:
1015 1015 return repo.wjoin(filepath + b".orig")
@@ -1300,8 +1300,8 b' def addremove(repo, matcher, prefix, uip'
1300 1300
1301 1301
1302 1302 def marktouched(repo, files, similarity=0.0):
1303 '''Assert that files have somehow been operated upon. files are relative to
1304 the repo root.'''
1303 """Assert that files have somehow been operated upon. files are relative to
1304 the repo root."""
1305 1305 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1306 1306 rejected = []
1307 1307
@@ -1335,11 +1335,11 b' def marktouched(repo, files, similarity='
1335 1335
1336 1336
1337 1337 def _interestingfiles(repo, matcher):
1338 '''Walk dirstate with matcher, looking for files that addremove would care
1338 """Walk dirstate with matcher, looking for files that addremove would care
1339 1339 about.
1340 1340
1341 1341 This is different from dirstate.status because it doesn't care about
1342 whether files are modified or clean.'''
1342 whether files are modified or clean."""
1343 1343 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1344 1344 audit_path = pathutil.pathauditor(repo.root, cached=True)
1345 1345
@@ -1394,8 +1394,8 b' def _findrenames(repo, matcher, added, r'
1394 1394
1395 1395
1396 1396 def _markchanges(repo, unknown, deleted, renames):
1397 '''Marks the files in unknown as added, the files in deleted as removed,
1398 and the files in renames as copied.'''
1397 """Marks the files in unknown as added, the files in deleted as removed,
1398 and the files in renames as copied."""
1399 1399 wctx = repo[None]
1400 1400 with repo.wlock():
1401 1401 wctx.forget(deleted)
@@ -1424,10 +1424,10 b' def getrenamedfn(repo, endrev=None):'
1424 1424 endrev = len(repo)
1425 1425
1426 1426 def getrenamed(fn, rev):
1427 '''looks up all renames for a file (up to endrev) the first
1427 """looks up all renames for a file (up to endrev) the first
1428 1428 time the file is given. It indexes on the changerev and only
1429 1429 parses the manifest if linkrev != changerev.
1430 Returns rename info for fn at changerev rev.'''
1430 Returns rename info for fn at changerev rev."""
1431 1431 if fn not in rcache:
1432 1432 rcache[fn] = {}
1433 1433 fl = repo.file(fn)
@@ -1548,7 +1548,7 b' def movedirstate(repo, newctx, match=Non'
1548 1548
1549 1549
1550 1550 def filterrequirements(requirements):
1551 """ filters the requirements into two sets:
1551 """filters the requirements into two sets:
1552 1552
1553 1553 wcreq: requirements which should be written in .hg/requires
1554 1554 storereq: which should be written in .hg/store/requires
@@ -1871,8 +1871,7 b' class progress(object):'
1871 1871
1872 1872
1873 1873 def gdinitconfig(ui):
1874 """helper function to know if a repo should be created as general delta
1875 """
1874 """helper function to know if a repo should be created as general delta"""
1876 1875 # experimental config: format.generaldelta
1877 1876 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1878 1877 b'format', b'usegeneraldelta'
@@ -1880,8 +1879,7 b' def gdinitconfig(ui):'
1880 1879
1881 1880
1882 1881 def gddeltaconfig(ui):
1883 """helper function to know if incoming delta should be optimised
1884 """
1882 """helper function to know if incoming delta should be optimised"""
1885 1883 # experimental config: format.generaldelta
1886 1884 return ui.configbool(b'format', b'generaldelta')
1887 1885
@@ -292,9 +292,9 b' def findcommonheads('
292 292 ancestorsof=None,
293 293 samplegrowth=1.05,
294 294 ):
295 '''Return a tuple (common, anyincoming, remoteheads) used to identify
295 """Return a tuple (common, anyincoming, remoteheads) used to identify
296 296 missing nodes from or in remote.
297 '''
297 """
298 298 start = util.timer()
299 299
300 300 roundtrips = 0
@@ -371,7 +371,10 b' def findcommonheads('
371 371 with remote.commandexecutor() as e:
372 372 fheads = e.callcommand(b'heads', {})
373 373 fknown = e.callcommand(
374 b'known', {b'nodes': [clnode(r) for r in sample],}
374 b'known',
375 {
376 b'nodes': [clnode(r) for r in sample],
377 },
375 378 )
376 379
377 380 srvheadhashes, yesno = fheads.result(), fknown.result()
@@ -449,7 +452,10 b' def findcommonheads('
449 452
450 453 with remote.commandexecutor() as e:
451 454 yesno = e.callcommand(
452 b'known', {b'nodes': [clnode(r) for r in sample],}
455 b'known',
456 {
457 b'nodes': [clnode(r) for r in sample],
458 },
453 459 ).result()
454 460
455 461 full = True
@@ -350,8 +350,7 b' def _restoreactivebookmark(repo, mark):'
350 350
351 351
352 352 def _aborttransaction(repo, tr):
353 '''Abort current transaction for shelve/unshelve, but keep dirstate
354 '''
353 """Abort current transaction for shelve/unshelve, but keep dirstate"""
355 354 dirstatebackupname = b'dirstate.shelve'
356 355 repo.dirstate.savebackup(tr, dirstatebackupname)
357 356 tr.abort()
@@ -15,11 +15,11 b' from . import ('
15 15
16 16
17 17 def _findexactmatches(repo, added, removed):
18 '''find renamed files that have no changes
18 """find renamed files that have no changes
19 19
20 20 Takes a list of new filectxs and a list of removed filectxs, and yields
21 21 (before, after) tuples of exact matches.
22 '''
22 """
23 23 # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
24 24 # We use hash() to discard fctx.data() from memory.
25 25 hashes = {}
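The hashing strategy in the comment above, sketched with plain bytes standing in for filectx objects:

    removed = {b'old.txt': b'same payload'}
    added = {b'new.txt': b'same payload'}

    table = {}  # hash(data) -> [names], as the comment describes
    for name, data in removed.items():
        table.setdefault(hash(data), []).append(name)

    matches = [
        (old, new)
        for new, data in added.items()
        for old in table.get(hash(data), [])
        if removed[old] == data  # guard against hash collisions
    ]
    assert matches == [(b'old.txt', b'new.txt')]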
@@ -77,11 +77,11 b' def score(fctx1, fctx2):'
77 77
78 78
79 79 def _findsimilarmatches(repo, added, removed, threshold):
80 '''find potentially renamed files based on similar file content
80 """find potentially renamed files based on similar file content
81 81
82 82 Takes a list of new filectxs and a list of removed filectxs, and yields
83 83 (before, after, score) tuples of partial matches.
84 '''
84 """
85 85 copies = {}
86 86 progress = repo.ui.makeprogress(
87 87 _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
@@ -57,8 +57,7 b' def intersect(ra, rb):'
57 57
58 58
59 59 def compare_range(a, astart, aend, b, bstart, bend):
60 """Compare a[astart:aend] == b[bstart:bend], without slicing.
61 """
60 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
62 61 if (aend - astart) != (bend - bstart):
63 62 return False
64 63 for ia, ib in zip(
@@ -102,8 +101,7 b' class Merge3Text(object):'
102 101 localorother=None,
103 102 minimize=False,
104 103 ):
105 """Return merge in cvs-like form.
106 """
104 """Return merge in cvs-like form."""
107 105 self.conflicts = False
108 106 newline = b'\n'
109 107 if len(self.a) > 0:
@@ -121,8 +121,7 b' class doublepipe(object):'
121 121 return self._call(b'readline')
122 122
123 123 def _call(self, methname, data=None):
124 """call <methname> on "main", forward output of "side" while blocking
125 """
124 """call <methname> on "main", forward output of "side" while blocking"""
126 125 # data can be '' or 0
127 126 if (data is not None and not data) or self._main.closed:
128 127 _forwardoutput(self._ui, self._side)
@@ -227,8 +227,7 b' def _hostsettings(ui, hostname):'
227 227
228 228
229 229 def commonssloptions(minimumprotocol):
230 """Return SSLContext options common to servers and clients.
231 """
230 """Return SSLContext options common to servers and clients."""
232 231 if minimumprotocol not in configprotocols:
233 232 raise ValueError(b'protocol value not supported: %s' % minimumprotocol)
234 233
@@ -617,11 +616,11 b' def _dnsnamematch(dn, hostname, maxwildc'
617 616
618 617
619 618 def _verifycert(cert, hostname):
620 '''Verify that cert (in socket.getpeercert() format) matches hostname.
619 """Verify that cert (in socket.getpeercert() format) matches hostname.
621 620 CRLs is not handled.
622 621
623 622 Returns error message if any problems are found and None on success.
624 '''
623 """
625 624 if not cert:
626 625 return _(b'no certificate received')
627 626
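
As a heavily simplified illustration of the check `_verifycert` performs (no wildcard handling, no commonName fallback; assumes the `socket.getpeercert()` dict layout):

    def verify_hostname(cert, hostname):
        # return an error message, or None when the hostname matches
        if not cert:
            return 'no certificate received'
        dnsnames = [v for k, v in cert.get('subjectAltName', ()) if k == 'DNS']
        if hostname in dnsnames:
            return None
        return 'certificate is for %s' % ', '.join(dnsnames)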
@@ -55,7 +55,7 b' class cmdstate(object):'
55 55 """
56 56
57 57 def __init__(self, repo, fname):
58 """ repo is the repo object
58 """repo is the repo object
59 59 fname is the file name in which data should be stored in .hg directory
60 60 """
61 61 self._repo = repo
@@ -105,11 +105,11 b' class cmdstate(object):'
105 105
106 106 class _statecheck(object):
107 107 """a utility class that deals with multistep operations like graft,
108 histedit, bisect, update etc and check whether such commands
109 are in an unfinished condition or not and return appropriate message
110 and hint.
111 It also has the ability to register and determine the states of any new
112 multistep operation or multistep command extension.
108 histedit, bisect, update etc and check whether such commands
109 are in an unfinished condition or not and return appropriate message
110 and hint.
111 It also has the ability to register and determine the states of any new
112 multistep operation or multistep command extension.
113 113 """
114 114
115 115 def __init__(
@@ -173,7 +173,11 b' class _statecheck(object):'
173 173 return _(
174 174 b"use 'hg %s --continue', 'hg %s --abort', "
175 175 b"or 'hg %s --stop'"
176 ) % (self._opname, self._opname, self._opname,)
176 ) % (
177 self._opname,
178 self._opname,
179 self._opname,
180 )
177 181
178 182 return self._cmdhint
179 183
@@ -411,11 +411,11 b' def load_data(path):'
411 411
412 412
413 413 def reset(frequency=None):
414 '''Clear out the state of the profiler. Do not call while the
414 """Clear out the state of the profiler. Do not call while the
415 415 profiler is running.
416 416
417 417 The optional frequency argument specifies the number of samples to
418 collect per second.'''
418 collect per second."""
419 419 assert state.profile_level == 0, b"Can't reset() while statprof is running"
420 420 CodeSite.cache.clear()
421 421 state.reset(frequency)
@@ -525,8 +525,8 b' def display(fp=None, format=3, data=None'
525 525
526 526
527 527 def display_by_line(data, fp):
528 '''Print the profiler data with each sample line represented
529 as one row in a table. Sorted by self-time per line.'''
528 """Print the profiler data with each sample line represented
529 as one row in a table. Sorted by self-time per line."""
530 530 stats = SiteStats.buildstats(data.samples)
531 531 stats.sort(reverse=True, key=lambda x: x.selfseconds())
532 532
@@ -554,9 +554,9 b' def display_by_line(data, fp):'
554 554
555 555
556 556 def display_by_method(data, fp):
557 '''Print the profiler data with each sample function represented
557 """Print the profiler data with each sample function represented
558 558 as one row in a table. Important lines within that function are
559 output as nested rows. Sorted by self-time per line.'''
559 output as nested rows. Sorted by self-time per line."""
560 560 fp.write(
561 561 b'%5.5s %10.10s %7.7s %-8.8s\n'
562 562 % (b'% ', b'cumulative', b'self', b'')
@@ -835,9 +835,9 b' def write_to_flame(data, fp, scriptpath='
835 835
836 836
837 837 def simplifypath(path):
838 '''Attempt to make the path to a Python module easier to read by
838 """Attempt to make the path to a Python module easier to read by
839 839 removing whatever part of the Python search path it was found
840 on.'''
840 on."""
841 841
842 842 if path in _pathcache:
843 843 return _pathcache[path]
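
The stripping `simplifypath` describes can be sketched as follows, assuming POSIX separators (the `_pathcache` memoization in the surrounding code is omitted):

    import sys

    def simplify_path(path, search_path=tuple(sys.path)):
        # strip the longest matching search-path prefix, if any
        for prefix in sorted(filter(None, search_path), key=len, reverse=True):
            if path.startswith(prefix + '/'):
                return path[len(prefix) + 1:]
        return path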
@@ -52,7 +52,7 b' def _matchtrackedpath(path, matcher):'
52 52 # This avoids a collision between a file named foo and a dir named
53 53 # foo.i or foo.d
54 54 def _encodedir(path):
55 '''
55 """
56 56 >>> _encodedir(b'data/foo.i')
57 57 'data/foo.i'
58 58 >>> _encodedir(b'data/foo.i/bla.i')
@@ -61,7 +61,7 b' def _encodedir(path):'
61 61 'data/foo.i.hg.hg/bla.i'
62 62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
63 63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
64 '''
64 """
65 65 return (
66 66 path.replace(b".hg/", b".hg.hg/")
67 67 .replace(b".i/", b".i.hg/")
@@ -73,14 +73,14 b" encodedir = getattr(parsers, 'encodedir'"
73 73
74 74
75 75 def decodedir(path):
76 '''
76 """
77 77 >>> decodedir(b'data/foo.i')
78 78 'data/foo.i'
79 79 >>> decodedir(b'data/foo.i.hg/bla.i')
80 80 'data/foo.i/bla.i'
81 81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
82 82 'data/foo.i.hg/bla.i'
83 '''
83 """
84 84 if b".hg/" not in path:
85 85 return path
86 86 return (
@@ -91,14 +91,14 b' def decodedir(path):'
91 91
92 92
93 93 def _reserved():
94 ''' characters that are problematic for filesystems
94 """characters that are problematic for filesystems
95 95
96 96 * ascii escapes (0..31)
97 97 * ascii hi (126..255)
98 98 * windows specials
99 99
100 100 these characters will be escaped by encodefunctions
101 '''
101 """
102 102 winreserved = [ord(x) for x in u'\\:*?"<>|']
103 103 for x in range(32):
104 104 yield x
@@ -109,7 +109,7 b' def _reserved():'
109 109
110 110
111 111 def _buildencodefun():
112 '''
112 """
113 113 >>> enc, dec = _buildencodefun()
114 114
115 115 >>> enc(b'nothing/special.txt')
@@ -131,7 +131,7 b' def _buildencodefun():'
131 131 'the~07quick~adshot'
132 132 >>> dec(b'the~07quick~adshot')
133 133 'the\\x07quick\\xadshot'
134 '''
134 """
135 135 e = b'_'
136 136 xchr = pycompat.bytechr
137 137 asciistr = list(map(xchr, range(127)))
@@ -172,23 +172,23 b' def _buildencodefun():'
172 172
173 173
174 174 def encodefilename(s):
175 '''
175 """
176 176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
177 177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
178 '''
178 """
179 179 return _encodefname(encodedir(s))
180 180
181 181
182 182 def decodefilename(s):
183 '''
183 """
184 184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
185 185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
186 '''
186 """
187 187 return decodedir(_decodefname(s))
188 188
189 189
190 190 def _buildlowerencodefun():
191 '''
191 """
192 192 >>> f = _buildlowerencodefun()
193 193 >>> f(b'nothing/special.txt')
194 194 'nothing/special.txt'
@@ -198,7 +198,7 b' def _buildlowerencodefun():'
198 198 'hello~3aworld~3f'
199 199 >>> f(b'the\\x07quick\\xADshot')
200 200 'the~07quick~adshot'
201 '''
201 """
202 202 xchr = pycompat.bytechr
203 203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
204 204 for x in _reserved():
@@ -220,7 +220,7 b" lowerencode = getattr(parsers, 'lowerenc"
220 220
221 221
222 222 def _auxencode(path, dotencode):
223 '''
223 """
224 224 Encodes filenames containing names reserved by Windows or which end in
225 225 period or space. Does not touch other single reserved characters c.
226 226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
@@ -240,7 +240,7 b' def _auxencode(path, dotencode):'
240 240 ['foo.~20']
241 241 >>> _auxencode([b' .foo'], True)
242 242 ['~20.foo']
243 '''
243 """
244 244 for i, n in enumerate(path):
245 245 if not n:
246 246 continue
@@ -305,7 +305,7 b' def _hashencode(path, dotencode):'
305 305
306 306
307 307 def _hybridencode(path, dotencode):
308 '''encodes path with a length limit
308 """encodes path with a length limit
309 309
310 310 Encodes all paths that begin with 'data/', according to the following.
311 311
@@ -334,7 +334,7 b' def _hybridencode(path, dotencode):'
334 334
335 335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
336 336 encoding was used.
337 '''
337 """
338 338 path = encodedir(path)
339 339 ef = _encodefname(path).split(b'/')
340 340 res = b'/'.join(_auxencode(ef, dotencode))
@@ -444,11 +444,11 b' class basicstore(object):'
444 444 return reversed(self._walk(b'', False))
445 445
446 446 def walk(self, matcher=None):
447 '''yields (unencoded, encoded, size)
447 """yields (unencoded, encoded, size)
448 448
449 449 if a matcher is passed, only storage files of those tracked paths
450 450 that match the matcher are passed
451 '''
451 """
452 452 # yield data files first
453 453 for x in self.datafiles(matcher):
454 454 yield x
@@ -517,10 +517,10 b' class fncache(object):'
517 517 self.addls = set()
518 518
519 519 def ensureloaded(self, warn=None):
520 '''read the fncache file if not already read.
520 """read the fncache file if not already read.
521 521
522 522 If the file on disk is corrupted, raise. If warn is provided,
523 warn and keep going instead.'''
523 warn and keep going instead."""
524 524 if self.entries is None:
525 525 self._load(warn)
526 526
@@ -114,7 +114,12 b' def strip('
114 114 ),
115 115 ),
116 116 (b'', b'no-backup', None, _(b'do not save backup bundle')),
117 (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
117 (
118 b'',
119 b'nobackup',
120 None,
121 _(b'do not save backup bundle (DEPRECATED)'),
122 ),
118 123 (b'n', b'', None, _(b'ignored (DEPRECATED)')),
119 124 (
120 125 b'k',
@@ -49,9 +49,9 b' propertycache = util.propertycache'
49 49
50 50
51 51 def _expandedabspath(path):
52 '''
52 """
53 53 get a path or url, and if it is a path, expand it and return an absolute path
54 '''
54 """
55 55 expandedpath = util.urllocalpath(util.expandpath(path))
56 56 u = util.url(expandedpath)
57 57 if not u.scheme:
@@ -268,8 +268,7 b' class abstractsubrepo(object):'
268 268 )
269 269
270 270 def bailifchanged(self, ignoreupdate=False, hint=None):
271 """raise Abort if subrepository is ``dirty()``
272 """
271 """raise Abort if subrepository is ``dirty()``"""
273 272 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
274 273 if dirtyreason:
275 274 raise error.Abort(dirtyreason, hint=hint)
@@ -291,8 +290,7 b' class abstractsubrepo(object):'
291 290 raise NotImplementedError
292 291
293 292 def phase(self, state):
294 """returns phase of specified state in the subrepository.
295 """
293 """returns phase of specified state in the subrepository."""
296 294 return phases.public
297 295
298 296 def remove(self):
@@ -384,10 +382,10 b' class abstractsubrepo(object):'
384 382 return total
385 383
386 384 def walk(self, match):
387 '''
385 """
388 386 walk recursively through the directory tree, finding all files
389 387 matched by the match function
390 '''
388 """
391 389
392 390 def forget(self, match, prefix, uipathfn, dryrun, interactive):
393 391 return ([], [])
@@ -423,9 +421,9 b' class abstractsubrepo(object):'
423 421 return revid
424 422
425 423 def unshare(self):
426 '''
424 """
427 425 convert this repository from shared to normal storage.
428 '''
426 """
429 427
430 428 def verify(self, onpush=False):
431 429 """verify the revision of this repository that is held in `_state` is
@@ -437,14 +435,12 b' class abstractsubrepo(object):'
437 435
438 436 @propertycache
439 437 def wvfs(self):
440 """return vfs to access the working directory of this subrepository
441 """
438 """return vfs to access the working directory of this subrepository"""
442 439 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
443 440
444 441 @propertycache
445 442 def _relpath(self):
446 """return path to this subrepository as seen from outermost repository
447 """
443 """return path to this subrepository as seen from outermost repository"""
448 444 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
449 445
450 446
@@ -503,10 +499,10 b' class hgsubrepo(abstractsubrepo):'
503 499 return clean
504 500
505 501 def _calcstorehash(self, remotepath):
506 '''calculate a unique "store hash"
502 """calculate a unique "store hash"
507 503
508 504 This method is used to detect when there are changes that may
509 require a push to a given remote path.'''
505 require a push to a given remote path."""
510 506 # sort the files that will be hashed in increasing (likely) file size
511 507 filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
512 508 yield b'# %s\n' % _expandedabspath(remotepath)
@@ -525,11 +521,11 b' class hgsubrepo(abstractsubrepo):'
525 521 return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
526 522
527 523 def _cachestorehash(self, remotepath):
528 '''cache the current store hash
524 """cache the current store hash
529 525
530 526 Each remote repo requires its own store hash cache, because a subrepo
531 527 store may be "clean" versus a given remote repo, but not versus another
532 '''
528 """
533 529 cachefile = _getstorehashcachename(remotepath)
534 530 with self._repo.lock():
535 531 storehash = list(self._calcstorehash(remotepath))
@@ -537,8 +533,7 b' class hgsubrepo(abstractsubrepo):'
537 533 vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
538 534
539 535 def _getctx(self):
540 '''fetch the context for this subrepo revision, possibly a workingctx
541 '''
536 """fetch the context for this subrepo revision, possibly a workingctx"""
542 537 if self._ctx.rev() is None:
543 538 return self._repo[None] # workingctx if parent is workingctx
544 539 else:
@@ -1048,14 +1043,12 b' class hgsubrepo(abstractsubrepo):'
1048 1043
1049 1044 @propertycache
1050 1045 def wvfs(self):
1051 """return own wvfs for efficiency and consistency
1052 """
1046 """return own wvfs for efficiency and consistency"""
1053 1047 return self._repo.wvfs
1054 1048
1055 1049 @propertycache
1056 1050 def _relpath(self):
1057 """return path to this subrepository as seen from outermost repository
1058 """
1051 """return path to this subrepository as seen from outermost repository"""
1059 1052 # Keep consistent dir separators by avoiding vfs.join(self._path)
1060 1053 return reporelpath(self._repo)
1061 1054
@@ -1170,12 +1163,16 b' class svnsubrepo(abstractsubrepo):'
1170 1163 externals.append(path)
1171 1164 elif item == 'missing':
1172 1165 missing.append(path)
1173 if item not in (
1174 '',
1175 'normal',
1176 'unversioned',
1177 'external',
1178 ) or props not in ('', 'none', 'normal'):
1166 if (
1167 item
1168 not in (
1169 '',
1170 'normal',
1171 'unversioned',
1172 'external',
1173 )
1174 or props not in ('', 'none', 'normal')
1175 ):
1179 1176 changes.append(path)
1180 1177 for path in changes:
1181 1178 for ext in externals:
@@ -1384,7 +1381,7 b' class gitsubrepo(abstractsubrepo):'
1384 1381
1385 1382 @staticmethod
1386 1383 def _checkversion(out):
1387 '''ensure git version is new enough
1384 """ensure git version is new enough
1388 1385
1389 1386 >>> _checkversion = gitsubrepo._checkversion
1390 1387 >>> _checkversion(b'git version 1.6.0')
@@ -1405,7 +1402,7 b' class gitsubrepo(abstractsubrepo):'
1405 1402 'unknown'
1406 1403 >>> _checkversion(b'no')
1407 1404 'unknown'
1408 '''
1405 """
1409 1406 version = gitsubrepo._gitversion(out)
1410 1407 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1411 1408 # despite the docstring comment. For now, error on 1.4.0, warn on
@@ -1516,9 +1513,9 b' class gitsubrepo(abstractsubrepo):'
1516 1513 self._gitcommand([b'update-index', b'-q', b'--refresh'])
1517 1514
1518 1515 def _gitbranchmap(self):
1519 '''returns 2 things:
1516 """returns 2 things:
1520 1517 a map from git branch to revision
1521 a map from revision to branches'''
1518 a map from revision to branches"""
1522 1519 branch2rev = {}
1523 1520 rev2branch = {}
1524 1521
@@ -87,12 +87,12 b' hexnullid = hex(nullid)'
87 87
88 88
89 89 def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
90 '''read the .hgtags file into a structure that is suitable for merging
90 """read the .hgtags file into a structure that is suitable for merging
91 91
92 92 Depending on the keeplinenums flag, clear the line numbers associated
93 93 with each tag. This is done because only the line numbers of the first
94 94 parent are useful for merging.
95 '''
95 """
96 96 filetags = tagsmod._readtaghist(
97 97 ui, repo, lines, fn=fn, recode=None, calcnodelines=True
98 98 )[1]
@@ -104,7 +104,7 b' def readtagsformerge(ui, repo, lines, fn'
104 104
105 105
106 106 def grouptagnodesbyline(tagnodes):
107 '''
107 """
108 108 Group nearby nodes (i.e. those that must be written next to each other)
109 109
110 110 The input is a list of [node, position] pairs, corresponding to a given tag
@@ -118,7 +118,7 b' def grouptagnodesbyline(tagnodes):'
118 118 position is None).
119 119
120 120 The result is a list of [position, [consecutive node list]]
121 '''
121 """
122 122 firstlinenum = None
123 123 for hexnode, linenum in tagnodes:
124 124 firstlinenum = linenum
@@ -139,14 +139,14 b' def grouptagnodesbyline(tagnodes):'
139 139
140 140
141 141 def writemergedtags(fcd, mergedtags):
142 '''
142 """
143 143 write the merged tags while trying to minimize the diff to the first parent
144 144
145 145 This function uses the ordering info stored on the merged tags dict to
146 146 generate an .hgtags file which is correct (in the sense that its contents
147 147 correspond to the result of the tag merge) while also being as close as
148 148 possible to the first parent's .hgtags file.
149 '''
149 """
150 150 # group the node-tag pairs that must be written next to each other
151 151 for tname, taglist in list(mergedtags.items()):
152 152 mergedtags[tname] = grouptagnodesbyline(taglist)
@@ -175,12 +175,12 b' def writemergedtags(fcd, mergedtags):'
175 175
176 176
177 177 def singletagmerge(p1nodes, p2nodes):
178 '''
178 """
179 179 merge the nodes corresponding to a single tag
180 180
181 181 Note that the inputs are lists of node-linenum pairs (i.e. not just lists
182 182 of nodes)
183 '''
183 """
184 184 if not p2nodes:
185 185 return p1nodes
186 186 if not p1nodes:
@@ -221,10 +221,10 b' def singletagmerge(p1nodes, p2nodes):'
221 221
222 222
223 223 def merge(repo, fcd, fco, fca):
224 '''
224 """
225 225 Merge the tags of two revisions, taking into account the base tags
226 226 Try to minimize the diff between the merged tags and the first parent tags
227 '''
227 """
228 228 ui = repo.ui
229 229 # read the p1, p2 and base tags
230 230 # only keep the line numbers for the p1 tags
@@ -177,12 +177,12 b' def writediff(fp, difflist):'
177 177
178 178
179 179 def findglobaltags(ui, repo):
180 '''Find global tags in a repo: return a tagsmap
180 """Find global tags in a repo: return a tagsmap
181 181
182 182 tagsmap: tag name to (node, hist) 2-tuples.
183 183
184 184 The tags cache is read and updated as a side-effect of calling.
185 '''
185 """
186 186 (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
187 187 if cachetags is not None:
188 188 assert not shouldwrite
@@ -267,7 +267,7 b' def readlocaltags(ui, repo, alltags, tag'
267 267
268 268
269 269 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
270 '''Read tag definitions from a file (or any source of lines).
270 """Read tag definitions from a file (or any source of lines).
271 271
272 272 This function returns two sortdicts with similar information:
273 273
@@ -283,7 +283,7 b' def _readtaghist(ui, repo, lines, fn, re'
283 283 When calcnodelines is False the hextaglines dict is not calculated (an
284 284 empty dict is returned). This is done to improve this function's
285 285 performance in cases where the line numbers are not needed.
286 '''
286 """
287 287
288 288 bintaghist = util.sortdict()
289 289 hextaglines = util.sortdict()
@@ -325,14 +325,14 b' def _readtaghist(ui, repo, lines, fn, re'
325 325
326 326
327 327 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
328 '''Read tag definitions from a file (or any source of lines).
328 """Read tag definitions from a file (or any source of lines).
329 329
330 330 Returns a mapping from tag name to (node, hist).
331 331
332 332 "node" is the node id from the last line read for that name. "hist"
333 333 is the list of node ids previously associated with it (in file order).
334 334 All node ids are binary, not hex.
335 '''
335 """
336 336 filetags, nodelines = _readtaghist(
337 337 ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
338 338 )
@@ -390,7 +390,7 b' def _filename(repo):'
390 390
391 391
392 392 def _readtagcache(ui, repo):
393 '''Read the tag cache.
393 """Read the tag cache.
394 394
395 395 Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
396 396
@@ -406,7 +406,7 b' def _readtagcache(ui, repo):'
406 406
407 407 If the cache is not up to date, the caller is responsible for reading tag
408 408 info from each returned head. (See findglobaltags().)
409 '''
409 """
410 410 try:
411 411 cachefile = repo.cachevfs(_filename(repo), b'r')
412 412 # force reading the file for static-http
@@ -549,7 +549,7 b' def _writetagcache(ui, repo, valid, cach'
549 549
550 550
551 551 def tag(repo, names, node, message, local, user, date, editor=False):
552 '''tag a revision with one or more symbolic names.
552 """tag a revision with one or more symbolic names.
553 553
554 554 names is a list of strings or, when adding a single tag, names may be a
555 555 string.
@@ -567,7 +567,7 b' def tag(repo, names, node, message, loca'
567 567
568 568 user: name of user to use if committing
569 569
570 date: date tuple to use if committing'''
570 date: date tuple to use if committing"""
571 571
572 572 if not local:
573 573 m = matchmod.exact([b'.hgtags'])
@@ -548,8 +548,7 b' def websub(text, websubtable):'
548 548
549 549
550 550 def loadfilter(ui, extname, registrarobj):
551 """Load template filter from specified registrarobj
552 """
551 """Load template filter from specified registrarobj"""
553 552 for name, func in pycompat.iteritems(registrarobj._table):
554 553 filters[name] = func
555 554
@@ -912,8 +912,7 b' def word(context, mapping, args):'
912 912
913 913
914 914 def loadfunction(ui, extname, registrarobj):
915 """Load template function from specified registrarobj
916 """
915 """Load template function from specified registrarobj"""
917 916 for name, func in pycompat.iteritems(registrarobj._table):
918 917 funcs[name] = func
919 918
@@ -994,8 +994,7 b' def showwhyunstable(context, mapping):'
994 994
995 995
996 996 def loadkeyword(ui, extname, registrarobj):
997 """Load template keyword from specified registrarobj
998 """
997 """Load template keyword from specified registrarobj"""
999 998 for name, func in pycompat.iteritems(registrarobj._table):
1000 999 keywords[name] = func
1001 1000
@@ -663,7 +663,7 b' class nullresourcemapper(resourcemapper)'
663 663
664 664
665 665 class engine(object):
666 '''template expansion engine.
666 """template expansion engine.
667 667
668 668 template expansion works like this. a map file contains key=value
669 669 pairs. if value is quoted, it is treated as string. otherwise, it
@@ -680,7 +680,7 b' class engine(object):'
680 680 {key%format}.
681 681
682 682 filter uses function to transform value. syntax is
683 {key|filter1|filter2|...}.'''
683 {key|filter1|filter2|...}."""
684 684
685 685 def __init__(self, loader, filters=None, defaults=None, resources=None):
686 686 self._loader = loader
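
A typical map-file entry in the syntax this docstring lays out (illustrative values; `rev`, `node` and `author` are standard template keywords, `short` and `person` standard filters):

    changeset = 'changeset: {rev}:{node|short}\nuser: {author|person}\n'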
@@ -781,9 +781,9 b' class engine(object):'
781 781 return False
782 782
783 783 def process(self, t, mapping):
784 '''Perform expansion. t is name of map element to expand.
784 """Perform expansion. t is name of map element to expand.
785 785 mapping contains added elements for use during expansion. Is a
786 generator.'''
786 generator."""
787 787 func, data = self._load(t)
788 788 return self._expand(func, data, mapping)
789 789
@@ -857,7 +857,11 b' def _readmapfile(fp, mapfile):'
857 857 if subresource:
858 858 data = subresource.read()
859 859 conf.parse(
860 abs, data, sections=sections, remap=remap, include=include,
860 abs,
861 data,
862 sections=sections,
863 remap=remap,
864 include=include,
861 865 )
862 866
863 867 data = fp.read()
@@ -1094,12 +1098,12 b' def templatedir():'
1094 1098
1095 1099
1096 1100 def open_template(name, templatepath=None):
1097 '''returns a file-like object for the given template, and its full path
1101 """returns a file-like object for the given template, and its full path
1098 1102
1099 1103 If the name is a relative path and we're in a frozen binary, the template
1100 1104 will be read from the mercurial.templates package instead. The returned path
1101 1105 will then be the relative path.
1102 '''
1106 """
1103 1107 # Does the name point directly to a map file?
1104 1108 if os.path.isfile(name) or os.path.isabs(name):
1105 1109 return name, open(name, mode='rb')
@@ -1021,7 +1021,12 b' class ifiledatatests(basetestcase):'
1021 1021 def testcensored(self):
1022 1022 f = self._makefilefn()
1023 1023
1024 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1024 stored1 = storageutil.packmeta(
1025 {
1026 b'censored': b'tombstone',
1027 },
1028 b'',
1029 )
1025 1030
1026 1031 with self._maketransactionfn() as tr:
1027 1032 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1050,7 +1055,12 b' class ifiledatatests(basetestcase):'
1050 1055
1051 1056 f = self._makefilefn()
1052 1057
1053 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1058 stored1 = storageutil.packmeta(
1059 {
1060 b'censored': b'tombstone',
1061 },
1062 b'',
1063 )
1054 1064
1055 1065 with self._maketransactionfn() as tr:
1056 1066 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1232,7 +1242,12 b' class ifilemutationtests(basetestcase):'
1232 1242 # Attempt to apply a delta made against a censored revision.
1233 1243 f = self._makefilefn()
1234 1244
1235 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1245 stored1 = storageutil.packmeta(
1246 {
1247 b'censored': b'tombstone',
1248 },
1249 b'',
1250 )
1236 1251
1237 1252 with self._maketransactionfn() as tr:
1238 1253 node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
@@ -425,10 +425,10 b' class transaction(util.transactional):'
425 425
426 426 @active
427 427 def replace(self, file, offset):
428 '''
428 """
429 429 replace can only replace already committed entries
430 430 that are not pending in the queue
431 '''
431 """
432 432 if file in self._newfiles:
433 433 if not offset:
434 434 return
@@ -476,9 +476,9 b' class transaction(util.transactional):'
476 476
477 477 @active
478 478 def writepending(self):
479 '''write pending file to temporary version
479 """write pending file to temporary version
480 480
481 This is used to allow hooks to view a transaction before commit'''
481 This is used to allow hooks to view a transaction before commit"""
482 482 categories = sorted(self._pendingcallback)
483 483 for cat in categories:
484 484 # remove callback since the data will have been flushed
@@ -489,8 +489,7 b' class transaction(util.transactional):'
489 489
490 490 @active
491 491 def hasfinalize(self, category):
492 """check is a callback already exist for a category
493 """
492 """check is a callback already exist for a category"""
494 493 return category in self._finalizecallback
495 494
496 495 @active
@@ -533,11 +532,11 b' class transaction(util.transactional):'
533 532
534 533 @active
535 534 def addvalidator(self, category, callback):
536 """ adds a callback to be called when validating the transaction.
535 """adds a callback to be called when validating the transaction.
537 536
538 537 The transaction will be given as the first argument to the callback.
539 538
540 callback should raise an exception to abort the transaction """
539 callback should raise an exception to abort the transaction"""
541 540 self._validatecallback[category] = callback
542 541
543 542 @active
@@ -624,9 +623,9 b' class transaction(util.transactional):'
624 623
625 624 @active
626 625 def abort(self):
627 '''abort the transaction (generally called on error, or when the
626 """abort the transaction (generally called on error, or when the
628 627 transaction is not explicitly committed before going out of
629 scope)'''
628 scope)"""
630 629 self._abort()
631 630
632 631 def _writeundo(self):
@@ -117,7 +117,10 b' def findcommonincoming(repo, remote, hea'
117 117 for p in pycompat.xrange(0, len(r), 10):
118 118 with remote.commandexecutor() as e:
119 119 branches = e.callcommand(
120 b'branches', {b'nodes': r[p : p + 10],}
120 b'branches',
121 {
122 b'nodes': r[p : p + 10],
123 },
121 124 ).result()
122 125
123 126 for b in branches:
@@ -13,20 +13,20 b' from . import encoding'
13 13
14 14
15 15 def mayhavepending(root):
16 '''return whether 'root' may have pending changes, which are
16 """return whether 'root' may have pending changes, which are
17 17 visible to this process.
18 '''
18 """
19 19 return root == encoding.environ.get(b'HG_PENDING')
20 20
21 21
22 22 def trypending(root, vfs, filename, **kwargs):
23 '''Open file to be read according to HG_PENDING environment variable
23 """Open file to be read according to HG_PENDING environment variable
24 24
25 25 This opens '.pending' of specified 'filename' only when HG_PENDING
26 26 is equal to 'root'.
27 27
28 28 This returns '(fp, is_pending_opened)' tuple.
29 '''
29 """
30 30 if mayhavepending(root):
31 31 try:
32 32 return (vfs(b'%s.pending' % filename, **kwargs), True)
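
Condensed, the behaviour is: when HG_PENDING names this repository root, prefer the '.pending' variant of the file and report which one was opened. A sketch with a plain open function standing in for the vfs (names are illustrative):

    import os

    def try_pending(root, open_file, filename):
        # returns (fp, is_pending_opened); fall back to the normal file
        if root == os.environ.get('HG_PENDING'):
            try:
                return open_file(filename + '.pending'), True
            except FileNotFoundError:
                pass
        return open_file(filename), False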
@@ -925,7 +925,7 b' class ui(object):'
925 925 yield section, name, value
926 926
927 927 def plain(self, feature=None):
928 '''is plain mode active?
928 """is plain mode active?
929 929
930 930 Plain mode means that all configuration variables which affect
931 931 the behavior and output of Mercurial should be
@@ -939,7 +939,7 b' class ui(object):'
939 939 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
940 940 - False if feature is disabled by default and not included in HGPLAIN
941 941 - True otherwise
942 '''
942 """
943 943 if (
944 944 b'HGPLAIN' not in encoding.environ
945 945 and b'HGPLAINEXCEPT' not in encoding.environ
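
Condensed to a standalone helper, the rule this docstring states is roughly the following (a simplification; the real method also special-cases a few features):

    import os

    def is_plain(feature=None):
        # HGPLAINEXCEPT lists features exempted from plain mode
        if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
            return False
        exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
        if feature and exceptions != ['']:
            return feature not in exceptions
        return True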
@@ -1112,7 +1112,7 b' class ui(object):'
1112 1112 return self._colormode != b'win32'
1113 1113
1114 1114 def write(self, *args, **opts):
1115 '''write args to output
1115 """write args to output
1116 1116
1117 1117 By default, this method simply writes to the buffer or stdout.
1118 1118 Color mode can be set on the UI class to have the output decorated
@@ -1133,7 +1133,7 b' class ui(object):'
1133 1133 When labeling output for a specific command, a label of
1134 1134 "cmdname.type" is recommended. For example, status issues
1135 1135 a label of "status.modified" for modified files.
1136 '''
1136 """
1137 1137 dest = self._fout
1138 1138
1139 1139 # inlined _write() for speed
@@ -1453,9 +1453,9 b' class ui(object):'
1453 1453 return _reqexithandlers
1454 1454
1455 1455 def atexit(self, func, *args, **kwargs):
1456 '''register a function to run after dispatching a request
1456 """register a function to run after dispatching a request
1457 1457
1458 Handlers do not stay registered across request boundaries.'''
1458 Handlers do not stay registered across request boundaries."""
1459 1459 self._exithandlers.append((func, args, kwargs))
1460 1460 return func
1461 1461
@@ -1484,8 +1484,14 b' class ui(object):'
1484 1484 alldefaults = frozenset([b"text", b"curses"])
1485 1485
1486 1486 featureinterfaces = {
1487 b"chunkselector": [b"text", b"curses",],
1488 b"histedit": [b"text", b"curses",],
1487 b"chunkselector": [
1488 b"text",
1489 b"curses",
1490 ],
1491 b"histedit": [
1492 b"text",
1493 b"curses",
1494 ],
1489 1495 }
1490 1496
1491 1497 # Feature-specific interface
@@ -1532,7 +1538,7 b' class ui(object):'
1532 1538 return choseninterface
1533 1539
1534 1540 def interactive(self):
1535 '''is interactive input allowed?
1541 """is interactive input allowed?
1536 1542
1537 1543 An interactive session is a session where input can be reasonably read
1538 1544 from `sys.stdin'. If this function returns false, any attempt to read
@@ -1544,7 +1550,7 b' class ui(object):'
1544 1550 to a terminal device.
1545 1551
1546 1552 This function refers to input only; for output, see `ui.formatted()'.
1547 '''
1553 """
1548 1554 i = self.configbool(b"ui", b"interactive")
1549 1555 if i is None:
1550 1556 # some environments replace stdin without implementing isatty
@@ -1554,8 +1560,7 b' class ui(object):'
1554 1560 return i
1555 1561
1556 1562 def termwidth(self):
1557 '''how wide is the terminal in columns?
1558 '''
1563 """how wide is the terminal in columns?"""
1559 1564 if b'COLUMNS' in encoding.environ:
1560 1565 try:
1561 1566 return int(encoding.environ[b'COLUMNS'])
@@ -1564,7 +1569,7 b' class ui(object):'
1564 1569 return scmutil.termsize(self)[0]
1565 1570
1566 1571 def formatted(self):
1567 '''should formatted output be used?
1572 """should formatted output be used?
1568 1573
1568 1573 It is often desirable to format the output to suit the output medium.
1570 1575 Examples of this are truncating long lines or colorizing messages.
@@ -1579,7 +1584,7 b' class ui(object):'
1579 1584
1580 1585 This function refers to output only; for input, see `ui.interactive()'.
1581 1586 This function always returns false when in plain mode, see `ui.plain()'.
1582 '''
1587 """
1583 1588 if self.plain():
1584 1589 return False
1585 1590
@@ -1746,40 +1751,40 b' class ui(object):'
1746 1751 raise error.ResponseExpected()
1747 1752
1748 1753 def status(self, *msg, **opts):
1749 '''write status message to output (if ui.quiet is False)
1754 """write status message to output (if ui.quiet is False)
1750 1755
1751 1756 This adds an output label of "ui.status".
1752 '''
1757 """
1753 1758 if not self.quiet:
1754 1759 self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
1755 1760
1756 1761 def warn(self, *msg, **opts):
1757 '''write warning message to output (stderr)
1762 """write warning message to output (stderr)
1758 1763
1759 1764 This adds an output label of "ui.warning".
1760 '''
1765 """
1761 1766 self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
1762 1767
1763 1768 def error(self, *msg, **opts):
1764 '''write error message to output (stderr)
1769 """write error message to output (stderr)
1765 1770
1766 1771 This adds an output label of "ui.error".
1767 '''
1772 """
1768 1773 self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
1769 1774
1770 1775 def note(self, *msg, **opts):
1771 '''write note to output (if ui.verbose is True)
1776 """write note to output (if ui.verbose is True)
1772 1777
1773 1778 This adds an output label of "ui.note".
1774 '''
1779 """
1775 1780 if self.verbose:
1776 1781 self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
1777 1782
1778 1783 def debug(self, *msg, **opts):
1779 '''write debug message to output (if ui.debugflag is True)
1784 """write debug message to output (if ui.debugflag is True)
1780 1785
1781 1786 This adds an output label of "ui.debug".
1782 '''
1787 """
1783 1788 if self.debugflag:
1784 1789 self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
1785 1790 self.log(b'debug', b'%s', b''.join(msg))
@@ -1875,12 +1880,12 b' class ui(object):'
1875 1880 errprefix=None,
1876 1881 blockedtag=None,
1877 1882 ):
1878 '''execute shell command with appropriate output stream. command
1883 """execute shell command with appropriate output stream. command
1879 1884 output will be redirected if fout is not stdout.
1880 1885
1881 1886 if command fails and onerr is None, return status, else raise onerr
1882 1887 object as exception.
1883 '''
1888 """
1884 1889 if blockedtag is None:
1885 1890 # Long cmds tend to be because of an absolute path on cmd. Keep
1886 1891 # the tail end instead
@@ -1907,9 +1912,9 b' class ui(object):'
1907 1912 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1908 1913
1909 1914 def traceback(self, exc=None, force=False):
1910 '''print exception traceback if traceback printing enabled or forced.
1915 """print exception traceback if traceback printing enabled or forced.
1911 1916 only to call in exception handler. returns true if traceback
1912 printed.'''
1917 printed."""
1913 1918 if self.tracebackflag or force:
1914 1919 if exc is None:
1915 1920 exc = sys.exc_info()
@@ -2011,7 +2016,7 b' class ui(object):'
2011 2016 self._loggers[name] = logger
2012 2017
2013 2018 def log(self, event, msgfmt, *msgargs, **opts):
2014 '''hook for logging facility extensions
2019 """hook for logging facility extensions
2015 2020
2016 2021 event should be a readily-identifiable subsystem, which will
2017 2022 allow filtering.
@@ -2020,7 +2025,7 b' class ui(object):'
2020 2025 *msgargs are %-formatted into it.
2021 2026
2022 2027 **opts currently has no defined meanings.
2023 '''
2028 """
2024 2029 if not self._loggers:
2025 2030 return
2026 2031 activeloggers = [
@@ -2040,7 +2045,7 b' class ui(object):'
2040 2045 self._loggers = registeredloggers
2041 2046
2042 2047 def label(self, msg, label):
2043 '''style msg based on supplied label
2048 """style msg based on supplied label
2044 2049
2045 2050 If some color mode is enabled, this will add the necessary control
2046 2051 characters to apply such color. In addition, 'debug' color mode adds
@@ -2048,7 +2053,7 b' class ui(object):'
2048 2053
2049 2054 ui.write(s, 'label') is equivalent to
2050 2055 ui.write(ui.label(s, 'label')).
2051 '''
2056 """
2052 2057 if self._colormode is not None:
2053 2058 return color.colorlabel(self, msg, label)
2054 2059 return msg
@@ -35,13 +35,13 b' urlreq = util.urlreq'
35 35
36 36
37 37 def escape(s, quote=None):
38 '''Replace special characters "&", "<" and ">" with HTML-safe sequences.
38 """Replace special characters "&", "<" and ">" with HTML-safe sequences.
39 39 If the optional flag quote is true, the quotation mark character (")
40 40 is also translated.
41 41
42 42 This is the same as cgi.escape in Python, but always operates on
43 43 bytes, whereas cgi.escape in Python 3 only works on unicodes.
44 '''
44 """
45 45 s = s.replace(b"&", b"&amp;")
46 46 s = s.replace(b"<", b"&lt;")
47 47 s = s.replace(b">", b"&gt;")
@@ -586,7 +586,7 b' def opener('
586 586 loggingopts=None,
587 587 sendaccept=True,
588 588 ):
589 '''
589 """
590 590 construct an opener suitable for urllib2
591 591 authinfo will be added to the password manager
592 592
@@ -600,7 +600,7 b' def opener('
600 600
601 601 ``sendaccept`` allows controlling whether the ``Accept`` request header
602 602 is sent. The header is sent by default.
603 '''
603 """
604 604 timeout = ui.configwith(float, b'http', b'timeout')
605 605 handlers = []
606 606
@@ -83,10 +83,22 b' if pycompat.ispy3:'
83 83 )
84 84 import urllib.response
85 85
86 urlreq._registeraliases(urllib.response, (b"addclosehook", b"addinfourl",))
86 urlreq._registeraliases(
87 urllib.response,
88 (
89 b"addclosehook",
90 b"addinfourl",
91 ),
92 )
87 93 import urllib.error
88 94
89 urlerr._registeraliases(urllib.error, (b"HTTPError", b"URLError",))
95 urlerr._registeraliases(
96 urllib.error,
97 (
98 b"HTTPError",
99 b"URLError",
100 ),
101 )
90 102 import http.server
91 103
92 104 httpserver._registeraliases(
@@ -179,12 +191,28 b' else:'
179 191 b"urlopen",
180 192 ),
181 193 )
182 urlreq._registeraliases(urlparse, (b"urlparse", b"urlunparse",))
194 urlreq._registeraliases(
195 urlparse,
196 (
197 b"urlparse",
198 b"urlunparse",
199 ),
200 )
183 201 urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
184 202 urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
185 urlerr._registeraliases(urllib2, (b"HTTPError", b"URLError",))
203 urlerr._registeraliases(
204 urllib2,
205 (
206 b"HTTPError",
207 b"URLError",
208 ),
209 )
186 210 httpserver._registeraliases(
187 BaseHTTPServer, (b"HTTPServer", b"BaseHTTPRequestHandler",)
211 BaseHTTPServer,
212 (
213 b"HTTPServer",
214 b"BaseHTTPRequestHandler",
215 ),
188 216 )
189 217 httpserver._registeraliases(
190 218 SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
@@ -1264,7 +1264,7 b' class cow(object):'
1264 1264
1265 1265
1266 1266 class sortdict(collections.OrderedDict):
1267 '''a simple sorted dictionary
1267 """a simple sorted dictionary
1268 1268
1269 1269 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
1270 1270 >>> d2 = d1.copy()
@@ -1276,7 +1276,7 b' class sortdict(collections.OrderedDict):'
1276 1276 >>> d1.insert(1, b'a.5', 0.5)
1277 1277 >>> d1
1278 1278 sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
1279 '''
1279 """
1280 1280
1281 1281 def __setitem__(self, key, value):
1282 1282 if key in self:
@@ -1761,8 +1761,8 b' def clearcachedproperty(obj, prop):'
1761 1761
1762 1762
1763 1763 def increasingchunks(source, min=1024, max=65536):
1764 '''return no less than min bytes per chunk while data remains,
1765 doubling min after each chunk until it reaches max'''
1764 """return no less than min bytes per chunk while data remains,
1765 doubling min after each chunk until it reaches max"""
1766 1766
1767 1767 def log2(x):
1768 1768 if not x:
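
The policy stated in the docstring, sketched over a flat bytes buffer (the real function consumes an iterator of chunks; names here are illustrative):

    def increasing_chunks(data, min_size=1024, max_size=65536):
        # yield progressively larger slices, doubling up to max_size
        pos, size = 0, min_size
        while pos < len(data):
            yield data[pos:pos + size]
            pos += size
            size = min(size * 2, max_size)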
@@ -1833,7 +1833,7 b' if pycompat.ispypy:'
1833 1833
1834 1834
1835 1835 def pathto(root, n1, n2):
1836 '''return the relative path from one place to another.
1836 """return the relative path from one place to another.
1837 1837 root should use os.sep to separate directories
1838 1838 n1 should use os.sep to separate directories
1839 1839 n2 should use "/" to separate directories
@@ -1842,7 +1842,7 b' def pathto(root, n1, n2):'
1842 1842 If n1 is a relative path, it's assumed it's
1843 1843 relative to root.
1844 1844 n2 should always be relative to root.
1845 '''
1845 """
1846 1846 if not n1:
1847 1847 return localpath(n2)
1848 1848 if os.path.isabs(n1):
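
A concrete instance of the contract above, assuming POSIX separators and treating n1 as the directory the result is relative to (values are illustrative):

    # pathto(b'/repo', b'a/b', b'c/d') == b'../../c/d'
    # i.e. climb out of a/b, then descend into c/d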
@@ -1892,7 +1892,7 b' def checksignature(func, depth=1):'
1892 1892
1893 1893
1894 1894 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1895 '''copy a file, preserving mode and optionally other stat info like
1895 """copy a file, preserving mode and optionally other stat info like
1896 1896 atime/mtime
1897 1897
1898 1898 checkambig argument is used with filestat, and is useful only if
@@ -1900,7 +1900,7 b' def copyfile(src, dest, hardlink=False, '
1900 1900 repo.wlock).
1901 1901
1902 1902 copystat and checkambig should be exclusive.
1903 '''
1903 """
1904 1904 assert not (copystat and checkambig)
1905 1905 oldstat = None
1906 1906 if os.path.lexists(dest):
@@ -2017,7 +2017,7 b' def copyfiles(src, dst, hardlink=None, p'
2017 2017
2018 2018
2019 2019 def checkwinfilename(path):
2020 r'''Check that the base-relative path is a valid filename on Windows.
2020 r"""Check that the base-relative path is a valid filename on Windows.
2021 2021 Returns None if the path is ok, or a UI string describing the problem.
2022 2022
2023 2023 >>> checkwinfilename(b"just/a/normal/path")
@@ -2039,7 +2039,7 b' def checkwinfilename(path):'
2039 2039 "filename ends with '\\', which is invalid on Windows"
2040 2040 >>> checkwinfilename(b"foo\\/bar")
2041 2041 "directory name ends with '\\', which is invalid on Windows"
2042 '''
2042 """
2043 2043 if path.endswith(b'\\'):
2044 2044 return _(b"filename ends with '\\', which is invalid on Windows")
2045 2045 if b'\\/' in path:
@@ -2175,11 +2175,11 b' class _re(object):'
2175 2175 _re2 = False
2176 2176
2177 2177 def compile(self, pat, flags=0):
2178 '''Compile a regular expression, using re2 if possible
2178 """Compile a regular expression, using re2 if possible
2179 2179
2180 2180 For best performance, use only re2-compatible regexp features. The
2181 2181 only flags from the re module that are re2-compatible are
2182 IGNORECASE and MULTILINE.'''
2182 IGNORECASE and MULTILINE."""
2183 2183 if _re2 is None:
2184 2184 self._checkre2()
2185 2185 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
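
The compile logic follows a common pattern: try re2 when the flags allow it, fall back to re. A standalone sketch assuming a pyre2-style binding that exposes `re2.compile` and `re2.error` (if the import fails, everything routes through `re`):

    import re

    try:
        import re2  # optional third-party binding; may be absent
    except ImportError:
        re2 = None

    def compile_pattern(pat, flags=0):
        # only IGNORECASE and MULTILINE are treated as re2-compatible
        if re2 is not None and not flags & ~(re.IGNORECASE | re.MULTILINE):
            try:
                return re2.compile(pat, flags)
            except re2.error:
                pass  # pattern needs features re2 lacks; fall back
        return re.compile(pat, flags)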
@@ -2195,11 +2195,11 b' class _re(object):'
2195 2195
2196 2196 @propertycache
2197 2197 def escape(self):
2198 '''Return the version of escape corresponding to self.compile.
2198 """Return the version of escape corresponding to self.compile.
2199 2199
2200 2200 This is imperfect because whether re2 or re is used for a particular
2201 2201 function depends on the flags, etc, but it's the best we can do.
2202 '''
2202 """
2203 2203 global _re2
2204 2204 if _re2 is None:
2205 2205 self._checkre2()
@@ -2215,7 +2215,7 b' re = _re()'
2215 2215
2216 2216
2217 2217 def fspath(name, root):
2218 '''Get name in the case stored in the filesystem
2218 """Get name in the case stored in the filesystem
2219 2219
2220 2220 The name should be relative to root, and be normcase-ed for efficiency.
2221 2221
@@ -2223,7 +2223,7 b' def fspath(name, root):'
2223 2223 called, for case-sensitive filesystems (simply because it's expensive).
2224 2224
2225 2225 The root should be normcase-ed, too.
2226 '''
2226 """
2227 2227
2228 2228 def _makefspathcacheentry(dir):
2229 2229 return {normcase(n): n for n in os.listdir(dir)}
@@ -2301,11 +2301,11 b' def endswithsep(path):'
2301 2301
2302 2302
2303 2303 def splitpath(path):
2304 '''Split path by os.sep.
2304 """Split path by os.sep.
2305 2305 Note that this function does not use os.altsep because this is
2306 2306 an alternative of simple "xxx.split(os.sep)".
2307 2307 It is recommended to use os.path.normpath() before using this
2308 function if needed.'''
2308 function if needed."""
2309 2309 return path.split(pycompat.ossep)
2310 2310
2311 2311
@@ -2459,7 +2459,7 b' class filestat(object):'
2459 2459
2460 2460
2461 2461 class atomictempfile(object):
2462 '''writable file object that atomically updates a file
2462 """writable file object that atomically updates a file
2463 2463
2464 2464 All writes will go to a temporary copy of the original file. Call
2465 2465 close() when you are done writing, and atomictempfile will rename
@@ -2470,7 +2470,7 b' class atomictempfile(object):'
2470 2470 checkambig argument of constructor is used with filestat, and is
2471 2471 useful only if target file is guarded by any lock (e.g. repo.lock
2472 2472 or repo.wlock).
2473 '''
2473 """
2474 2474
2475 2475 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2476 2476 self.__name = name # permanent name
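
The core trick behind `atomictempfile` is write-to-a-temp-file-then-rename. A minimal sketch assuming a POSIX filesystem, without the mode, createmode and ambiguity handling of the real class:

    import os
    import tempfile

    def atomic_write(name, data):
        # the temp file must live in the same directory for the rename
        # to stay on one filesystem (and therefore be atomic)
        dirname = os.path.dirname(name) or '.'
        fd, tmp = tempfile.mkstemp(dir=dirname, prefix='.tmp-')
        try:
            with os.fdopen(fd, 'wb') as fp:
                fp.write(data)
            os.rename(tmp, name)  # atomic replacement on POSIX
        except Exception:
            os.unlink(tmp)
            raise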
@@ -3365,7 +3365,7 b' timedcm._nested = 0'
3365 3365
3366 3366
3367 3367 def timed(func):
3368 '''Report the execution time of a function call to stderr.
3368 """Report the execution time of a function call to stderr.
3369 3369
3370 3370 During development, use as a decorator when you need to measure
3371 3371 the cost of a function, e.g. as follows:
@@ -3373,7 +3373,7 b' def timed(func):'
3373 3373 @util.timed
3374 3374 def foo(a, b, c):
3375 3375 pass
3376 '''
3376 """
3377 3377
3378 3378 def wrapper(*args, **kwargs):
3379 3379 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
@@ -3404,7 +3404,7 b' def timed(func):'
3404 3404
3405 3405
3406 3406 def sizetoint(s):
3407 '''Convert a space specifier to a byte count.
3407 """Convert a space specifier to a byte count.
3408 3408
3409 3409 >>> sizetoint(b'30')
3410 3410 30
@@ -3412,7 +3412,7 b' def sizetoint(s):'
3412 3412 2252
3413 3413 >>> sizetoint(b'6M')
3414 3414 6291456
3415 '''
3415 """
3416 3416 t = s.strip().lower()
3417 3417 try:
3418 3418 for k, u in _sizeunits:
@@ -3424,9 +3424,9 b' def sizetoint(s):'
3424 3424
3425 3425
3426 3426 class hooks(object):
3427 '''A collection of hook functions that can be used to extend a
3427 """A collection of hook functions that can be used to extend a
3428 3428 function's behavior. Hooks are called in lexicographic order,
3429 based on the names of their sources.'''
3429 based on the names of their sources."""
3430 3430
3431 3431 def __init__(self):
3432 3432 self._hooks = []
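
A self-contained approximation of the collection described (illustrative class, not Mercurial's API):

    class Hooks:
        """call registered hook functions in lexicographic source order"""

        def __init__(self):
            self._hooks = []

        def add(self, source, hook):
            self._hooks.append((source, hook))

        def __call__(self, *args):
            # sort by source name only; functions aren't comparable
            ordered = sorted(self._hooks, key=lambda pair: pair[0])
            return [hook(*args) for _, hook in ordered]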
@@ -3443,7 +3443,7 b' class hooks(object):'
3443 3443
3444 3444
3445 3445 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
3446 '''Yields lines for a nicely formatted stacktrace.
3446 """Yields lines for a nicely formatted stacktrace.
3447 3447 Skips the 'skip' last entries, then returns the last 'depth' entries.
3448 3448 Each file+linenumber is formatted according to fileline.
3449 3449 Each line is formatted according to line.
@@ -3453,7 +3453,7 b" def getstackframes(skip=0, line=b' %-*s "
3453 3453 function
3454 3454
3455 3455 Not to be used in production code but very convenient while developing.
3456 '''
3456 """
3457 3457 entries = [
3458 3458 (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
3459 3459 for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
@@ -3475,12 +3475,12 b' def debugstacktrace('
3475 3475 depth=0,
3476 3476 prefix=b'',
3477 3477 ):
3478 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3478 """Writes a message to f (stderr) with a nicely formatted stacktrace.
3479 3479 Skips the 'skip' entries closest to the call, then shows 'depth' entries.
3480 3480 By default it will flush stdout first.
3481 3481 It can be used everywhere and intentionally does not require a ui object.
3482 3482 Not to be used in production code but very convenient while developing.
3483 '''
3483 """
3484 3484 if otherf:
3485 3485 otherf.flush()
3486 3486 f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
@@ -641,19 +641,28 b' class sansiodecoder(object):'
641 641
642 642 elif special == SPECIAL_START_ARRAY:
643 643 self._collectionstack.append(
644 {b'remaining': value, b'v': [],}
644 {
645 b'remaining': value,
646 b'v': [],
647 }
645 648 )
646 649 self._state = self._STATE_WANT_ARRAY_VALUE
647 650
648 651 elif special == SPECIAL_START_MAP:
649 652 self._collectionstack.append(
650 {b'remaining': value, b'v': {},}
653 {
654 b'remaining': value,
655 b'v': {},
656 }
651 657 )
652 658 self._state = self._STATE_WANT_MAP_KEY
653 659
654 660 elif special == SPECIAL_START_SET:
655 661 self._collectionstack.append(
656 {b'remaining': value, b'v': set(),}
662 {
663 b'remaining': value,
664 b'v': set(),
665 }
657 666 )
658 667 self._state = self._STATE_WANT_SET_VALUE
659 668
@@ -684,7 +693,10 b' class sansiodecoder(object):'
684 693 lastc[b'remaining'] -= 1
685 694
686 695 self._collectionstack.append(
687 {b'remaining': value, b'v': newvalue,}
696 {
697 b'remaining': value,
698 b'v': newvalue,
699 }
688 700 )
689 701
690 702 # self._state doesn't need changed.
@@ -711,7 +723,10 b' class sansiodecoder(object):'
711 723 lastc[b'remaining'] -= 1
712 724
713 725 self._collectionstack.append(
714 {b'remaining': value, b'v': newvalue,}
726 {
727 b'remaining': value,
728 b'v': newvalue,
729 }
715 730 )
716 731
717 732 self._state = self._STATE_WANT_SET_VALUE
@@ -775,7 +790,10 b' class sansiodecoder(object):'
775 790 lastc[b'remaining'] -= 1
776 791
777 792 self._collectionstack.append(
778 {b'remaining': value, b'v': newvalue,}
793 {
794 b'remaining': value,
795 b'v': newvalue,
796 }
779 797 )
780 798
781 799 self._state = self._STATE_WANT_ARRAY_VALUE
@@ -789,7 +807,10 b' class sansiodecoder(object):'
789 807 lastc[b'remaining'] -= 1
790 808
791 809 self._collectionstack.append(
792 {b'remaining': value, b'v': newvalue,}
810 {
811 b'remaining': value,
812 b'v': newvalue,
813 }
793 814 )
794 815
795 816 self._state = self._STATE_WANT_MAP_KEY
@@ -803,7 +824,10 b' class sansiodecoder(object):'
803 824 lastc[b'remaining'] -= 1
804 825
805 826 self._collectionstack.append(
806 {b'remaining': value, b'v': newvalue,}
827 {
828 b'remaining': value,
829 b'v': newvalue,
830 }
807 831 )
808 832
809 833 self._state = self._STATE_WANT_SET_VALUE
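
Every branch above pushes a frame of the same shape; the 'remaining' counter is what tells a push-based decoder that a nested container has been fully populated. In miniature (plain Python, not the real wire format):

    # decode a length-prefixed container one value at a time
    stack = [{'remaining': 2, 'v': []}]  # outer array expecting two values
    for value in (b'a', b'b'):
        top = stack[-1]
        top['v'].append(value)
        top['remaining'] -= 1
        while stack and stack[-1]['remaining'] == 0:
            finished = stack.pop()       # this container is complete
    print(finished['v'])                 # [b'a', b'b']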
@@ -29,7 +29,8 b" SERVERROLE = b'server'"
29 29 CLIENTROLE = b'client'
30 30
31 31 compewireprotosupport = collections.namedtuple(
32 'compenginewireprotosupport', ('name', 'serverpriority', 'clientpriority'),
32 'compenginewireprotosupport',
33 ('name', 'serverpriority', 'clientpriority'),
33 34 )
34 35
35 36
@@ -53,12 +53,17 b' defaultdateformats = ('
53 53 b'%I:%M%p',
54 54 )
55 55
56 extendeddateformats = defaultdateformats + (b"%Y", b"%Y-%m", b"%b", b"%b %Y",)
56 extendeddateformats = defaultdateformats + (
57 b"%Y",
58 b"%Y-%m",
59 b"%b",
60 b"%b %Y",
61 )
57 62
58 63
59 64 def makedate(timestamp=None):
60 '''Return a unix timestamp (or the current time) as a (unixtime,
61 offset) tuple based off the local timezone.'''
65 """Return a unix timestamp (or the current time) as a (unixtime,
66 offset) tuple based off the local timezone."""
62 67 if timestamp is None:
63 68 timestamp = time.time()
64 69 if timestamp < 0:
@@ -115,7 +120,7 b' def shortdate(date=None):'
115 120
116 121 def parsetimezone(s):
117 122 """find a trailing timezone, if any, in string, and return a
118 (offset, remainder) pair"""
123 (offset, remainder) pair"""
119 124 s = pycompat.bytestr(s)
120 125
121 126 if s.endswith(b"GMT") or s.endswith(b"UTC"):
@@ -292,10 +292,10 b' def pipefilter(s, cmd):'
292 292
293 293
294 294 def tempfilter(s, cmd):
295 '''filter string S through a pair of temporary files with CMD.
295 """filter string S through a pair of temporary files with CMD.
296 296 CMD is used as a template to create the real command to be run,
297 297 with the strings INFILE and OUTFILE replaced by the real names of
298 the temporary files generated.'''
298 the temporary files generated."""
299 299 inname, outname = None, None
300 300 try:
301 301 infd, inname = pycompat.mkstemp(prefix=b'hg-filter-in-')
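
The INFILE/OUTFILE substitution works as in this standalone sketch (simplified cleanup and error handling; names are illustrative):

    import os
    import subprocess
    import tempfile

    def temp_filter(data, cmd_template):
        # run the command with INFILE/OUTFILE replaced by real temp paths
        in_fd, in_name = tempfile.mkstemp(prefix='filter-in-')
        out_fd, out_name = tempfile.mkstemp(prefix='filter-out-')
        os.close(out_fd)
        try:
            with os.fdopen(in_fd, 'wb') as fp:
                fp.write(data)
            cmd = cmd_template.replace('INFILE', in_name)
            cmd = cmd.replace('OUTFILE', out_name)
            subprocess.check_call(cmd, shell=True)
            with open(out_name, 'rb') as fp:
                return fp.read()
        finally:
            os.unlink(in_name)
            os.unlink(out_name)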
@@ -465,17 +465,16 b' else:'
465 465
466 466
467 467 def tonativeenv(env):
468 '''convert the environment from bytes to strings suitable for Popen(), etc.
469 '''
468 """convert the environment from bytes to strings suitable for Popen(), etc."""
470 469 return pycompat.rapply(tonativestr, env)
471 470
472 471
473 472 def system(cmd, environ=None, cwd=None, out=None):
474 '''enhanced shell command execution.
473 """enhanced shell command execution.
475 474 run with environment maybe modified, maybe in different dir.
476 475
477 476 if out is specified, it is assumed to be a file-like object that has a
478 write() method. stdout and stderr will be redirected to out.'''
477 write() method. stdout and stderr will be redirected to out."""
479 478 try:
480 479 stdout.flush()
481 480 except Exception:
@@ -685,14 +684,14 b' else:'
685 684 record_wait=None,
686 685 stdin_bytes=None,
687 686 ):
688 '''Spawn a command without waiting for it to finish.
687 """Spawn a command without waiting for it to finish.
689 688
690 689
691 690 When `record_wait` is not None, the spawned process will not be fully
693 692 detached and the `record_wait` argument will be called with the
693 692 `Subprocess.wait` function for the spawned process. This is mostly
694 693 useful for developers that need to make sure the spawned process
695 finished before a certain point. (e.g.: writing tests)'''
694 finished before a certain point. (e.g.: writing tests)"""
696 695 if pycompat.isdarwin:
697 696 # avoid crash in CoreFoundation in case another thread
698 697 # calls gui() while we're calling fork().
@@ -494,15 +494,15 b' def person(author):'
494 494
495 495 @attr.s(hash=True)
496 496 class mailmapping(object):
497 '''Represents a username/email key or value in
498 a mailmap file'''
497 """Represents a username/email key or value in
498 a mailmap file"""
499 499
500 500 email = attr.ib()
501 501 name = attr.ib(default=None)
502 502
503 503
504 504 def _ismailmaplineinvalid(names, emails):
505 '''Returns True if the parsed names and emails
505 """Returns True if the parsed names and emails
506 506 in a mailmap entry are invalid.
507 507
508 508 >>> # No names or emails fails
@@ -522,7 +522,7 b' def _ismailmaplineinvalid(names, emails)'
522 522 >>> emails = [b'proper@email.com', b'commit@email.com']
523 523 >>> _ismailmaplineinvalid(names, emails)
524 524 False
525 '''
525 """
526 526 return not emails or not names and len(emails) < 2
527 527
528 528
@@ -597,11 +597,13 b' def parsemailmap(mailmapcontent):'
597 597 continue
598 598
599 599 mailmapkey = mailmapping(
600 email=emails[-1], name=names[-1] if len(names) == 2 else None,
600 email=emails[-1],
601 name=names[-1] if len(names) == 2 else None,
601 602 )
602 603
603 604 mailmap[mailmapkey] = mailmapping(
604 email=emails[0], name=names[0] if names else None,
605 email=emails[0],
606 name=names[0] if names else None,
605 607 )
606 608
607 609 return mailmap
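Aside: usage of the key/value entries built above, assuming these helpers live in mercurial.utils.stringutil:

    from mercurial.utils.stringutil import mapname, parsemailmap

    mailmap = parsemailmap(
        b'Proper Name <proper@email.com> Commit Name <commit@email.com>\n'
    )
    mapname(mailmap, b'Commit Name <commit@email.com>')
    # -> b'Proper Name <proper@email.com>'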
@@ -659,7 +661,7 b' def mapname(mailmap, author):'
659 661
660 662
661 663 def isauthorwellformed(author):
662 '''Return True if the author field is well formed
664 """Return True if the author field is well formed
663 665 (ie "Contributor Name <contrib@email.dom>")
664 666
665 667 >>> isauthorwellformed(b'Good Author <good@author.com>')
@@ -676,7 +678,7 b' def isauthorwellformed(author):'
676 678 False
677 679 >>> isauthorwellformed(b'Bad Author <author>')
678 680 False
679 '''
681 """
680 682 return _correctauthorformat.match(author) is not None
681 683
682 684
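Aside: a plausible pattern for this well-formedness check (the actual _correctauthorformat regex may differ in detail):

    import re

    _author_re = re.compile(rb'^[^<>]+ <[^<>@]+@[^<>@]+>$')

    def isauthorwellformed_sketch(author):
        return _author_re.match(author) is not None

    assert isauthorwellformed_sketch(b'Good Author <good@author.com>')
    assert not isauthorwellformed_sketch(b'Bad Author <author>')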
@@ -83,12 +83,12 b' class abstractvfs(object):'
83 83
84 84 @util.propertycache
85 85 def open(self):
86 '''Open ``path`` file, which is relative to vfs root.
86 """Open ``path`` file, which is relative to vfs root.
87 87
88 88 Newly created directories are marked as "not to be indexed by
89 89 the content indexing service", if ``notindexed`` is specified
90 90 for "write" mode access.
91 '''
91 """
92 92 return self.__call__
93 93
94 94 def read(self, path):
@@ -142,9 +142,9 b' class abstractvfs(object):'
142 142 return os.path.islink(self.join(path))
143 143
144 144 def isfileorlink(self, path=None):
145 '''return whether path is a regular file or a symlink
145 """return whether path is a regular file or a symlink
146 146
147 Unlike isfile, this doesn't follow symlinks.'''
147 Unlike isfile, this doesn't follow symlinks."""
148 148 try:
149 149 st = self.lstat(path)
150 150 except OSError:
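Aside: the os-level idea behind isfileorlink, as a standalone sketch:

    import os
    import stat

    def isfileorlink_sketch(path):
        # lstat, unlike os.path.isfile, does not follow symlinks
        try:
            st = os.lstat(path)
        except OSError:
            return False
        return stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)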
@@ -228,8 +228,7 b' class abstractvfs(object):'
228 228 return util.readlink(self.join(path))
229 229
230 230 def removedirs(self, path=None):
231 """Remove a leaf directory and all empty intermediate ones
232 """
231 """Remove a leaf directory and all empty intermediate ones"""
233 232 return util.removedirs(self.join(path))
234 233
235 234 def rmdir(self, path=None):
@@ -332,7 +331,7 b' class abstractvfs(object):'
332 331
333 332
334 333 class vfs(abstractvfs):
335 '''Operate files relative to a base directory
334 """Operate files relative to a base directory
336 335
337 336 This class is used to hide the details of COW semantics and
338 337 remote file access from higher level code.
@@ -340,7 +339,7 b' class vfs(abstractvfs):'
340 339 'cacheaudited' should be enabled only if (a) vfs object is short-lived, or
341 340 (b) the base directory is managed by hg and considered sort-of append-only.
342 341 See pathutil.pathauditor() for details.
343 '''
342 """
344 343
345 344 def __init__(
346 345 self,
@@ -397,7 +396,7 b' class vfs(abstractvfs):'
397 396 auditpath=True,
398 397 makeparentdirs=True,
399 398 ):
400 '''Open ``path`` file, which is relative to vfs root.
399 """Open ``path`` file, which is relative to vfs root.
401 400
402 401 By default, parent directories are created as needed. Newly created
403 402 directories are marked as "not to be indexed by the content indexing
@@ -426,7 +425,7 b' class vfs(abstractvfs):'
426 425 truncation), if it is owned by another. Therefore, use
427 426 combination of append mode and checkambig=True only in limited
428 427 cases (see also issue5418 and issue5584 for detail).
429 '''
428 """
430 429 if auditpath:
431 430 self._auditpath(path, mode)
432 431 f = self.join(path)
@@ -385,13 +385,13 b' def _getfileinfo(name):'
385 385
386 386
387 387 def checkcertificatechain(cert, build=True):
388 '''Tests the given certificate to see if there is a complete chain to a
389 trusted root certificate. As a side effect, missing certificates are
390 downloaded and installed unless ``build=False``. True is returned if a
391 chain to a trusted root exists (even if built on the fly), otherwise
392 False. NB: A chain to a trusted root does NOT imply that the certificate
393 is valid.
394 '''
388 """Tests the given certificate to see if there is a complete chain to a
389 trusted root certificate. As a side effect, missing certificates are
390 downloaded and installed unless ``build=False``. True is returned if a
391 chain to a trusted root exists (even if built on the fly), otherwise
392 False. NB: A chain to a trusted root does NOT imply that the certificate
393 is valid.
394 """
395 395
396 396 chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
397 397
@@ -488,8 +488,8 b' def lasterrorwaspipeerror(err):'
488 488
489 489
490 490 def testpid(pid):
491 '''return True if pid is still running or unable to
492 determine, False otherwise'''
491 """return True if pid is still running or unable to
492 determine, False otherwise"""
493 493 h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid)
494 494 if h:
495 495 try:
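Aside: for contrast, the POSIX analogue of this Windows OpenProcess probe is the classic kill(pid, 0) trick (a sketch, not part of this file):

    import errno
    import os

    def testpid_posix_sketch(pid):
        try:
            os.kill(pid, 0)  # signal 0: existence probe, sends nothing
            return True
        except OSError as e:
            # EPERM means the process exists but belongs to someone else
            return e.errno != errno.ESRCH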
@@ -576,10 +576,10 b' def getuser():'
576 576
577 577
578 578 def setsignalhandler():
579 '''Register a termination handler for console events including
579 """Register a termination handler for console events including
580 580 CTRL+C. python signal handlers do not work well with socket
581 581 operations.
582 '''
582 """
583 583
584 584 def handler(event):
585 585 _kernel32.ExitProcess(1)
@@ -627,8 +627,8 b' def termsize():'
627 627
628 628
629 629 def enablevtmode():
630 '''Enable virtual terminal mode for the associated console. Return True if
631 enabled, else False.'''
630 """Enable virtual terminal mode for the associated console. Return True if
631 enabled, else False."""
632 632
633 633 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
634 634
@@ -195,13 +195,13 b' def _isatty(fp):'
195 195
196 196
197 197 class winstdout(object):
198 '''Some files on Windows misbehave.
198 """Some files on Windows misbehave.
199 199
200 200 When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
201 201
202 202 When writing too many bytes to a console at the same time, a "Not enough space"
203 203 error may happen. Python 3 already works around that.
204 '''
204 """
205 205
206 206 def __init__(self, fp):
207 207 self.fp = fp
@@ -497,11 +497,11 b' def isowner(st):'
497 497
498 498
499 499 def findexe(command):
500 '''Find executable for command searching like cmd.exe does.
500 """Find executable for command searching like cmd.exe does.
501 501 If command is a basename then PATH is searched for command.
502 502 PATH isn't searched if command is an absolute or relative path.
503 503 An extension from PATHEXT is found and added if not present.
504 If command isn't found None is returned.'''
504 If command isn't found None is returned."""
505 505 pathext = encoding.environ.get(b'PATHEXT', b'.COM;.EXE;.BAT;.CMD')
506 506 pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
507 507 if os.path.splitext(command)[1].lower() in pathexts:
@@ -529,10 +529,10 b' def findexe(command):'
529 529
530 530
531 531 def statfiles(files):
532 '''Stat each file in files. Yield each stat, or None if a file
532 """Stat each file in files. Yield each stat, or None if a file
533 533 does not exist or has a type we don't care about.
534 534
535 Cluster and cache stat per directory to minimize number of OS stat calls.'''
535 Cluster and cache stat per directory to minimize number of OS stat calls."""
536 536 dircache = {} # dirname -> filename -> status | None if file does not exist
537 537 getkind = stat.S_IFMT
538 538 for nf in files:
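Aside: one way to realize the per-directory caching the docstring promises (a simplified sketch; the real Windows code extracts stat data from a single directory listing):

    import os

    def statfiles_sketch(files):
        dircache = {}  # dirname -> set of directory entries
        for nf in files:
            d, base = os.path.split(nf)
            entries = dircache.get(d)
            if entries is None:
                try:
                    entries = set(os.listdir(d or '.'))
                except OSError:
                    entries = set()
                dircache[d] = entries
            if base not in entries:
                yield None  # missing file costs no extra stat call
                continue
            try:
                yield os.lstat(nf)
            except OSError:
                yield None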
@@ -630,14 +630,14 b' class cachestat(object):'
630 630
631 631
632 632 def lookupreg(key, valname=None, scope=None):
633 ''' Look up a key/value name in the Windows registry.
633 """Look up a key/value name in the Windows registry.
634 634
635 635 valname: value name. If unspecified, the default value for the key
636 636 is used.
637 637 scope: optionally specify scope for registry lookup, this can be
638 638 a sequence of scopes to look up in order. Default (CURRENT_USER,
639 639 LOCAL_MACHINE).
640 '''
640 """
641 641 if scope is None:
642 642 scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
643 643 elif not isinstance(scope, (list, tuple)):
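Aside: a minimal sketch with the standard winreg module, assuming the default HKCU-then-HKLM order described above:

    import winreg

    def lookupreg_sketch(key, valname=None):
        for scope in (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE):
            try:
                with winreg.OpenKey(scope, key) as hkey:
                    # valname=None queries the key's default value
                    value, _kind = winreg.QueryValueEx(hkey, valname)
                    return value
            except OSError:
                continue
        return None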
@@ -456,7 +456,10 b' def createcommandresponseeosframes('
456 456 def createalternatelocationresponseframe(stream, requestid, location):
457 457 data = {
458 458 b'status': b'redirect',
459 b'location': {b'url': location.url, b'mediatype': location.mediatype,},
459 b'location': {
460 b'url': location.url,
461 b'mediatype': location.mediatype,
462 },
460 463 }
461 464
462 465 for a in (
@@ -490,7 +493,12 b' def createalternatelocationresponseframe'
490 493 def createcommanderrorresponse(stream, requestid, message, args=None):
491 494 # TODO should this be using a list of {'msg': ..., 'args': {}} so atom
492 495 # formatting works consistently?
493 m = {b'status': b'error', b'error': {b'message': message,}}
496 m = {
497 b'status': b'error',
498 b'error': {
499 b'message': message,
500 },
501 }
494 502
495 503 if args:
496 504 m[b'error'][b'args'] = args
@@ -510,7 +518,12 b' def createerrorframe(stream, requestid, '
510 518 assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
511 519
512 520 payload = b''.join(
513 cborutil.streamencode({b'type': errtype, b'message': [{b'msg': msg}],})
521 cborutil.streamencode(
522 {
523 b'type': errtype,
524 b'message': [{b'msg': msg}],
525 }
526 )
514 527 )
515 528
516 529 yield stream.makeframe(
@@ -1292,14 +1305,18 b' class serverreactor(object):'
1292 1305 for frame in gen:
1293 1306 yield frame
1294 1307
1295 return b'sendframes', {b'framegen': makegen(),}
1308 return b'sendframes', {
1309 b'framegen': makegen(),
1310 }
1296 1311
1297 1312 def _handlesendframes(self, framegen):
1298 1313 if self._deferoutput:
1299 1314 self._bufferedframegens.append(framegen)
1300 1315 return b'noop', {}
1301 1316 else:
1302 return b'sendframes', {b'framegen': framegen,}
1317 return b'sendframes', {
1318 b'framegen': framegen,
1319 }
1303 1320
1304 1321 def onservererror(self, stream, requestid, msg):
1305 1322 ensureserverstream(stream)
@@ -1351,7 +1368,9 b' class serverreactor(object):'
1351 1368 return s
1352 1369
1353 1370 def _makeerrorresult(self, msg):
1354 return b'error', {b'message': msg,}
1371 return b'error', {
1372 b'message': msg,
1373 }
1355 1374
1356 1375 def _makeruncommandresult(self, requestid):
1357 1376 entry = self._receivingcommands[requestid]
@@ -1397,7 +1416,9 b' class serverreactor(object):'
1397 1416 )
1398 1417
1399 1418 def _makewantframeresult(self):
1400 return b'wantframe', {b'state': self._state,}
1419 return b'wantframe', {
1420 b'state': self._state,
1421 }
1401 1422
1402 1423 def _validatecommandrequestframe(self, frame):
1403 1424 new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1802,7 +1823,9 b' class clientreactor(object):'
1802 1823 return (
1803 1824 request,
1804 1825 b'sendframes',
1805 {b'framegen': self._makecommandframes(request),},
1826 {
1827 b'framegen': self._makecommandframes(request),
1828 },
1806 1829 )
1807 1830
1808 1831 def flushcommands(self):
@@ -1835,7 +1858,9 b' class clientreactor(object):'
1835 1858 for frame in self._makecommandframes(request):
1836 1859 yield frame
1837 1860
1838 return b'sendframes', {b'framegen': makeframes(),}
1861 return b'sendframes', {
1862 b'framegen': makeframes(),
1863 }
1839 1864
1840 1865 def _makecommandframes(self, request):
1841 1866 """Emit frames to issue a command request.
@@ -1851,7 +1876,9 b' class clientreactor(object):'
1851 1876
1852 1877 payload = b''.join(
1853 1878 cborutil.streamencode(
1854 {b'contentencodings': self._clientcontentencoders,}
1879 {
1880 b'contentencodings': self._clientcontentencoders,
1881 }
1855 1882 )
1856 1883 )
1857 1884
@@ -33,14 +33,23 b' SUPPORTED_ELLIPSESCAP = (ELLIPSESCAP1, E'
33 33
34 34 # All available wire protocol transports.
35 35 TRANSPORTS = {
36 SSHV1: {b'transport': b'ssh', b'version': 1,},
36 SSHV1: {
37 b'transport': b'ssh',
38 b'version': 1,
39 },
37 40 SSHV2: {
38 41 b'transport': b'ssh',
39 42 # TODO mark as version 2 once all commands are implemented.
40 43 b'version': 1,
41 44 },
42 b'http-v1': {b'transport': b'http', b'version': 1,},
43 HTTP_WIREPROTO_V2: {b'transport': b'http', b'version': 2,},
45 b'http-v1': {
46 b'transport': b'http',
47 b'version': 1,
48 },
49 HTTP_WIREPROTO_V2: {
50 b'transport': b'http',
51 b'version': 2,
52 },
44 53 }
45 54
46 55
@@ -36,7 +36,7 b' urlreq = util.urlreq'
36 36
37 37
38 38 def batchable(f):
39 '''annotation for batchable methods
39 """annotation for batchable methods
40 40
41 41 Such methods must implement a coroutine as follows:
42 42
@@ -56,7 +56,7 b' def batchable(f):'
56 56 method, but adds the original method as an attribute called "batchable",
57 57 which is used by remotebatch to split the call into separate encoding and
58 58 decoding phases.
59 '''
59 """
60 60
61 61 def plain(*args, **opts):
62 62 batchable = f(*args, **opts)
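Aside: the two-phase coroutine protocol can be demonstrated with a toy re-implementation (all names here are hypothetical; the real decorator defers the second phase until the batch response arrives):

    def batchable_sketch(f):
        def plain(*args, **opts):
            gen = f(*args, **opts)
            encoded_args, fut = next(gen)  # phase 1: encode the request
            fut.value = encoded_args       # pretend the server echoed it
            return next(gen)               # phase 2: decode the response
        plain.batchable = f
        return plain

    class _future(object):
        value = None

    @batchable_sketch
    def shout(text):
        f = _future()
        yield {'text': text.upper()}, f
        yield f.value['text']

    shout('hi')  # -> 'HI'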
@@ -474,7 +474,7 b' class wirepeer(repository.peer):'
474 474 return changegroupmod.cg1unpacker(f, b'UN')
475 475
476 476 def unbundle(self, bundle, heads, url):
477 '''Send cg (a readable file-like object representing the
477 """Send cg (a readable file-like object representing the
478 478 changegroup to push, typically a chunkbuffer object) to the
479 479 remote server as a bundle.
480 480
@@ -485,7 +485,7 b' class wirepeer(repository.peer):'
485 485
486 486 `url` is the url the client thinks it's pushing to, which is
487 487 visible to hooks.
488 '''
488 """
489 489
490 490 if heads != [b'force'] and self.capable(b'unbundlehash'):
491 491 heads = wireprototypes.encodelist(
@@ -655,6 +655,5 b' class wirepeer(repository.peer):'
655 655 raise NotImplementedError()
656 656
657 657 def _abort(self, exception):
658 """clearly abort the wire protocol connection and raise the exception
659 """
658 """clearly abort the wire protocol connection and raise the exception"""
660 659 raise NotImplementedError()
@@ -602,10 +602,10 b' def pushkey(repo, proto, namespace, key,'
602 602
603 603 @wireprotocommand(b'stream_out', permission=b'pull')
604 604 def stream(repo, proto):
605 '''If the server supports streaming clone, it advertises the "stream"
605 """If the server supports streaming clone, it advertises the "stream"
606 606 capability with a value representing the version and flags of the repo
607 607 it is serving. Client checks to see if it understands the format.
608 '''
608 """
609 609 return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
610 610
611 611
@@ -982,7 +982,10 b' def capabilitiesv2(repo, proto):'
982 982 b'revisions': {
983 983 b'type': b'list',
984 984 b'example': [
985 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
985 {
986 b'type': b'changesetexplicit',
987 b'nodes': [b'abcdef...'],
988 }
986 989 ],
987 990 },
988 991 b'fields': {
@@ -1166,14 +1169,20 b' def makefilematcher(repo, pathfilter):'
1166 1169 b'default': lambda: False,
1167 1170 b'example': True,
1168 1171 },
1169 b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
1172 b'nodes': {
1173 b'type': b'list',
1174 b'example': [b'0123456...'],
1175 },
1170 1176 b'fields': {
1171 1177 b'type': b'set',
1172 1178 b'default': set,
1173 1179 b'example': {b'parents', b'revision'},
1174 1180 b'validvalues': {b'parents', b'revision', b'linknode'},
1175 1181 },
1176 b'path': {b'type': b'bytes', b'example': b'foo.txt',},
1182 b'path': {
1183 b'type': b'bytes',
1184 b'example': b'foo.txt',
1185 },
1177 1186 },
1178 1187 permission=b'pull',
1179 1188 # TODO censoring a file revision won't invalidate the cache.
@@ -1262,7 +1271,10 b' def filesdatacapabilities(repo, proto):'
1262 1271 b'revisions': {
1263 1272 b'type': b'list',
1264 1273 b'example': [
1265 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
1274 {
1275 b'type': b'changesetexplicit',
1276 b'nodes': [b'abcdef...'],
1277 }
1266 1278 ],
1267 1279 },
1268 1280 },
@@ -1375,7 +1387,12 b' def knownv2(repo, proto, nodes):'
1375 1387
1376 1388 @wireprotocommand(
1377 1389 b'listkeys',
1378 args={b'namespace': {b'type': b'bytes', b'example': b'ns',},},
1390 args={
1391 b'namespace': {
1392 b'type': b'bytes',
1393 b'example': b'ns',
1394 },
1395 },
1379 1396 permission=b'pull',
1380 1397 )
1381 1398 def listkeysv2(repo, proto, namespace):
@@ -1390,7 +1407,12 b' def listkeysv2(repo, proto, namespace):'
1390 1407
1391 1408 @wireprotocommand(
1392 1409 b'lookup',
1393 args={b'key': {b'type': b'bytes', b'example': b'foo',},},
1410 args={
1411 b'key': {
1412 b'type': b'bytes',
1413 b'example': b'foo',
1414 },
1415 },
1394 1416 permission=b'pull',
1395 1417 )
1396 1418 def lookupv2(repo, proto, key):
@@ -1415,7 +1437,10 b' def manifestdatacapabilities(repo, proto'
1415 1437 @wireprotocommand(
1416 1438 b'manifestdata',
1417 1439 args={
1418 b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
1440 b'nodes': {
1441 b'type': b'list',
1442 b'example': [b'0123456...'],
1443 },
1419 1444 b'haveparents': {
1420 1445 b'type': b'bool',
1421 1446 b'default': lambda: False,
@@ -1427,7 +1452,10 b' def manifestdatacapabilities(repo, proto'
1427 1452 b'example': {b'parents', b'revision'},
1428 1453 b'validvalues': {b'parents', b'revision'},
1429 1454 },
1430 b'tree': {b'type': b'bytes', b'example': b'',},
1455 b'tree': {
1456 b'type': b'bytes',
1457 b'example': b'',
1458 },
1431 1459 },
1432 1460 permission=b'pull',
1433 1461 cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
@@ -1485,10 +1513,22 b' def manifestdata(repo, proto, haveparent'
1485 1513 @wireprotocommand(
1486 1514 b'pushkey',
1487 1515 args={
1488 b'namespace': {b'type': b'bytes', b'example': b'ns',},
1489 b'key': {b'type': b'bytes', b'example': b'key',},
1490 b'old': {b'type': b'bytes', b'example': b'old',},
1491 b'new': {b'type': b'bytes', b'example': b'new',},
1516 b'namespace': {
1517 b'type': b'bytes',
1518 b'example': b'ns',
1519 },
1520 b'key': {
1521 b'type': b'bytes',
1522 b'example': b'key',
1523 },
1524 b'old': {
1525 b'type': b'bytes',
1526 b'example': b'old',
1527 },
1528 b'new': {
1529 b'type': b'bytes',
1530 b'example': b'new',
1531 },
1492 1532 },
1493 1533 permission=b'push',
1494 1534 )
@@ -116,8 +116,8 b' else:'
116 116
117 117
118 118 def worthwhile(ui, costperop, nops, threadsafe=True):
119 '''try to determine whether the benefit of multiple processes can
120 outweigh the cost of starting them'''
119 """try to determine whether the benefit of multiple processes can
120 outweigh the cost of starting them"""
121 121
122 122 if not threadsafe and _DISALLOW_THREAD_UNSAFE:
123 123 return False
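Aside: the cost model behind this check looks roughly like the following (a sketch; the 0.01s startup cost and 0.15s threshold are illustrative numbers, not guaranteed to match the source):

    def worthwhile_sketch(costperop, nops, numworkers, startupcost=0.01):
        # parallelize only if the compute time saved beats the spawn cost
        linear = costperop * nops
        benefit = linear - (startupcost * numworkers + linear / numworkers)
        return benefit >= 0.15

    worthwhile_sketch(0.001, 10000, 4)  # True: 10s of work, ~7.5s saved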
@@ -131,7 +131,7 b' def worthwhile(ui, costperop, nops, thre'
131 131 def worker(
132 132 ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
133 133 ):
134 '''run a function, possibly in parallel in multiple worker
134 """run a function, possibly in parallel in multiple worker
135 135 processes.
136 136
137 137 returns a progress iterator
@@ -153,7 +153,7 b' def worker('
153 153 threadsafe - whether work items are thread safe and can be executed using
154 154 a thread-based worker. Should be disabled for CPU heavy tasks that don't
155 155 release the GIL.
156 '''
156 """
157 157 enabled = ui.configbool(b'worker', b'enabled')
158 158 if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
159 159 return _platformworker(ui, func, staticargs, args, hasretval)
@@ -306,10 +306,10 b' def _posixworker(ui, func, staticargs, a'
306 306
307 307
308 308 def _posixexitstatus(code):
309 '''convert a posix exit status into the same form returned by
309 """convert a posix exit status into the same form returned by
310 310 os.spawnv
311 311
312 returns None if the process was stopped instead of exiting'''
312 returns None if the process was stopped instead of exiting"""
313 313 if os.WIFEXITED(code):
314 314 return os.WEXITSTATUS(code)
315 315 elif os.WIFSIGNALED(code):
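Aside: the truncated branch presumably mirrors os.spawnv's convention; a complete sketch:

    import os

    def _posixexitstatus_sketch(code):
        # os.spawnv convention: >= 0 is an exit code, < 0 a negated signal
        if os.WIFEXITED(code):
            return os.WEXITSTATUS(code)
        elif os.WIFSIGNALED(code):
            return -os.WTERMSIG(code)
        return None  # stopped, not exited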
@@ -423,7 +423,7 b' else:'
423 423
424 424
425 425 def partition(lst, nslices):
426 '''partition a list into N slices of roughly equal size
426 """partition a list into N slices of roughly equal size
427 427
428 428 The current strategy takes every Nth element from the input. If
429 429 we ever write workers that need to preserve grouping in input
@@ -450,6 +450,6 b' def partition(lst, nslices):'
450 450 What we should really be doing is have workers read filenames from a
451 451 ordered queue. This preserves locality and also keeps any worker from
452 452 getting more than one file out of balance.
453 '''
453 """
454 454 for i in range(nslices):
455 455 yield lst[i::nslices]
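Aside: the every-Nth-element strategy in action:

    def partition(lst, nslices):
        for i in range(nslices):
            yield lst[i::nslices]

    list(partition(list(range(10)), 3))
    # -> [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]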
@@ -816,7 +816,8 b' class buildhgexe(build_ext):'
816 816 if sys.version_info[0] >= 3:
817 817 fsdecode = os.fsdecode
818 818 dest = os.path.join(
819 os.path.dirname(self.hgtarget), fsdecode(dllbasename),
819 os.path.dirname(self.hgtarget),
820 fsdecode(dllbasename),
820 821 )
821 822
822 823 if not os.path.exists(dest):
@@ -1066,7 +1067,7 b' class hginstall(install):'
1066 1067
1067 1068
1068 1069 class hginstalllib(install_lib):
1069 '''
1070 """
1070 1071 This is a specialization of install_lib that replaces the copy_file used
1071 1072 there so that it supports setting the mode of files after copying them,
1072 1073 instead of just preserving the mode that the files originally had. If your
@@ -1075,7 +1076,7 b' class hginstalllib(install_lib):'
1075 1076
1076 1077 Note that just passing keep_permissions=False to copy_file would be
1077 1078 insufficient, as it might still be applying a umask.
1078 '''
1079 """
1079 1080
1080 1081 def run(self):
1081 1082 realcopyfile = file_util.copy_file
@@ -1103,11 +1104,11 b' class hginstalllib(install_lib):'
1103 1104
1104 1105
1105 1106 class hginstallscripts(install_scripts):
1106 '''
1107 """
1107 1108 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1108 1109 the configured directory for modules. If possible, the path is made relative
1109 1110 to the directory for scripts.
1110 '''
1111 """
1111 1112
1112 1113 def initialize_options(self):
1113 1114 install_scripts.initialize_options(self)
@@ -1400,8 +1401,7 b' class RustCompilationError(CCompilerErro'
1400 1401
1401 1402
1402 1403 class RustExtension(Extension):
1403 """Base classes for concrete Rust Extension classes.
1404 """
1404 """Base classes for concrete Rust Extension classes."""
1405 1405
1406 1406 rusttargetdir = os.path.join('rust', 'target', 'release')
1407 1407
@@ -1547,7 +1547,10 b' extmodules = ['
1547 1547 include_dirs=common_include_dirs,
1548 1548 extra_compile_args=common_cflags,
1549 1549 depends=common_depends
1550 + ['mercurial/cext/charencode.h', 'mercurial/cext/revlog.h',],
1550 + [
1551 'mercurial/cext/charencode.h',
1552 'mercurial/cext/revlog.h',
1553 ],
1551 1554 ),
1552 1555 Extension(
1553 1556 'mercurial.cext.osutil',
@@ -1635,10 +1638,19 b" if os.name == 'nt':"
1635 1638 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1636 1639
1637 1640 packagedata = {
1638 'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'dummycert.pem',],
1639 'mercurial.defaultrc': ['*.rc',],
1640 'mercurial.helptext': ['*.txt',],
1641 'mercurial.helptext.internals': ['*.txt',],
1641 'mercurial': [
1642 'locale/*/LC_MESSAGES/hg.mo',
1643 'dummycert.pem',
1644 ],
1645 'mercurial.defaultrc': [
1646 '*.rc',
1647 ],
1648 'mercurial.helptext': [
1649 '*.txt',
1650 ],
1651 'mercurial.helptext.internals': [
1652 '*.txt',
1653 ],
1642 1654 }
1643 1655
1644 1656
@@ -44,16 +44,24 b' configtable = {}'
44 44 configitem = registrar.configitem(configtable)
45 45
46 46 configitem(
47 b'badserver', b'closeafteraccept', default=False,
47 b'badserver',
48 b'closeafteraccept',
49 default=False,
48 50 )
49 51 configitem(
50 b'badserver', b'closeafterrecvbytes', default=b'0',
52 b'badserver',
53 b'closeafterrecvbytes',
54 default=b'0',
51 55 )
52 56 configitem(
53 b'badserver', b'closeaftersendbytes', default=b'0',
57 b'badserver',
58 b'closeaftersendbytes',
59 default=b'0',
54 60 )
55 61 configitem(
56 b'badserver', b'closebeforeaccept', default=False,
62 b'badserver',
63 b'closebeforeaccept',
64 default=False,
57 65 )
58 66
59 67 # We can't adjust __class__ on a socket instance. So we define a proxy type.
@@ -27,7 +27,9 b' configtable = {}'
27 27 configitem = registrar.configitem(configtable)
28 28
29 29 configitem(
30 b'fakedirstatewritetime', b'fakenow', default=None,
30 b'fakedirstatewritetime',
31 b'fakenow',
32 default=None,
31 33 )
32 34
33 35 parsers = policy.importmod('parsers')
@@ -14,7 +14,9 b' configtable = {}'
14 14 configitem = registrar.configitem(configtable)
15 15
16 16 configitem(
17 b'fakepatchtime', b'fakenow', default=None,
17 b'fakepatchtime',
18 b'fakenow',
19 default=None,
18 20 )
19 21
20 22
@@ -139,10 +139,20 b' def extsetup(ui):'
139 139
140 140 # Register flag processors for each extension
141 141 flagutil.addflagprocessor(
142 REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
142 REVIDX_NOOP,
143 (
144 noopdonothingread,
145 noopdonothing,
146 validatehash,
147 ),
143 148 )
144 149 flagutil.addflagprocessor(
145 REVIDX_BASE64, (b64decode, b64encode, bypass,),
150 REVIDX_BASE64,
151 (
152 b64decode,
153 b64encode,
154 bypass,
155 ),
146 156 )
147 157 flagutil.addflagprocessor(
148 158 REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
@@ -1047,7 +1047,7 b' def has_black():'
1047 1047 version_regex = b'black, version ([0-9a-b.]+)'
1048 1048 version = matchoutput(blackcmd, version_regex)
1049 1049 sv = distutils.version.StrictVersion
1050 return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')
1050 return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
1051 1051
1052 1052
1053 1053 @check('pytype', 'the pytype type checker')
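Aside: this version gate works because distutils' StrictVersion understands black's pre-release tags:

    from distutils.version import StrictVersion

    StrictVersion('20.8b1') > StrictVersion('19.10b0')  # True
    # so environments running the older black now fail has_black()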
@@ -44,8 +44,7 b' def check(*args, **kwargs):'
44 44
45 45
46 46 def roundtrips(data, decode, encode):
47 """helper to tests function that must do proper encode/decode roundtripping
48 """
47 """helper to tests function that must do proper encode/decode roundtripping"""
49 48
50 49 @given(data)
51 50 def testroundtrips(value):
@@ -71,6 +70,11 b' bytestrings = ('
71 70 st.builds(
72 71 lambda s, e: s.encode(e),
73 72 st.text(),
74 st.sampled_from(['utf-8', 'utf-16',]),
73 st.sampled_from(
74 [
75 'utf-8',
76 'utf-16',
77 ]
78 ),
75 79 )
76 80 ) | st.binary()
@@ -534,7 +534,9 b' def getparser():'
534 534 help="install and use chg wrapper in place of hg",
535 535 )
536 536 hgconf.add_argument(
537 "--chg-debug", action="store_true", help="show chg debug logs",
537 "--chg-debug",
538 action="store_true",
539 help="show chg debug logs",
538 540 )
539 541 hgconf.add_argument("--compiler", help="compiler to build with")
540 542 hgconf.add_argument(
@@ -1193,7 +1195,10 b' class Test(unittest.TestCase):'
1193 1195 if self._keeptmpdir:
1194 1196 log(
1195 1197 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
1196 % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
1198 % (
1199 _bytes2sys(self._testtmp),
1200 _bytes2sys(self._threadtmp),
1201 )
1197 1202 )
1198 1203 else:
1199 1204 try:
@@ -2091,11 +2096,11 b' class TTest(Test):'
2091 2096
2092 2097 @staticmethod
2093 2098 def parsehghaveoutput(lines):
2094 '''Parse hghave log lines.
2099 """Parse hghave log lines.
2095 2100
2096 2101 Return tuple of lists (missing, failed):
2097 2102 * the missing/unknown features
2098 * the features for which existence check failed'''
2103 * the features for which existence check failed"""
2099 2104 missing = []
2100 2105 failed = []
2101 2106 for line in lines:
@@ -2155,12 +2160,10 b' class TestResult(unittest._TextTestResul'
2155 2160 self.color = pygmentspresent
2156 2161
2157 2162 def onStart(self, test):
2158 """ Can be overriden by custom TestResult
2159 """
2163 """Can be overriden by custom TestResult"""
2160 2164
2161 2165 def onEnd(self):
2162 """ Can be overriden by custom TestResult
2163 """
2166 """Can be overriden by custom TestResult"""
2164 2167
2165 2168 def addFailure(self, test, reason):
2166 2169 self.failures.append((test, reason))
@@ -3168,7 +3171,9 b' class TestRunner(object):'
3168 3171 vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
3169 3172 vlog("# Using PATH", os.environ["PATH"])
3170 3173 vlog(
3171 "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
3174 "# Using",
3175 _bytes2sys(IMPL_PATH),
3176 _bytes2sys(osenvironb[IMPL_PATH]),
3172 3177 )
3173 3178 vlog("# Writing to directory", _bytes2sys(self._outputdir))
3174 3179
@@ -78,7 +78,13 b" testfilefixup(case0, b'22', [b'', b'22']"
78 78 testfilefixup(case0, b'222', [b'', b'222'])
79 79
80 80 # input case 1: 3 lines, each commit adds one line
81 case1 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3]),])
81 case1 = buildcontents(
82 [
83 (b'1', [1, 2, 3]),
84 (b'2', [2, 3]),
85 (b'3', [3]),
86 ]
87 )
82 88
83 89 # 1:1 line mapping
84 90 testfilefixup(case1, b'123', case1)
@@ -121,7 +127,13 b" testfilefixup(case1, b'1a23', case1)"
121 127 testfilefixup(case1, b'12b3', case1)
122 128
123 129 # input case 2: delete in the middle
124 case2 = buildcontents([(b'11', [1, 2]), (b'22', [1]), (b'33', [1, 2]),])
130 case2 = buildcontents(
131 [
132 (b'11', [1, 2]),
133 (b'22', [1]),
134 (b'33', [1, 2]),
135 ]
136 )
125 137
126 138 # deletion (optimize code should make it 2 chunks)
127 139 testfilefixup(
@@ -136,7 +148,13 b" testfilefixup(case2, b'aaaa', [b'', b'aa"
136 148 testfilefixup(case2, b'aaa', case2)
137 149
138 150 # input case 3: rev 3 reverts rev 2
139 case3 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2]), (b'3', [1, 2, 3]),])
151 case3 = buildcontents(
152 [
153 (b'1', [1, 2, 3]),
154 (b'2', [2]),
155 (b'3', [1, 2, 3]),
156 ]
157 )
140 158
141 159 # 1:1 line mapping
142 160 testfilefixup(case3, b'13', case3)
@@ -159,7 +177,13 b' case4 = buildcontents('
159 177 [
160 178 (b'1', [1, 2, 3]),
161 179 (b'2', [2, 3]),
162 (b'3', [1, 2,]),
180 (
181 b'3',
182 [
183 1,
184 2,
185 ],
186 ),
163 187 (b'4', [1, 3]),
164 188 (b'5', [3]),
165 189 (b'6', [2, 3]),
@@ -183,7 +207,13 b" testfilefixup(case4, b'28', [b'', b'34',"
183 207 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
184 208
185 209 # input case 5: replace a small chunk which is near a deleted line
186 case5 = buildcontents([(b'12', [1, 2]), (b'3', [1]), (b'4', [1, 2]),])
210 case5 = buildcontents(
211 [
212 (b'12', [1, 2]),
213 (b'3', [1]),
214 (b'4', [1, 2]),
215 ]
216 )
187 217
188 218 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
189 219
@@ -24,13 +24,13 b' if pycompat.ispy3:'
24 24
25 25
26 26 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
27 '''nodes: total number of nodes in the graph
27 """nodes: total number of nodes in the graph
28 28 rootprob: probability that a new node (not 0) will be a root
29 29 mergeprob: probability that, excluding roots, a node will be a merge
30 30 prevprob: probability that p1 will be the previous node
31 31
32 32 return value is a graph represented as an adjacency list.
33 '''
33 """
34 34 graph = [None] * nodes
35 35 for i in xrange(nodes):
36 36 if i == 0 or rng.random() < rootprob:
@@ -228,7 +228,11 b' def test_missingancestors_explicit():'
228 228 print("remaining (sorted): %s" % sorted(list(revs)))
229 229
230 230 for i, (bases, revs) in enumerate(
231 (({10}, {11}), ({11}, {10}), ({7}, {9, 11}),)
231 (
232 ({10}, {11}),
233 ({11}, {10}),
234 ({7}, {9, 11}),
235 )
232 236 ):
233 237 print("%% missingancestors(), example %d" % (i + 1))
234 238 missanc = ancestor.incrementalmissingancestors(graph.get, bases)
@@ -30,11 +30,17 b' class thing(object):'
30 30 class localthing(thing):
31 31 def foo(self, one, two=None):
32 32 if one:
33 return b"%s and %s" % (one, two,)
33 return b"%s and %s" % (
34 one,
35 two,
36 )
34 37 return b"Nope"
35 38
36 39 def bar(self, b, a):
37 return b"%s und %s" % (b, a,)
40 return b"%s und %s" % (
41 b,
42 a,
43 )
38 44
39 45 def greet(self, name=None):
40 46 return b"Hello, %s" % name
@@ -176,7 +182,15 b' class remotething(thing):'
176 182 args = b','.join(n + b'=' + escapearg(v) for n, v in args)
177 183 req.append(name + b':' + args)
178 184 req = b';'.join(req)
179 res = self._submitone(b'batch', [(b'cmds', req,)])
185 res = self._submitone(
186 b'batch',
187 [
188 (
189 b'cmds',
190 req,
191 )
192 ],
193 )
180 194 for r in res.split(b';'):
181 195 yield r
182 196
@@ -190,7 +204,16 b' class remotething(thing):'
190 204
191 205 @wireprotov1peer.batchable
192 206 def foo(self, one, two=None):
193 encargs = [(b'one', mangle(one),), (b'two', mangle(two),)]
207 encargs = [
208 (
209 b'one',
210 mangle(one),
211 ),
212 (
213 b'two',
214 mangle(two),
215 ),
216 ]
194 217 encresref = wireprotov1peer.future()
195 218 yield encargs, encresref
196 219 yield unmangle(encresref.value)
@@ -198,14 +221,33 b' class remotething(thing):'
198 221 @wireprotov1peer.batchable
199 222 def bar(self, b, a):
200 223 encresref = wireprotov1peer.future()
201 yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref
224 yield [
225 (
226 b'b',
227 mangle(b),
228 ),
229 (
230 b'a',
231 mangle(a),
232 ),
233 ], encresref
202 234 yield unmangle(encresref.value)
203 235
204 236 # greet is coded directly. It therefore does not support batching. If it
205 237 # does appear in a batch, the batch is split around greet, and the call to
206 238 # greet is done in its own roundtrip.
207 239 def greet(self, name=None):
208 return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
240 return unmangle(
241 self._submitone(
242 b'greet',
243 [
244 (
245 b'name',
246 mangle(name),
247 )
248 ],
249 )
250 )
209 251
210 252
211 253 # demo remote usage
@@ -690,7 +690,12 b' class ArrayTests(TestCase):'
690 690
691 691 self.assertEqual(
692 692 list(cborutil.streamencodearrayfromiter(source)),
693 [b'\x9f', b'\x43', b'foo', b'\xff',],
693 [
694 b'\x9f',
695 b'\x43',
696 b'foo',
697 b'\xff',
698 ],
694 699 )
695 700
696 701 dest = b''.join(cborutil.streamencodearrayfromiter(source))
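Aside: for reference, the expected byte sequence in this test decodes as standard CBOR framing (facts from RFC 7049, not from this test file):

    # 0x9f   start of indefinite-length array (major type 4)
    # 0x43   byte string of length 3 (major type 2, 0x40 | 3)
    # b'foo' the three payload bytes
    # 0xff   "break" terminator closing the array
    encoded = b'\x9f' + b'\x43' + b'foo' + b'\xff'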
@@ -799,7 +804,11 b' class ArrayTests(TestCase):'
799 804 class SetTests(TestCase):
800 805 def testempty(self):
801 806 self.assertEqual(
802 list(cborutil.streamencode(set())), [b'\xd9\x01\x02', b'\x80',]
807 list(cborutil.streamencode(set())),
808 [
809 b'\xd9\x01\x02',
810 b'\x80',
811 ],
803 812 )
804 813
805 814 self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
@@ -914,14 +923,26 b' class SetTests(TestCase):'
914 923 ):
915 924 cborutil.decodeall(encoded)
916 925
917 encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\x80',]) # empty array
926 encoded = b''.join(
927 [
928 b'\xd9\x01\x02',
929 b'\x81',
930 b'\x80',
931 ]
932 ) # empty array
918 933
919 934 with self.assertRaisesRegex(
920 935 cborutil.CBORDecodeError, 'collections not allowed as set values'
921 936 ):
922 937 cborutil.decodeall(encoded)
923 938
924 encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\xa0',]) # empty map
939 encoded = b''.join(
940 [
941 b'\xd9\x01\x02',
942 b'\x81',
943 b'\xa0',
944 ]
945 ) # empty map
925 946
926 947 with self.assertRaisesRegex(
927 948 cborutil.CBORDecodeError, 'collections not allowed as set values'
@@ -1059,7 +1080,13 b' class MapTests(TestCase):'
1059 1080 ):
1060 1081 cborutil.decodeall(encoded)
1061 1082
1062 encoded = b''.join([b'\xa1', b'\x80', b'\x43foo',]) # empty array
1083 encoded = b''.join(
1084 [
1085 b'\xa1',
1086 b'\x80',
1087 b'\x43foo',
1088 ]
1089 ) # empty array
1063 1090
1064 1091 with self.assertRaisesRegex(
1065 1092 cborutil.CBORDecodeError, 'collections not supported as map keys'
@@ -1260,7 +1287,10 b' class DecodeallTests(TestCase):'
1260 1287
1261 1288 def testpartialinput(self):
1262 1289 encoded = b''.join(
1263 [b'\x82', b'\x01',] # array of 2 elements # integer 1
1290 [
1291 b'\x82',
1292 b'\x01',
1293 ] # array of 2 elements # integer 1
1264 1294 )
1265 1295
1266 1296 with self.assertRaisesRegex(
@@ -76,7 +76,9 b' if not os.path.isdir(os.path.join(cwd, "'
76 76 sys.exit(0)
77 77
78 78 files = subprocess.check_output(
79 "hg files --print0 \"%s\"" % fileset, shell=True, cwd=cwd,
79 "hg files --print0 \"%s\"" % fileset,
80 shell=True,
81 cwd=cwd,
80 82 ).split(b'\0')
81 83
82 84 if sys.version_info[0] >= 3:
@@ -69,29 +69,60 b' class linelogtests(unittest.TestCase):'
69 69 ll.replacelines(1, 0, 0, 0, 3)
70 70 self.assertEqual(
71 71 [(l.rev, l.linenum) for l in ll.annotate(1)],
72 [(1, 0), (1, 1), (1, 2),],
72 [
73 (1, 0),
74 (1, 1),
75 (1, 2),
76 ],
73 77 )
74 78 # Replace line 1 with a new line
75 79 ll.replacelines(2, 1, 2, 1, 2)
76 80 self.assertEqual(
77 81 [(l.rev, l.linenum) for l in ll.annotate(2)],
78 [(1, 0), (2, 1), (1, 2),],
82 [
83 (1, 0),
84 (2, 1),
85 (1, 2),
86 ],
79 87 )
80 88 # delete a line out of 2
81 89 ll.replacelines(3, 1, 2, 0, 0)
82 90 self.assertEqual(
83 [(l.rev, l.linenum) for l in ll.annotate(3)], [(1, 0), (1, 2),]
91 [(l.rev, l.linenum) for l in ll.annotate(3)],
92 [
93 (1, 0),
94 (1, 2),
95 ],
84 96 )
85 97 # annotation of 1 is unchanged
86 98 self.assertEqual(
87 99 [(l.rev, l.linenum) for l in ll.annotate(1)],
88 [(1, 0), (1, 1), (1, 2),],
100 [
101 (1, 0),
102 (1, 1),
103 (1, 2),
104 ],
89 105 )
90 106 ll.annotate(3) # set internal state to revision 3
91 107 start = ll.getoffset(0)
92 108 end = ll.getoffset(1)
93 self.assertEqual(ll.getalllines(start, end), [(1, 0), (2, 1), (1, 1),])
94 self.assertEqual(ll.getalllines(), [(1, 0), (2, 1), (1, 1), (1, 2),])
109 self.assertEqual(
110 ll.getalllines(start, end),
111 [
112 (1, 0),
113 (2, 1),
114 (1, 1),
115 ],
116 )
117 self.assertEqual(
118 ll.getalllines(),
119 [
120 (1, 0),
121 (2, 1),
122 (1, 1),
123 (1, 2),
124 ],
125 )
95 126
96 127 def testparseclinelogfile(self):
97 128 # This data is what the replacements in testsimpleedits
@@ -116,14 +147,26 b' class linelogtests(unittest.TestCase):'
116 147 llc = linelog.linelog.fromdata(data)
117 148 self.assertEqual(
118 149 [(l.rev, l.linenum) for l in llc.annotate(1)],
119 [(1, 0), (1, 1), (1, 2),],
150 [
151 (1, 0),
152 (1, 1),
153 (1, 2),
154 ],
120 155 )
121 156 self.assertEqual(
122 157 [(l.rev, l.linenum) for l in llc.annotate(2)],
123 [(1, 0), (2, 1), (1, 2),],
158 [
159 (1, 0),
160 (2, 1),
161 (1, 2),
162 ],
124 163 )
125 164 self.assertEqual(
126 [(l.rev, l.linenum) for l in llc.annotate(3)], [(1, 0), (1, 2),]
165 [(l.rev, l.linenum) for l in llc.annotate(3)],
166 [
167 (1, 0),
168 (1, 2),
169 ],
127 170 )
128 171 # Check we emit the same bytecode.
129 172 ll = linelog.linelog()
@@ -73,7 +73,10 b' class teststate(object):'
73 73 self._acquirecalled,
74 74 called,
75 75 'expected acquire to be %s but was actually %s'
76 % (self._tocalled(called), self._tocalled(self._acquirecalled),),
76 % (
77 self._tocalled(called),
78 self._tocalled(self._acquirecalled),
79 ),
77 80 )
78 81
79 82 def resetacquirefn(self):
@@ -84,7 +87,10 b' class teststate(object):'
84 87 self._releasecalled,
85 88 called,
86 89 'expected release to be %s but was actually %s'
87 % (self._tocalled(called), self._tocalled(self._releasecalled),),
90 % (
91 self._tocalled(called),
92 self._tocalled(self._releasecalled),
93 ),
88 94 )
89 95
90 96 def assertpostreleasecalled(self, called):
@@ -104,7 +110,10 b' class teststate(object):'
104 110 actual,
105 111 exists,
106 112 'expected lock to %s but actually did %s'
107 % (self._toexists(exists), self._toexists(actual),),
113 % (
114 self._toexists(exists),
115 self._toexists(actual),
116 ),
108 117 )
109 118
110 119 def _tocalled(self, called):