formating: upgrade to black 20.8b1...
Augie Fackler
r46554:89a2afe3 default

The requested changes are too big and content was truncated.
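Nearly every hunk below is the mechanical result of one behavior black added in the 20.8 series: the "magic trailing comma". When a call or collection literal already ends in a trailing comma, black now explodes it onto one element per line instead of packing it onto a single line. A minimal sketch of the rewrite (the configure helper is illustrative, not a function from this diff):

# Both forms are equivalent Python; black 20.8b1 rewrites the first
# into the second because of the trailing comma before the closing paren.
def configure(section, name, default=None):
    """Stand-in for the registrar-style helpers reformatted below."""
    return (section, name, default)

configure('blackbox', 'dirty', default=False,)  # pre-20.8 layout

configure(  # 20.8b1 layout: one argument per line
    'blackbox',
    'dirty',
    default=False,
)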

@@ -152,7 +152,11 b" ASSUME_ROLE_POLICY_DOCUMENT = '''"
152
152
153
153
154 IAM_INSTANCE_PROFILES = {
154 IAM_INSTANCE_PROFILES = {
155 'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
155 'ephemeral-ec2-1': {
156 'roles': [
157 'ephemeral-ec2-role-1',
158 ],
159 }
156 }
160 }
157
161
158
162
@@ -469,10 +473,22 b' def find_image(ec2resource, owner_id, na'
469
473
470 images = ec2resource.images.filter(
474 images = ec2resource.images.filter(
471 Filters=[
475 Filters=[
472 {'Name': 'owner-id', 'Values': [owner_id],},
476 {
473 {'Name': 'state', 'Values': ['available'],},
477 'Name': 'owner-id',
474 {'Name': 'image-type', 'Values': ['machine'],},
478 'Values': [owner_id],
475 {'Name': 'name', 'Values': [name],},
479 },
480 {
481 'Name': 'state',
482 'Values': ['available'],
483 },
484 {
485 'Name': 'image-type',
486 'Values': ['machine'],
487 },
488 {
489 'Name': 'name',
490 'Values': [name],
491 },
476 ]
492 ]
477 )
493 )
478
494
@@ -519,10 +535,13 b' def ensure_security_groups(ec2resource, '
519 print('adding security group %s' % actual)
535 print('adding security group %s' % actual)
520
536
521 group_res = ec2resource.create_security_group(
537 group_res = ec2resource.create_security_group(
522 Description=group['description'], GroupName=actual,
538 Description=group['description'],
539 GroupName=actual,
523 )
540 )
524
541
525 group_res.authorize_ingress(IpPermissions=group['ingress'],)
542 group_res.authorize_ingress(
543 IpPermissions=group['ingress'],
544 )
526
545
527 security_groups[name] = group_res
546 security_groups[name] = group_res
528
547
@@ -614,7 +633,10 b' def wait_for_ssm(ssmclient, instances):'
614 while True:
633 while True:
615 res = ssmclient.describe_instance_information(
634 res = ssmclient.describe_instance_information(
616 Filters=[
635 Filters=[
617 {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
636 {
637 'Key': 'InstanceIds',
638 'Values': [i.id for i in instances],
639 },
618 ],
640 ],
619 )
641 )
620
642
@@ -636,7 +658,9 b' def run_ssm_command(ssmclient, instances'
636 InstanceIds=[i.id for i in instances],
658 InstanceIds=[i.id for i in instances],
637 DocumentName=document_name,
659 DocumentName=document_name,
638 Parameters=parameters,
660 Parameters=parameters,
639 CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
661 CloudWatchOutputConfig={
662 'CloudWatchOutputEnabled': True,
663 },
640 )
664 )
641
665
642 command_id = res['Command']['CommandId']
666 command_id = res['Command']['CommandId']
@@ -645,7 +669,8 b' def run_ssm_command(ssmclient, instances'
645 while True:
669 while True:
646 try:
670 try:
647 res = ssmclient.get_command_invocation(
671 res = ssmclient.get_command_invocation(
648 CommandId=command_id, InstanceId=instance.id,
672 CommandId=command_id,
673 InstanceId=instance.id,
649 )
674 )
650 except botocore.exceptions.ClientError as e:
675 except botocore.exceptions.ClientError as e:
651 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
676 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -799,19 +824,32 b' def create_ami_from_instance('
799 instance.stop()
824 instance.stop()
800
825
801 ec2client.get_waiter('instance_stopped').wait(
826 ec2client.get_waiter('instance_stopped').wait(
802 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
827 InstanceIds=[instance.id],
828 WaiterConfig={
829 'Delay': 5,
830 },
803 )
831 )
804 print('%s is stopped' % instance.id)
832 print('%s is stopped' % instance.id)
805
833
806 image = instance.create_image(Name=name, Description=description,)
834 image = instance.create_image(
835 Name=name,
836 Description=description,
837 )
807
838
808 image.create_tags(
839 image.create_tags(
809 Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
840 Tags=[
841 {
842 'Key': 'HGIMAGEFINGERPRINT',
843 'Value': fingerprint,
844 },
845 ]
810 )
846 )
811
847
812 print('waiting for image %s' % image.id)
848 print('waiting for image %s' % image.id)
813
849
814 ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
850 ec2client.get_waiter('image_available').wait(
851 ImageIds=[image.id],
852 )
815
853
816 print('image %s available as %s' % (image.id, image.name))
854 print('image %s available as %s' % (image.id, image.name))
817
855
@@ -837,7 +875,9 b' def ensure_linux_dev_ami(c: AWSConnectio'
837 ssh_username = 'admin'
875 ssh_username = 'admin'
838 elif distro == 'debian10':
876 elif distro == 'debian10':
839 image = find_image(
877 image = find_image(
840 ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
878 ec2resource,
879 DEBIAN_ACCOUNT_ID_2,
880 'debian-10-amd64-20190909-10',
841 )
881 )
842 ssh_username = 'admin'
882 ssh_username = 'admin'
843 elif distro == 'ubuntu18.04':
883 elif distro == 'ubuntu18.04':
@@ -1066,7 +1106,9 b' def temporary_linux_dev_instances('
1066
1106
1067
1107
1068 def ensure_windows_dev_ami(
1108 def ensure_windows_dev_ami(
1069 c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME,
1109 c: AWSConnection,
1110 prefix='hg-',
1111 base_image_name=WINDOWS_BASE_IMAGE_NAME,
1070 ):
1112 ):
1071 """Ensure Windows Development AMI is available and up-to-date.
1113 """Ensure Windows Development AMI is available and up-to-date.
1072
1114
@@ -1190,7 +1232,9 b' def ensure_windows_dev_ami('
1190 ssmclient,
1232 ssmclient,
1191 [instance],
1233 [instance],
1192 'AWS-RunPowerShellScript',
1234 'AWS-RunPowerShellScript',
1193 {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
1235 {
1236 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
1237 },
1194 )
1238 )
1195
1239
1196 # Reboot so all updates are fully applied.
1240 # Reboot so all updates are fully applied.
@@ -1202,7 +1246,10 b' def ensure_windows_dev_ami('
1202 print('rebooting instance %s' % instance.id)
1246 print('rebooting instance %s' % instance.id)
1203 instance.stop()
1247 instance.stop()
1204 ec2client.get_waiter('instance_stopped').wait(
1248 ec2client.get_waiter('instance_stopped').wait(
1205 InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
1249 InstanceIds=[instance.id],
1250 WaiterConfig={
1251 'Delay': 5,
1252 },
1206 )
1253 )
1207
1254
1208 instance.start()
1255 instance.start()
@@ -282,16 +282,20 b' def get_parser():'
         help='Path for local state files',
     )
     parser.add_argument(
-        '--aws-region', help='AWS region to use', default='us-west-2',
+        '--aws-region',
+        help='AWS region to use',
+        default='us-west-2',
     )
 
     subparsers = parser.add_subparsers()
 
     sp = subparsers.add_parser(
-        'bootstrap-linux-dev', help='Bootstrap Linux development environments',
+        'bootstrap-linux-dev',
+        help='Bootstrap Linux development environments',
     )
     sp.add_argument(
-        '--distros', help='Comma delimited list of distros to bootstrap',
+        '--distros',
+        help='Comma delimited list of distros to bootstrap',
     )
     sp.add_argument(
         '--parallel',
@@ -312,13 +316,17 b' def get_parser():'
     sp.set_defaults(func=bootstrap_windows_dev)
 
     sp = subparsers.add_parser(
-        'build-all-windows-packages', help='Build all Windows packages',
+        'build-all-windows-packages',
+        help='Build all Windows packages',
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use',
+        '--version',
+        help='Mercurial version string to use',
     )
     sp.add_argument(
         '--base-image-name',
@@ -328,7 +336,8 b' def get_parser():'
     sp.set_defaults(func=build_all_windows_packages)
 
     sp = subparsers.add_parser(
-        'build-inno', help='Build Inno Setup installer(s)',
+        'build-inno',
+        help='Build Inno Setup installer(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -346,10 +355,13 b' def get_parser():'
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -359,7 +371,8 b' def get_parser():'
     sp.set_defaults(func=build_inno)
 
     sp = subparsers.add_parser(
-        'build-windows-wheel', help='Build Windows wheel(s)',
+        'build-windows-wheel',
+        help='Build Windows wheel(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -376,7 +389,9 b' def get_parser():'
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
         '--base-image-name',
@@ -402,10 +417,13 b' def get_parser():'
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -421,11 +439,15 b' def get_parser():'
     sp.set_defaults(func=terminate_ec2_instances)
 
     sp = subparsers.add_parser(
-        'purge-ec2-resources', help='Purge all EC2 resources managed by us',
+        'purge-ec2-resources',
+        help='Purge all EC2 resources managed by us',
     )
     sp.set_defaults(func=purge_ec2_resources)
 
-    sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
+    sp = subparsers.add_parser(
+        'run-tests-linux',
+        help='Run tests on Linux',
+    )
     sp.add_argument(
         '--distro',
         help='Linux distribution to run tests on',
@@ -468,10 +490,13 b' def get_parser():'
     sp.set_defaults(func=run_tests_linux)
 
     sp = subparsers.add_parser(
-        'run-tests-windows', help='Run tests on Windows',
+        'run-tests-windows',
+        help='Run tests on Windows',
     )
     sp.add_argument(
-        '--instance-type', help='EC2 instance type to use', default='t3.medium',
+        '--instance-type',
+        help='EC2 instance type to use',
+        default='t3.medium',
     )
     sp.add_argument(
         '--python-version',
@@ -486,7 +511,8 b' def get_parser():'
         default='x64',
     )
     sp.add_argument(
-        '--test-flags', help='Extra command line flags to pass to run-tests.py',
+        '--test-flags',
+        help='Extra command line flags to pass to run-tests.py',
     )
     sp.add_argument(
         '--base-image-name',
@@ -514,10 +540,12 b' def get_parser():'
         help='Skip uploading to www.mercurial-scm.org',
     )
    sp.add_argument(
-        '--ssh-username', help='SSH username for mercurial-scm.org',
+        '--ssh-username',
+        help='SSH username for mercurial-scm.org',
     )
     sp.add_argument(
-        'version', help='Mercurial version string to locate local packages',
+        'version',
+        help='Mercurial version string to locate local packages',
     )
     sp.set_defaults(func=publish_windows_artifacts)
 
@@ -362,7 +362,8 b' def build_inno_installer('
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_INNO_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -427,7 +428,8 b' def build_wix_installer('
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_WIX_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -460,7 +462,10 b' def run_tests(winrm_client, python_versi'
 
     python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
 
-    ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
+    ps = RUN_TESTS.format(
+        python_path=python_path,
+        test_flags=test_flags or '',
+    )
 
     run_powershell(winrm_client, ps)
 
@@ -213,15 +213,19 b' def replacetokens(tokens, opts):'
             fn = t.string
 
             # *attr() builtins don't accept byte strings to 2nd argument.
-            if fn in (
-                'getattr',
-                'setattr',
-                'hasattr',
-                'safehasattr',
-                'wrapfunction',
-                'wrapclass',
-                'addattr',
-            ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
+            if (
+                fn
+                in (
+                    'getattr',
+                    'setattr',
+                    'hasattr',
+                    'safehasattr',
+                    'wrapfunction',
+                    'wrapclass',
+                    'addattr',
+                )
+                and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
+            ):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
                     _ensuresysstr(arg1idx)
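The hunk above shows a second 20.8 change: a long condition is wrapped in its own parentheses, with each operand of the in/and chain on its own line and the operator leading the line. A simplified stand-in (the names below are illustrative, not the script's real helpers):

# Sketch of how black 20.8b1 breaks a long boolean expression: the
# expression gains enclosing parentheses and operators lead each line.
fn = 'getattr'
allow_attr_methods = True
preceded_by_dot = False

if (
    fn
    in (
        'getattr',
        'setattr',
        'hasattr',
    )
    and (allow_attr_methods or not preceded_by_dot)
):
    print('rewrite the second argument of %s' % fn)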
@@ -620,13 +620,17 b' cfilters = ['
 ]
 
 inutilpats = [
-    [(r'\bui\.', "don't use ui in util"),],
+    [
+        (r'\bui\.', "don't use ui in util"),
+    ],
     # warnings
     [],
 ]
 
 inrevlogpats = [
-    [(r'\brepo\.', "don't use repo in revlog"),],
+    [
+        (r'\brepo\.', "don't use repo in revlog"),
+    ],
     # warnings
     [],
 ]
@@ -44,7 +44,11 b' def build_inno(pyoxidizer_target=None, p'
         )
     else:
         inno.build_with_py2exe(
-            SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
+            SOURCE_DIR,
+            build_dir,
+            pathlib.Path(python),
+            iscc,
+            version=version,
         )
 
 
@@ -198,7 +198,11 b' def build_installer('
     except jinja2.TemplateSyntaxError as e:
         raise Exception(
             'template syntax error at %s:%d: %s'
-            % (e.name, e.lineno, e.message,)
+            % (
+                e.name,
+                e.lineno,
+                e.message,
+            )
         )
 
     content = template.render(package_files=package_files)
@@ -517,7 +517,10 b' def run_wix_packaging('
     args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
 
     args.extend(
-        [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
+        [
+            str(build_dir / 'stage.wixobj'),
+            str(build_dir / 'mercurial.wixobj'),
+        ]
     )
 
     subprocess.run(args, cwd=str(source_dir), check=True)
@@ -291,7 +291,9 b' try:'
         experimental=True,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -310,19 +312,29 b' except TypeError:'
     # compatibility fix for a11fd395e83f
     # hg version: 5.2
     configitem(
-        b'perf', b'presleep', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'presleep',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'stub', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'stub',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'parentscount', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'parentscount',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'all-timing', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'all-timing',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -330,7 +342,9 b' except TypeError:'
         default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'run-limits', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'run-limits',
+        default=mercurial.configitems.dynamicdefault,
     )
 
 
@@ -385,8 +399,7 b' def gettimer(ui, opts=None):'
         from mercurial import node
 
         class defaultformatter(object):
-            """Minimized composition of baseformatter and plainformatter
-            """
+            """Minimized composition of baseformatter and plainformatter"""
 
             def __init__(self, ui, topic, opts):
                 self._ui = ui
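The docstring hunks here and below come from black 20.8's docstring processing: a docstring whose closing quotes sat alone on the next line is collapsed to a single line when it fits, and this changeset also switches ''' delimiters to """. A small before/after sketch (the function names are illustrative):

# Before: closing quotes on their own line.
def getsvfs_before(repo):
    """Return appropriate object to access files under .hg/store
    """
    return getattr(repo, 'svfs', None)

# After black 20.8b1: the one-line docstring is joined.
def getsvfs_after(repo):
    """Return appropriate object to access files under .hg/store"""
    return getattr(repo, 'svfs', None)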
@@ -658,8 +671,7 b' def getbranchmapsubsettable():'
 
 
 def getsvfs(repo):
-    """Return appropriate object to access files under .hg/store
-    """
+    """Return appropriate object to access files under .hg/store"""
     # for "historical portability":
     # repo.svfs has been available since 2.3 (or 7034365089bf)
     svfs = getattr(repo, 'svfs', None)
@@ -670,8 +682,7 b' def getsvfs(repo):'
 
 
 def getvfs(repo):
-    """Return appropriate object to access files under .hg
-    """
+    """Return appropriate object to access files under .hg"""
     # for "historical portability":
     # repo.vfs has been available since 2.3 (or 7034365089bf)
     vfs = getattr(repo, 'vfs', None)
@@ -682,8 +693,7 b' def getvfs(repo):'
 
 
 def repocleartagscachefunc(repo):
-    """Return the function to clear tags cache according to repo internal API
-    """
+    """Return the function to clear tags cache according to repo internal API"""
     if util.safehasattr(repo, b'_tagscache'):  # since 2.0 (or 9dca7653b525)
         # in this case, setattr(repo, '_tagscache', None) or so isn't
         # correct way to clear tags cache, because existing code paths
@@ -847,7 +857,9 b' def perfheads(ui, repo, **opts):'
 @command(
     b'perftags',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perftags(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -900,8 +912,7 b' def perfancestorset(ui, repo, revset, **'
 
 @command(b'perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
-    """benchmark discovery between local repo and the peer at given path
-    """
+    """benchmark discovery between local repo and the peer at given path"""
     repos = [repo, None]
     timer, fm = gettimer(ui, opts)
     path = ui.expandpath(path)
@@ -919,7 +930,9 b' def perfdiscovery(ui, repo, path, **opts'
 @command(
     b'perfbookmarks',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perfbookmarks(ui, repo, **opts):
     """benchmark parsing bookmarks from disk to memory"""
@@ -1184,8 +1197,7 b' def perfdirstate(ui, repo, **opts):'
 
 @command(b'perfdirstatedirs', formatteropts)
 def perfdirstatedirs(ui, repo, **opts):
-    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache
-    """
+    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     repo.dirstate.hasdir(b"a")
@@ -1245,8 +1257,7 b' def perfdirfoldmap(ui, repo, **opts):'
 
 @command(b'perfdirstatewrite', formatteropts)
 def perfdirstatewrite(ui, repo, **opts):
-    """benchmap the time it take to write a dirstate on disk
-    """
+    """benchmap the time it take to write a dirstate on disk"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     ds = repo.dirstate
@@ -1359,7 +1370,9 b' def perfpathcopies(ui, repo, rev1, rev2,'
 
 @command(
     b'perfphases',
-    [(b'', b'full', False, b'include file reading time too'),],
+    [
+        (b'', b'full', False, b'include file reading time too'),
+    ],
     b"",
 )
 def perfphases(ui, repo, **opts):
@@ -1839,7 +1852,10 b' def perfmoonwalk(ui, repo, **opts):'
 
 @command(
     b'perftemplating',
-    [(b'r', b'rev', [], b'revisions to run the template on'),] + formatteropts,
+    [
+        (b'r', b'rev', [], b'revisions to run the template on'),
+    ]
+    + formatteropts,
 )
 def perftemplating(ui, repo, testedtemplate=None, **opts):
     """test the rendering time of a given template"""
@@ -2193,10 +2209,18 b' def perfhelperpathcopies(ui, repo, revs='
             }
             if dostats:
                 alldata['nbrevs'].append(
-                    (data['nbrevs'], base.hex(), parent.hex(),)
+                    (
+                        data['nbrevs'],
+                        base.hex(),
+                        parent.hex(),
+                    )
                 )
                 alldata['nbmissingfiles'].append(
-                    (data['nbmissingfiles'], base.hex(), parent.hex(),)
+                    (
+                        data['nbmissingfiles'],
+                        base.hex(),
+                        parent.hex(),
+                    )
                 )
             if dotiming:
                 begin = util.timer()
@@ -2207,10 +2231,18 b' def perfhelperpathcopies(ui, repo, revs='
                 data['nbrenamedfiles'] = len(renames)
             if dostats:
                 alldata['time'].append(
-                    (data['time'], base.hex(), parent.hex(),)
+                    (
+                        data['time'],
+                        base.hex(),
+                        parent.hex(),
+                    )
                 )
                 alldata['nbrenames'].append(
-                    (data['nbrenamedfiles'], base.hex(), parent.hex(),)
+                    (
+                        data['nbrenamedfiles'],
+                        base.hex(),
+                        parent.hex(),
+                    )
                 )
             fm.startitem()
             fm.data(**data)
@@ -3321,7 +3353,9 b' def perfrevset(ui, repo, expr, clear=Fal'
 
 @command(
     b'perfvolatilesets',
-    [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),]
+    [
+        (b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
+    ]
     + formatteropts,
 )
 def perfvolatilesets(ui, repo, *names, **opts):
@@ -3807,8 +3841,7 b' def perflrucache('
     ],
 )
 def perfwrite(ui, repo, **opts):
-    """microbenchmark ui.write (and others)
-    """
+    """microbenchmark ui.write (and others)"""
     opts = _byteskwargs(opts)
 
     write = getattr(ui, _sysstr(opts[b'write_method']))
@@ -9,12 +9,12 b' from mercurial import ('
 
 
 def diffstat(ui, repo, **kwargs):
-    '''Example usage:
+    """Example usage:
 
     [hooks]
     commit.diffstat = python:/path/to/this/file.py:diffstat
     changegroup.diffstat = python:/path/to/this/file.py:diffstat
-    '''
+    """
     if kwargs.get('parent2'):
         return
     node = kwargs['node']
@@ -53,7 +53,10 b' SOURCES = ['
 # Headers whose preprocessed output will be fed into cdef().
 HEADERS = [
     os.path.join(HERE, "zstd", *p)
-    for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
+    for p in (
+        ("zstd.h",),
+        ("dictBuilder", "zdict.h"),
+    )
 ]
 
 INCLUDE_DIRS = [
@@ -80,12 +83,20 b' if hasattr(compiler, "initialize"):'
 if compiler.compiler_type == "unix":
     args = list(compiler.executables["compiler"])
     args.extend(
-        ["-E", "-DZSTD_STATIC_LINKING_ONLY", "-DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "-E",
+            "-DZSTD_STATIC_LINKING_ONLY",
+            "-DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 elif compiler.compiler_type == "msvc":
     args = [compiler.cc]
     args.extend(
-        ["/EP", "/DZSTD_STATIC_LINKING_ONLY", "/DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "/EP",
+            "/DZSTD_STATIC_LINKING_ONLY",
+            "/DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 else:
     raise Exception("unsupported compiler type: %s" % compiler.compiler_type)
@@ -26,7 +26,9 b' import sys'
 
 _hgenv = dict(os.environ)
 _hgenv.update(
-    {'HGPLAIN': '1',}
+    {
+        'HGPLAIN': '1',
+    }
 )
 
 _HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
@@ -122,7 +122,7 b' def parsegitdiff(lines):'
     optionalrepo=True,
 )
 def analyze(ui, repo, *revs, **opts):
-    '''create a simple model of a repository to use for later synthesis
+    """create a simple model of a repository to use for later synthesis
 
     This command examines every changeset in the given range (or all
     of history if none are specified) and creates a simple statistical
@@ -133,7 +133,7 b' def analyze(ui, repo, *revs, **opts):'
     :hg:`synthesize` to create or augment a repository with synthetic
     commits that have a structure that is statistically similar to the
     analyzed repository.
-    '''
+    """
     root = repo.root
     if not root.endswith(os.path.sep):
         root += os.path.sep
@@ -281,7 +281,7 b' def analyze(ui, repo, *revs, **opts):'
     _('hg synthesize [OPTION].. DESCFILE'),
 )
 def synthesize(ui, repo, descpath, **opts):
-    '''synthesize commits based on a model of an existing repository
+    """synthesize commits based on a model of an existing repository
 
     The model must have been generated by :hg:`analyze`. Commits will
     be generated randomly according to the probabilities described in
@@ -293,7 +293,7 b' def synthesize(ui, repo, descpath, **opt'
     names, words will be chosen randomly from a dictionary that is
     presumed to contain one word per line. Use --dict to specify the
     path to an alternate dictionary to use.
-    '''
+    """
     try:
         fp = hg.openpath(ui, descpath)
     except Exception as err:
@@ -542,12 +542,12 b' def renamedirs(dirs, words):'
     replacements = {'': ''}
 
     def rename(dirpath):
-        '''Recursively rename the directory and all path prefixes.
+        """Recursively rename the directory and all path prefixes.
 
         The mapping from path to renamed path is stored for all path prefixes
         as in dynamic programming, ensuring linear runtime and consistent
         renaming regardless of iteration order through the model.
-        '''
+        """
         if dirpath in replacements:
             return replacements[dirpath]
         head, _ = os.path.split(dirpath)
@@ -81,8 +81,7 b' def writeerr(data):'
 
 
 class embeddedmatcher(object):  # pytype: disable=ignored-metaclass
-    """Base class to detect embedded code fragments in *.t test script
-    """
+    """Base class to detect embedded code fragments in *.t test script"""
 
     __metaclass__ = abc.ABCMeta
 
@@ -103,8 +102,7 b' class embeddedmatcher(object): # pytype'
 
     @abc.abstractmethod
     def isinside(self, ctx, line):
-        """Examine whether line is inside embedded code, if not yet endsat
-        """
+        """Examine whether line is inside embedded code, if not yet endsat"""
 
     @abc.abstractmethod
     def ignores(self, ctx):
@@ -822,7 +822,10 b' class Translator(nodes.NodeVisitor):'
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append(
             '.IP %s %d\n'
-            % (next(self._list_char[-1]), self._list_char[-1].get_width(),)
+            % (
+                next(self._list_char[-1]),
+                self._list_char[-1].get_width(),
+            )
         )
 
     def depart_list_item(self, node):
@@ -239,25 +239,44 b' configitem = registrar.configitem(config'
 
 # deprecated config: acl.config
 configitem(
-    b'acl', b'config', default=None,
+    b'acl',
+    b'config',
+    default=None,
 )
 configitem(
-    b'acl.groups', b'.*', default=None, generic=True,
+    b'acl.groups',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny.branches', b'.*', default=None, generic=True,
+    b'acl.deny.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow.branches', b'.*', default=None, generic=True,
+    b'acl.allow.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny', b'.*', default=None, generic=True,
+    b'acl.deny',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow', b'.*', default=None, generic=True,
+    b'acl.allow',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl', b'sources', default=lambda: [b'serve'],
+    b'acl',
+    b'sources',
+    default=lambda: [b'serve'],
 )
 
 
@@ -42,7 +42,9 b' configtable = {}'
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'automv', b'similarity', default=95,
+    b'automv',
+    b'similarity',
+    default=95,
 )
 
 
@@ -72,19 +72,29 b' configtable = {}'
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'blackbox', b'dirty', default=False,
+    b'blackbox',
+    b'dirty',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxsize', default=b'1 MB',
+    b'blackbox',
+    b'maxsize',
+    default=b'1 MB',
 )
 configitem(
-    b'blackbox', b'logsource', default=False,
+    b'blackbox',
+    b'logsource',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxfiles', default=7,
+    b'blackbox',
+    b'maxfiles',
+    default=7,
 )
 configitem(
-    b'blackbox', b'track', default=lambda: [b'*'],
+    b'blackbox',
+    b'track',
+    default=lambda: [b'*'],
 )
 configitem(
     b'blackbox',
@@ -92,7 +102,9 b' configitem('
     default=lambda: [b'chgserver', b'cmdserver', b'extension'],
 )
 configitem(
-    b'blackbox', b'date-format', default=b'%Y/%m/%d %H:%M:%S',
+    b'blackbox',
+    b'date-format',
+    default=b'%Y/%m/%d %H:%M:%S',
 )
 
 _lastlogger = loggingutil.proxylogger()
@@ -189,14 +201,15 b' def reposetup(ui, repo):'
 
 @command(
     b'blackbox',
-    [(b'l', b'limit', 10, _(b'the number of events to show')),],
+    [
+        (b'l', b'limit', 10, _(b'the number of events to show')),
+    ],
     _(b'hg blackbox [OPTION]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
     helpbasic=True,
 )
 def blackbox(ui, repo, *revs, **opts):
-    '''view the recent repository events
-    '''
+    """view the recent repository events"""
 
     if not repo.vfs.exists(b'blackbox.log'):
         return
@@ -325,22 +325,34 b' configtable = {}'
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'bugzilla', b'apikey', default=b'',
+    b'bugzilla',
+    b'apikey',
+    default=b'',
 )
 configitem(
-    b'bugzilla', b'bzdir', default=b'/var/www/html/bugzilla',
+    b'bugzilla',
+    b'bzdir',
+    default=b'/var/www/html/bugzilla',
 )
 configitem(
-    b'bugzilla', b'bzemail', default=None,
+    b'bugzilla',
+    b'bzemail',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'bzurl', default=b'http://localhost/bugzilla/',
+    b'bugzilla',
+    b'bzurl',
+    default=b'http://localhost/bugzilla/',
 )
 configitem(
-    b'bugzilla', b'bzuser', default=None,
+    b'bugzilla',
+    b'bzuser',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'db', default=b'bugs',
+    b'bugzilla',
+    b'db',
+    default=b'bugs',
 )
 configitem(
     b'bugzilla',
@@ -353,19 +365,29 b' configitem('
     ),
 )
 configitem(
-    b'bugzilla', b'fixresolution', default=b'FIXED',
+    b'bugzilla',
+    b'fixresolution',
+    default=b'FIXED',
 )
 configitem(
-    b'bugzilla', b'fixstatus', default=b'RESOLVED',
+    b'bugzilla',
+    b'fixstatus',
+    default=b'RESOLVED',
 )
 configitem(
-    b'bugzilla', b'host', default=b'localhost',
+    b'bugzilla',
+    b'host',
+    default=b'localhost',
 )
 configitem(
-    b'bugzilla', b'notify', default=configitem.dynamicdefault,
+    b'bugzilla',
+    b'notify',
+    default=configitem.dynamicdefault,
 )
 configitem(
-    b'bugzilla', b'password', default=None,
+    b'bugzilla',
+    b'password',
+    default=None,
 )
 configitem(
     b'bugzilla',
@@ -377,25 +399,39 b' configitem('
     ),
 )
 configitem(
-    b'bugzilla', b'strip', default=0,
+    b'bugzilla',
+    b'strip',
+    default=0,
 )
 configitem(
-    b'bugzilla', b'style', default=None,
+    b'bugzilla',
+    b'style',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'template', default=None,
+    b'bugzilla',
+    b'template',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'timeout', default=5,
+    b'bugzilla',
+    b'timeout',
+    default=5,
 )
 configitem(
-    b'bugzilla', b'user', default=b'bugs',
+    b'bugzilla',
+    b'user',
+    default=b'bugs',
 )
 configitem(
-    b'bugzilla', b'usermap', default=None,
+    b'bugzilla',
+    b'usermap',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'version', default=None,
+    b'bugzilla',
+    b'version',
+    default=None,
 )
 
 
@@ -430,29 +466,29 b' class bzaccess(object):'
         '''remove bug IDs where node occurs in comment text from bugs.'''
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
 
 
 # Bugzilla via direct access to MySQL database.
 class bzmysql(bzaccess):
-    '''Support for direct MySQL access to Bugzilla.
+    """Support for direct MySQL access to Bugzilla.
 
     The earliest Bugzilla version this is tested with is version 2.16.
 
     If your Bugzilla is version 3.4 or above, you are strongly
     recommended to use the XMLRPC access method instead.
-    '''
+    """
 
     @staticmethod
     def sql_buglist(ids):
@@ -581,9 +617,9 b' class bzmysql(bzaccess):'
         return userid
 
     def get_bugzilla_user(self, committer):
-        '''See if committer is a registered bugzilla user. Return
+        """See if committer is a registered bugzilla user. Return
         bugzilla username and userid if so. If not, return default
-        bugzilla username and userid.'''
+        bugzilla username and userid."""
         user = self.map_committer(committer)
         try:
             userid = self.get_user_id(user)
@@ -604,10 +640,10 b' class bzmysql(bzaccess):'
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update bug state with comment text.
+        """update bug state with comment text.
 
         Try adding comment as committer of changeset, otherwise as
-        default bugzilla user.'''
+        default bugzilla user."""
         if len(newstate) > 0:
             self.ui.warn(_(b"Bugzilla/MySQL cannot update bug state\n"))
 
@@ -869,7 +905,7 b' class bzxmlrpcemail(bzxmlrpc):'
         return b"@%s = %s" % (fieldname, pycompat.bytestr(value))
 
     def send_bug_modify_email(self, bugid, commands, comment, committer):
-        '''send modification message to Bugzilla bug via email.
+        """send modification message to Bugzilla bug via email.
 
         The message format is documented in the Bugzilla email_in.pl
         specification. commands is a list of command lines, comment is the
@@ -878,7 +914,7 b' class bzxmlrpcemail(bzxmlrpc):'
         To stop users from crafting commit comments with
         Bugzilla commands, specify the bug ID via the message body, rather
         than the subject line, and leave a blank line after it.
-        '''
+        """
         user = self.map_committer(committer)
         matches = self.bzproxy.User.get(
             {b'match': [user], b'token': self.bztoken}
@@ -1016,11 +1052,11 b' class bzrestapi(bzaccess):'
         del bugs[bugid]
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
         bugmod = {}
         if b'hours' in newstate:
             bugmod[b'work_time'] = newstate[b'hours']
@@ -1050,11 +1086,11 b' class bzrestapi(bzaccess):'
         self.ui.debug(b'added comment to bug %s\n' % bugid)
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
         pass
 
 
@@ -1092,12 +1128,12 b' class bugzilla(object):'
         self.split_re = re.compile(br'\D+')
 
     def find_bugs(self, ctx):
-        '''return bugs dictionary created from commit comment.
+        """return bugs dictionary created from commit comment.
 
         Extract bug info from changeset comments. Filter out any that are
         not known to Bugzilla, and any that already have a reference to
         the given changeset in their comments.
-        '''
+        """
         start = 0
         bugs = {}
         bugmatch = self.bug_re.search(ctx.description(), start)
@@ -1152,8 +1188,8 b' class bugzilla(object):'
         '''update bugzilla bug with reference to changeset.'''
 
         def webroot(root):
-            '''strip leading prefix of repo root and turn into
-            url-safe path.'''
+            """strip leading prefix of repo root and turn into
+            url-safe path."""
            count = int(self.ui.config(b'bugzilla', b'strip'))
             root = util.pconvert(root)
             while count > 0:
@@ -1195,9 +1231,9 b' class bugzilla(object):'
 
 
 def hook(ui, repo, hooktype, node=None, **kwargs):
-    '''add comment to bugzilla for each changeset that refers to a
+    """add comment to bugzilla for each changeset that refers to a
     bugzilla bug id. only add a comment once per bug, so same change
-    seen multiple times does not fill bug with duplicate data.'''
+    seen multiple times does not fill bug with duplicate data."""
     if node is None:
         raise error.Abort(
             _(b'hook type %s does not pass a changeset id') % hooktype
@@ -156,7 +156,7 b' def countrate(ui, repo, amap, *pats, **o'
     inferrepo=True,
 )
 def churn(ui, repo, *pats, **opts):
-    '''histogram of changes to the repository
+    """histogram of changes to the repository
 
     This command will display a histogram representing the number
     of changed lines or revisions, grouped according to the given
@@ -193,7 +193,7 b' def churn(ui, repo, *pats, **opts):'
     Such a file may be specified with the --aliases option, otherwise
     a .hgchurn file will be looked for in the working directory root.
     Aliases will be split from the rightmost "=".
-    '''
+    """
 
     def pad(s, l):
         return s + b" " * (l - encoding.colwidth(s))
@@ -536,7 +536,7 b' def debugsvnlog(ui, **opts):'
     norepo=True,
 )
 def debugcvsps(ui, *args, **opts):
-    '''create changeset information from CVS
+    """create changeset information from CVS
 
     This command is intended as a debugging tool for the CVS to
     Mercurial converter, and can be used as a direct replacement for
@@ -545,7 +545,7 b' def debugcvsps(ui, *args, **opts):'
     Hg debugcvsps reads the CVS rlog for current directory (or any
     named directory) in the CVS repository, and converts the log to a
     series of changesets based on matching commit log entries and
-    dates.'''
+    dates."""
     return cvsps.debugcvsps(ui, *args, **opts)
 
 
@@ -21,7 +21,11 b' from . import common'
 
 # these do not work with demandimport, blacklist
 demandimport.IGNORES.update(
-    [b'bzrlib.transactions', b'bzrlib.urlutils', b'ElementPath',]
+    [
+        b'bzrlib.transactions',
+        b'bzrlib.urlutils',
+        b'ElementPath',
+    ]
 )
 
 try:
@@ -172,8 +172,8 b' class converter_source(object):'
         self.encoding = b'utf-8'
 
     def checkhexformat(self, revstr, mapname=b'splicemap'):
-        """ fails if revstr is not a 40 byte hex. mercurial and git both uses
+        """fails if revstr is not a 40 byte hex. mercurial and git both uses
         such format for their revision numbering
         """
         if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
             raise error.Abort(
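In the hunk above black edits the docstring text itself: the stray space after the opening quotes is stripped, and (in the checkrevformat hunk that follows) continuation lines are re-indented to match the opening line. A sketch, assuming that is the whitespace change hidden by this rendering:

# Before: space after the opening quotes, over-indented continuation.
def check_before(revstr):
    """ fails if revstr is not a 40 byte hex. mercurial and git both uses
          such format for their revision numbering
    """

# After black 20.8b1: leading space stripped, continuation re-indented.
def check_after(revstr):
    """fails if revstr is not a 40 byte hex. mercurial and git both uses
    such format for their revision numbering
    """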
@@ -283,8 +283,7 b' class converter_source(object):'
         return False
 
     def hasnativeclose(self):
-        """Return true if this source has ability to close branch.
-        """
+        """Return true if this source has ability to close branch."""
         return False
 
     def lookuprev(self, rev):
@@ -303,8 +302,8 b' class converter_source(object):'
 
     def checkrevformat(self, revstr, mapname=b'splicemap'):
         """revstr is a string that describes a revision in the given
-          source control system. Return true if revstr has correct
-          format.
+        source control system. Return true if revstr has correct
+        format.
         """
         return True
 
@@ -96,7 +96,7 b' def recode(s):'
 
 
 def mapbranch(branch, branchmap):
-    '''
+    """
     >>> bmap = {b'default': b'branch1'}
     >>> for i in [b'', None]:
     ...     mapbranch(i, bmap)
@@ -115,7 +115,7 b' def mapbranch(branch, branchmap):'
     'branch4'
     'branch4'
     'branch5'
-    '''
+    """
     # If branch is None or empty, this commit is coming from the source
     # repository's default branch and destined for the default branch in the
     # destination repository. For such commits, using a literal "default"
@@ -228,14 +228,14 b' class converter(object):'
         self.branchmap = mapfile(ui, opts.get(b'branchmap'))
 
     def parsesplicemap(self, path):
-        """ check and validate the splicemap format and
+        """check and validate the splicemap format and
         return a child/parents dictionary.
         Format checking has two parts.
         1. generic format which is same across all source types
         2. specific format checking which may be different for
         different source type. This logic is implemented in
         checkrevformat function in source files like
         hg.py, subversion.py etc.
         """
 
         if not path:
@@ -275,8 +275,8 b' class converter(object):'
         return m
 
     def walktree(self, heads):
-        '''Return a mapping that identifies the uncommitted parents of every
-        uncommitted changeset.'''
+        """Return a mapping that identifies the uncommitted parents of every
+        uncommitted changeset."""
         visit = list(heads)
         known = set()
         parents = {}
@@ -332,8 +332,8 b' class converter(object):'
                 parents[c] = pc
 
     def toposort(self, parents, sortmode):
-        '''Return an ordering such that every uncommitted changeset is
-        preceded by all its uncommitted ancestors.'''
+        """Return an ordering such that every uncommitted changeset is
+        preceded by all its uncommitted ancestors."""
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
@@ -29,25 +29,25 b' pickle = util.pickle'
29
29
30
30
31 class logentry(object):
31 class logentry(object):
32 '''Class logentry has the following attributes:
32 """Class logentry has the following attributes:
33 .author - author name as CVS knows it
33 .author - author name as CVS knows it
34 .branch - name of branch this revision is on
34 .branch - name of branch this revision is on
35 .branches - revision tuple of branches starting at this revision
35 .branches - revision tuple of branches starting at this revision
36 .comment - commit message
36 .comment - commit message
37 .commitid - CVS commitid or None
37 .commitid - CVS commitid or None
38 .date - the commit date as a (time, tz) tuple
38 .date - the commit date as a (time, tz) tuple
39 .dead - true if file revision is dead
39 .dead - true if file revision is dead
40 .file - Name of file
40 .file - Name of file
41 .lines - a tuple (+lines, -lines) or None
41 .lines - a tuple (+lines, -lines) or None
42 .parent - Previous revision of this entry
42 .parent - Previous revision of this entry
43 .rcs - name of file as returned from CVS
43 .rcs - name of file as returned from CVS
44 .revision - revision number as tuple
44 .revision - revision number as tuple
45 .tags - list of tags on the file
45 .tags - list of tags on the file
46 .synthetic - is this a synthetic "file ... added on ..." revision?
46 .synthetic - is this a synthetic "file ... added on ..." revision?
47 .mergepoint - the branch that has been merged from (if present in
47 .mergepoint - the branch that has been merged from (if present in
48 rlog output) or None
48 rlog output) or None
49 .branchpoints - the branches that start at the current entry or empty
49 .branchpoints - the branches that start at the current entry or empty
50 '''
50 """
51
51
52 def __init__(self, **entries):
52 def __init__(self, **entries):
53 self.synthetic = False
53 self.synthetic = False
@@ -580,20 +580,20 b' def createlog(ui, directory=None, root=b'
580
580
581
581
582 class changeset(object):
582 class changeset(object):
583 '''Class changeset has the following attributes:
583 """Class changeset has the following attributes:
584 .id - integer identifying this changeset (list index)
584 .id - integer identifying this changeset (list index)
585 .author - author name as CVS knows it
585 .author - author name as CVS knows it
586 .branch - name of branch this changeset is on, or None
586 .branch - name of branch this changeset is on, or None
587 .comment - commit message
587 .comment - commit message
588 .commitid - CVS commitid or None
588 .commitid - CVS commitid or None
589 .date - the commit date as a (time,tz) tuple
589 .date - the commit date as a (time,tz) tuple
590 .entries - list of logentry objects in this changeset
590 .entries - list of logentry objects in this changeset
591 .parents - list of one or two parent changesets
591 .parents - list of one or two parent changesets
592 .tags - list of tags on this changeset
592 .tags - list of tags on this changeset
593 .synthetic - from synthetic revision "file ... added on branch ..."
593 .synthetic - from synthetic revision "file ... added on branch ..."
594 .mergepoint - the branch that has been merged from or None
594 .mergepoint - the branch that has been merged from or None
595 .branchpoints - the branches that start at the current entry or empty
595 .branchpoints - the branches that start at the current entry or empty
596 '''
596 """
597
597
598 def __init__(self, **entries):
598 def __init__(self, **entries):
599 self.id = None
599 self.id = None
@@ -945,10 +945,10 b' def createchangeset(ui, log, fuzz=60, me'
945
945
946
946
947 def debugcvsps(ui, *args, **opts):
947 def debugcvsps(ui, *args, **opts):
948 '''Read CVS rlog for current directory or named path in
948 """Read CVS rlog for current directory or named path in
949 repository, and convert the log to changesets based on matching
949 repository, and convert the log to changesets based on matching
950 commit log entries and dates.
950 commit log entries and dates.
951 '''
951 """
952 opts = pycompat.byteskwargs(opts)
952 opts = pycompat.byteskwargs(opts)
953 if opts[b"new_cache"]:
953 if opts[b"new_cache"]:
954 cache = b"write"
954 cache = b"write"
@@ -19,14 +19,14 b' SKIPREV = common.SKIPREV'
19
19
20
20
21 def rpairs(path):
21 def rpairs(path):
22 '''Yield tuples with path split at '/', starting with the full path.
22 """Yield tuples with path split at '/', starting with the full path.
23 No leading, trailing or double '/', please.
23 No leading, trailing or double '/', please.
24 >>> for x in rpairs(b'foo/bar/baz'): print(x)
24 >>> for x in rpairs(b'foo/bar/baz'): print(x)
25 ('foo/bar/baz', '')
25 ('foo/bar/baz', '')
26 ('foo/bar', 'baz')
26 ('foo/bar', 'baz')
27 ('foo', 'bar/baz')
27 ('foo', 'bar/baz')
28 ('.', 'foo/bar/baz')
28 ('.', 'foo/bar/baz')
29 '''
29 """
30 i = len(path)
30 i = len(path)
31 while i != -1:
31 while i != -1:
32 yield path[:i], path[i + 1 :]
32 yield path[:i], path[i + 1 :]
@@ -35,17 +35,17 b' def rpairs(path):'
35
35
36
36
37 def normalize(path):
37 def normalize(path):
38 ''' We use posixpath.normpath to support cross-platform path format.
38 """We use posixpath.normpath to support cross-platform path format.
39 However, it doesn't handle None input. So we wrap it up. '''
39 However, it doesn't handle None input. So we wrap it up."""
40 if path is None:
40 if path is None:
41 return None
41 return None
42 return posixpath.normpath(path)
42 return posixpath.normpath(path)
43
43
44
44
45 class filemapper(object):
45 class filemapper(object):
46 '''Map and filter filenames when importing.
46 """Map and filter filenames when importing.
47 A name can be mapped to itself, a new name, or None (omit from new
47 A name can be mapped to itself, a new name, or None (omit from new
48 repository).'''
48 repository)."""
49
49
50 def __init__(self, ui, path=None):
50 def __init__(self, ui, path=None):
51 self.ui = ui
51 self.ui = ui
@@ -118,13 +118,19 b' configtable = {}'
118 configitem = registrar.configitem(configtable)
118 configitem = registrar.configitem(configtable)
119
119
120 configitem(
120 configitem(
121 b'eol', b'fix-trailing-newline', default=False,
121 b'eol',
122 b'fix-trailing-newline',
123 default=False,
122 )
124 )
123 configitem(
125 configitem(
124 b'eol', b'native', default=pycompat.oslinesep,
126 b'eol',
127 b'native',
128 default=pycompat.oslinesep,
125 )
129 )
126 configitem(
130 configitem(
127 b'eol', b'only-consistent', default=True,
131 b'eol',
132 b'only-consistent',
133 default=True,
128 )
134 )
129
135
130 # Matches a lone LF, i.e., one that is not part of CRLF.
136 # Matches a lone LF, i.e., one that is not part of CRLF.
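
The configitem() rewrites above are black 20.8's "magic trailing comma": a call (or collection) that already ends with a trailing comma is never collapsed back onto one line; black explodes it to one element per line instead. Sketched on the first call of this hunk (behavior as documented for black 20.8b0 and later):

    # The pre-existing trailing comma after default=False ...
    configitem(
        b'eol', b'fix-trailing-newline', default=False,
    )

    # ... makes black 20.8b1 keep the call exploded:
    configitem(
        b'eol',
        b'fix-trailing-newline',
        default=False,
    )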
@@ -118,19 +118,29 b' configtable = {}'
118 configitem = registrar.configitem(configtable)
118 configitem = registrar.configitem(configtable)
119
119
120 configitem(
120 configitem(
121 b'extdiff', br'opts\..*', default=b'', generic=True,
121 b'extdiff',
122 br'opts\..*',
123 default=b'',
124 generic=True,
122 )
125 )
123
126
124 configitem(
127 configitem(
125 b'extdiff', br'gui\..*', generic=True,
128 b'extdiff',
129 br'gui\..*',
130 generic=True,
126 )
131 )
127
132
128 configitem(
133 configitem(
129 b'diff-tools', br'.*\.diffargs$', default=None, generic=True,
134 b'diff-tools',
135 br'.*\.diffargs$',
136 default=None,
137 generic=True,
130 )
138 )
131
139
132 configitem(
140 configitem(
133 b'diff-tools', br'.*\.gui$', generic=True,
141 b'diff-tools',
142 br'.*\.gui$',
143 generic=True,
134 )
144 )
135
145
136 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
146 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -141,9 +151,9 b" testedwith = b'ships-with-hg-core'"
141
151
142
152
143 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
153 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
144 '''snapshot files as of some revision
154 """snapshot files as of some revision
145 if not using snapshot, -I/-X does not work and recursive diff
155 if not using snapshot, -I/-X does not work and recursive diff
146 in tools like kdiff3 and meld displays too many files.'''
156 in tools like kdiff3 and meld displays too many files."""
147 dirname = os.path.basename(repo.root)
157 dirname = os.path.basename(repo.root)
148 if dirname == b"":
158 if dirname == b"":
149 dirname = b"root"
159 dirname = b"root"
@@ -230,9 +240,9 b' def formatcmdline('
230
240
231
241
232 def _systembackground(cmd, environ=None, cwd=None):
242 def _systembackground(cmd, environ=None, cwd=None):
233 ''' like 'procutil.system', but returns the Popen object directly
243 """like 'procutil.system', but returns the Popen object directly
234 so we don't have to wait on it.
244 so we don't have to wait on it.
235 '''
245 """
236 env = procutil.shellenviron(environ)
246 env = procutil.shellenviron(environ)
237 proc = subprocess.Popen(
247 proc = subprocess.Popen(
238 procutil.tonativestr(cmd),
248 procutil.tonativestr(cmd),
@@ -530,13 +540,13 b' def diffrevs('
530
540
531
541
532 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
542 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
533 '''Do the actual diff:
543 """Do the actual diff:
534
544
535 - copy to a temp structure if diffing 2 internal revisions
545 - copy to a temp structure if diffing 2 internal revisions
536 - copy to a temp structure if diffing working revision with
546 - copy to a temp structure if diffing working revision with
537 another one and more than 1 file is changed
547 another one and more than 1 file is changed
538 - just invoke the diff for a single file in the working dir
548 - just invoke the diff for a single file in the working dir
539 '''
549 """
540
550
541 cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
551 cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
542 revs = opts.get(b'rev')
552 revs = opts.get(b'rev')
@@ -628,14 +638,16 b' extdiffopts = ('
628
638
629 @command(
639 @command(
630 b'extdiff',
640 b'extdiff',
631 [(b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),]
641 [
642 (b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),
643 ]
632 + extdiffopts,
644 + extdiffopts,
633 _(b'hg extdiff [OPT]... [FILE]...'),
645 _(b'hg extdiff [OPT]... [FILE]...'),
634 helpcategory=command.CATEGORY_FILE_CONTENTS,
646 helpcategory=command.CATEGORY_FILE_CONTENTS,
635 inferrepo=True,
647 inferrepo=True,
636 )
648 )
637 def extdiff(ui, repo, *pats, **opts):
649 def extdiff(ui, repo, *pats, **opts):
638 '''use external program to diff repository (or selected files)
650 """use external program to diff repository (or selected files)
639
651
640 Show differences between revisions for the specified files, using
652 Show differences between revisions for the specified files, using
641 an external program. The default program used is diff, with
653 an external program. The default program used is diff, with
@@ -664,7 +676,7 b' def extdiff(ui, repo, *pats, **opts):'
664
676
665 The --confirm option will prompt the user before each invocation of
677 The --confirm option will prompt the user before each invocation of
666 the external program. It is ignored if --per-file isn't specified.
678 the external program. It is ignored if --per-file isn't specified.
667 '''
679 """
668 opts = pycompat.byteskwargs(opts)
680 opts = pycompat.byteskwargs(opts)
669 program = opts.get(b'program')
681 program = opts.get(b'program')
670 option = opts.get(b'option')
682 option = opts.get(b'option')
@@ -70,13 +70,19 b' configtable = {}'
70 configitem = registrar.configitem(configtable)
70 configitem = registrar.configitem(configtable)
71
71
72 configitem(
72 configitem(
73 b'factotum', b'executable', default=b'/bin/auth/factotum',
73 b'factotum',
74 b'executable',
75 default=b'/bin/auth/factotum',
74 )
76 )
75 configitem(
77 configitem(
76 b'factotum', b'mountpoint', default=b'/mnt/factotum',
78 b'factotum',
79 b'mountpoint',
80 default=b'/mnt/factotum',
77 )
81 )
78 configitem(
82 configitem(
79 b'factotum', b'service', default=b'hg',
83 b'factotum',
84 b'service',
85 default=b'hg',
80 )
86 )
81
87
82
88
@@ -54,7 +54,7 b" testedwith = b'ships-with-hg-core'"
54 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
54 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
55 )
55 )
56 def fetch(ui, repo, source=b'default', **opts):
56 def fetch(ui, repo, source=b'default', **opts):
57 '''pull changes from a remote repository, merge new changes if needed.
57 """pull changes from a remote repository, merge new changes if needed.
58
58
59 This finds all changes from the repository at the specified path
59 This finds all changes from the repository at the specified path
60 or URL and adds them to the local repository.
60 or URL and adds them to the local repository.
@@ -71,7 +71,7 b" def fetch(ui, repo, source=b'default', *"
71 See :hg:`help dates` for a list of formats valid for -d/--date.
71 See :hg:`help dates` for a list of formats valid for -d/--date.
72
72
73 Returns 0 on success.
73 Returns 0 on success.
74 '''
74 """
75
75
76 opts = pycompat.byteskwargs(opts)
76 opts = pycompat.byteskwargs(opts)
77 date = opts.get(b'date')
77 date = opts.get(b'date')
@@ -372,7 +372,7 b' def cleanup(repo, replacements, wdirwrit'
372
372
373
373
374 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
374 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
375 """"Constructs the list of files to be fixed at specific revisions
375 """ "Constructs the list of files to be fixed at specific revisions
376
376
377 It is up to the caller how to consume the work items, and the only
377 It is up to the caller how to consume the work items, and the only
378 dependence between them is that replacement revisions must be committed in
378 dependence between them is that replacement revisions must be committed in
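
The `""" "Constructs` line above is not a transcription error. The original docstring opened with four quotes in a row (`""""Constructs ...`, a pre-existing typo), and black 20.8b1 inserts a space after the opening delimiter so the stray quote cannot be read as part of it. A hypothetical reproduction (sketch, not the real function):

    # Before: the docstring body itself starts with a quote.
    def getworkqueue_sketch():
        """"Constructs the list of files to be fixed"""

    # After black 20.8b1: a space keeps the delimiter unambiguous.
    def getworkqueue_sketch():
        """ "Constructs the list of files to be fixed"""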
@@ -154,25 +154,40 b' configtable = {}'
154 configitem = registrar.configitem(configtable)
154 configitem = registrar.configitem(configtable)
155
155
156 configitem(
156 configitem(
157 b'fsmonitor', b'mode', default=b'on',
157 b'fsmonitor',
158 b'mode',
159 default=b'on',
158 )
160 )
159 configitem(
161 configitem(
160 b'fsmonitor', b'walk_on_invalidate', default=False,
162 b'fsmonitor',
163 b'walk_on_invalidate',
164 default=False,
161 )
165 )
162 configitem(
166 configitem(
163 b'fsmonitor', b'timeout', default=b'2',
167 b'fsmonitor',
168 b'timeout',
169 default=b'2',
164 )
170 )
165 configitem(
171 configitem(
166 b'fsmonitor', b'blacklistusers', default=list,
172 b'fsmonitor',
173 b'blacklistusers',
174 default=list,
167 )
175 )
168 configitem(
176 configitem(
169 b'fsmonitor', b'watchman_exe', default=b'watchman',
177 b'fsmonitor',
178 b'watchman_exe',
179 default=b'watchman',
170 )
180 )
171 configitem(
181 configitem(
172 b'fsmonitor', b'verbose', default=True, experimental=True,
182 b'fsmonitor',
183 b'verbose',
184 default=True,
185 experimental=True,
173 )
186 )
174 configitem(
187 configitem(
175 b'experimental', b'fsmonitor.transaction_notify', default=False,
188 b'experimental',
189 b'fsmonitor.transaction_notify',
190 default=False,
176 )
191 )
177
192
178 # This extension is incompatible with the following blacklisted extensions
193 # This extension is incompatible with the following blacklisted extensions
@@ -271,11 +286,11 b' def _watchmantofsencoding(path):'
271
286
272
287
273 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
288 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
274 '''Replacement for dirstate.walk, hooking into Watchman.
289 """Replacement for dirstate.walk, hooking into Watchman.
275
290
276 Whenever full is False, ignored is False, and the Watchman client is
291 Whenever full is False, ignored is False, and the Watchman client is
277 available, use Watchman combined with saved state to possibly return only a
292 available, use Watchman combined with saved state to possibly return only a
278 subset of files.'''
293 subset of files."""
279
294
280 def bail(reason):
295 def bail(reason):
281 self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
296 self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
@@ -731,8 +746,8 b' def extsetup(ui):'
731
746
732
747
733 def wrapsymlink(orig, source, link_name):
748 def wrapsymlink(orig, source, link_name):
734 ''' if we create a dangling symlink, also touch the parent dir
749 """if we create a dangling symlink, also touch the parent dir
735 to encourage fsevents notifications to work more correctly '''
750 to encourage fsevents notifications to work more correctly"""
736 try:
751 try:
737 return orig(source, link_name)
752 return orig(source, link_name)
738 finally:
753 finally:
@@ -743,13 +758,13 b' def wrapsymlink(orig, source, link_name)'
743
758
744
759
745 class state_update(object):
760 class state_update(object):
746 ''' This context manager is responsible for dispatching the state-enter
761 """This context manager is responsible for dispatching the state-enter
747 and state-leave signals to the watchman service. The enter and leave
762 and state-leave signals to the watchman service. The enter and leave
748 methods can be invoked manually (for scenarios where context manager
763 methods can be invoked manually (for scenarios where context manager
749 semantics are not possible). If parameters oldnode and newnode are None,
764 semantics are not possible). If parameters oldnode and newnode are None,
750 they will be populated based on current working copy in enter and
765 they will be populated based on current working copy in enter and
751 leave, respectively. Similarly, if the distance is None, it will be
766 leave, respectively. Similarly, if the distance is None, it will be
752 calculated based on the oldnode and newnode in the leave method.'''
767 calculated based on the oldnode and newnode in the leave method."""
753
768
754 def __init__(
769 def __init__(
755 self,
770 self,
@@ -282,11 +282,11 b' class SocketConnectError(WatchmanError):'
282
282
283 class SocketTimeout(WatchmanError):
283 class SocketTimeout(WatchmanError):
284 """A specialized exception raised for socket timeouts during communication to/from watchman.
284 """A specialized exception raised for socket timeouts during communication to/from watchman.
285 This makes it easier to implement non-blocking loops as callers can easily distinguish
285 This makes it easier to implement non-blocking loops as callers can easily distinguish
286 between a routine timeout and an actual error condition.
286 between a routine timeout and an actual error condition.
287
287
288 Note that catching WatchmanError will also catch this as it is a super-class, so backwards
288 Note that catching WatchmanError will also catch this as it is a super-class, so backwards
289 compatibility in exception handling is preserved.
289 compatibility in exception handling is preserved.
290 """
290 """
291
291
292
292
@@ -323,7 +323,7 b' class Transport(object):'
323 pass
323 pass
324
324
325 def readLine(self):
325 def readLine(self):
326 """ read a line
326 """read a line
327 Maintains its own buffer, callers of the transport should not mix
327 Maintains its own buffer, callers of the transport should not mix
328 calls to readBytes and readLine.
328 calls to readBytes and readLine.
329 """
329 """
@@ -409,7 +409,7 b' class UnixSocketTransport(Transport):'
409
409
410
410
411 def _get_overlapped_result_ex_impl(pipe, olap, nbytes, millis, alertable):
411 def _get_overlapped_result_ex_impl(pipe, olap, nbytes, millis, alertable):
412 """ Windows 7 and earlier does not support GetOverlappedResultEx. The
412 """Windows 7 and earlier does not support GetOverlappedResultEx. The
413 alternative is to use GetOverlappedResult and wait for the read or write
413 alternative is to use GetOverlappedResult and wait for the read or write
414 operation to complete. This is done by using CreateEvent and
414 operation to complete. This is done by using CreateEvent and
415 WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
415 WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
@@ -510,9 +510,9 b' class WindowsNamedPipeTransport(Transpor'
510 self.timeout = int(value * 1000)
510 self.timeout = int(value * 1000)
511
511
512 def readBytes(self, size):
512 def readBytes(self, size):
513 """ A read can block for an unbounded amount of time, even if the
513 """A read can block for an unbounded amount of time, even if the
514 kernel reports that the pipe handle is signalled, so we need to
514 kernel reports that the pipe handle is signalled, so we need to
515 always perform our reads asynchronously
515 always perform our reads asynchronously
516 """
516 """
517
517
518 # try to satisfy the read from any buffered data
518 # try to satisfy the read from any buffered data
@@ -627,7 +627,7 b' def _default_binpath(binpath=None):'
627
627
628
628
629 class CLIProcessTransport(Transport):
629 class CLIProcessTransport(Transport):
630 """ open a pipe to the cli to talk to the service
630 """open a pipe to the cli to talk to the service
631 This is intended to be used only in the test harness!
631 This is intended to be used only in the test harness!
632
632
633 The CLI is an oddball because we only support JSON input
633 The CLI is an oddball because we only support JSON input
@@ -739,8 +739,8 b' class BserCodec(Codec):'
739
739
740
740
741 class ImmutableBserCodec(BserCodec):
741 class ImmutableBserCodec(BserCodec):
742 """ use the BSER encoding, decoding values using the newer
742 """use the BSER encoding, decoding values using the newer
743 immutable object support """
743 immutable object support"""
744
744
745 def _loads(self, response):
745 def _loads(self, response):
746 return bser.loads(
746 return bser.loads(
@@ -817,8 +817,8 b' class Bser2WithFallbackCodec(BserCodec):'
817
817
818
818
819 class ImmutableBser2Codec(Bser2WithFallbackCodec, ImmutableBserCodec):
819 class ImmutableBser2Codec(Bser2WithFallbackCodec, ImmutableBserCodec):
820 """ use the BSER encoding, decoding values using the newer
820 """use the BSER encoding, decoding values using the newer
821 immutable object support """
821 immutable object support"""
822
822
823 pass
823 pass
824
824
@@ -1050,7 +1050,7 b' class client(object):'
1050 self.sendConn = None
1050 self.sendConn = None
1051
1051
1052 def receive(self):
1052 def receive(self):
1053 """ receive the next PDU from the watchman service
1053 """receive the next PDU from the watchman service
1054
1054
1055 If the client has activated subscriptions or logs then
1055 If the client has activated subscriptions or logs then
1056 this PDU may be a unilateral PDU sent by the service to
1056 this PDU may be a unilateral PDU sent by the service to
@@ -1098,7 +1098,7 b' class client(object):'
1098 return False
1098 return False
1099
1099
1100 def getLog(self, remove=True):
1100 def getLog(self, remove=True):
1101 """ Retrieve buffered log data
1101 """Retrieve buffered log data
1102
1102
1103 If remove is true the data will be removed from the buffer.
1103 If remove is true the data will be removed from the buffer.
1104 Otherwise it will be left in the buffer
1104 Otherwise it will be left in the buffer
@@ -1109,7 +1109,7 b' class client(object):'
1109 return res
1109 return res
1110
1110
1111 def getSubscription(self, name, remove=True, root=None):
1111 def getSubscription(self, name, remove=True, root=None):
1112 """ Retrieve the data associated with a named subscription
1112 """Retrieve the data associated with a named subscription
1113
1113
1114 If remove is True (the default), the subscription data is removed
1114 If remove is True (the default), the subscription data is removed
1115 from the buffer. Otherwise the data is returned but left in
1115 from the buffer. Otherwise the data is returned but left in
@@ -1144,7 +1144,7 b' class client(object):'
1144 return sub
1144 return sub
1145
1145
1146 def query(self, *args):
1146 def query(self, *args):
1147 """ Send a query to the watchman service and return the response
1147 """Send a query to the watchman service and return the response
1148
1148
1149 This call will block until the response is returned.
1149 This call will block until the response is returned.
1150 If any unilateral responses are sent by the service in between
1150 If any unilateral responses are sent by the service in between
@@ -55,8 +55,8 b' def check(version, name):'
55
55
56
56
57 def synthesize(vers, opts):
57 def synthesize(vers, opts):
58 """ Synthesize a capability enabled version response
58 """Synthesize a capability enabled version response
59 This is a very limited emulation for relatively recent feature sets
59 This is a very limited emulation for relatively recent feature sets
60 """
60 """
61 parsed_version = parse_version(vers["version"])
61 parsed_version = parse_version(vers["version"])
62 vers["capabilities"] = {}
62 vers["capabilities"] = {}
@@ -33,7 +33,9 b' configtable = {}'
33 configitem = registrar.configitem(configtable)
33 configitem = registrar.configitem(configtable)
34 # git.log-index-cache-miss: internal knob for testing
34 # git.log-index-cache-miss: internal knob for testing
35 configitem(
35 configitem(
36 b"git", b"log-index-cache-miss", default=False,
36 b"git",
37 b"log-index-cache-miss",
38 default=False,
37 )
39 )
38
40
39 # TODO: extract an interface for this in core
41 # TODO: extract an interface for this in core
@@ -224,8 +226,7 b' class gitbmstore(object):'
224 return bname
226 return bname
225
227
226 def applychanges(self, repo, tr, changes):
228 def applychanges(self, repo, tr, changes):
227 """Apply a list of changes to bookmarks
229 """Apply a list of changes to bookmarks"""
228 """
229 # TODO: this should respect transactions, but that's going to
230 # TODO: this should respect transactions, but that's going to
230 # require enlarging the gitbmstore to know how to do in-memory
231 # require enlarging the gitbmstore to know how to do in-memory
231 # temporary writes and read those back prior to transaction
232 # temporary writes and read those back prior to transaction
@@ -127,7 +127,7 b' class gittreemanifest(object):'
127 return dir in self._dirs
127 return dir in self._dirs
128
128
129 def diff(self, other, match=lambda x: True, clean=False):
129 def diff(self, other, match=lambda x: True, clean=False):
130 '''Finds changes between the current manifest and m2.
130 """Finds changes between the current manifest and m2.
131
131
132 The result is returned as a dict with filename as key and
132 The result is returned as a dict with filename as key and
133 values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
133 values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
@@ -135,7 +135,7 b' class gittreemanifest(object):'
135 in the current/other manifest. Where the file does not exist,
135 in the current/other manifest. Where the file does not exist,
136 the nodeid will be None and the flags will be the empty
136 the nodeid will be None and the flags will be the empty
137 string.
137 string.
138 '''
138 """
139 result = {}
139 result = {}
140
140
141 def _iterativediff(t1, t2, subdir):
141 def _iterativediff(t1, t2, subdir):
@@ -59,10 +59,10 b' def convert(s):'
59 helpbasic=True,
59 helpbasic=True,
60 )
60 )
61 def githelp(ui, repo, *args, **kwargs):
61 def githelp(ui, repo, *args, **kwargs):
62 '''suggests the Mercurial equivalent of the given git command
62 """suggests the Mercurial equivalent of the given git command
63
63
64 Usage: hg githelp -- <git command>
64 Usage: hg githelp -- <git command>
65 '''
65 """
66
66
67 if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
67 if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
68 raise error.Abort(
68 raise error.Abort(
@@ -37,13 +37,20 b' configtable = {}'
37 configitem = registrar.configitem(configtable)
37 configitem = registrar.configitem(configtable)
38
38
39 configitem(
39 configitem(
40 b'gpg', b'cmd', default=b'gpg',
40 b'gpg',
41 b'cmd',
42 default=b'gpg',
41 )
43 )
42 configitem(
44 configitem(
43 b'gpg', b'key', default=None,
45 b'gpg',
46 b'key',
47 default=None,
44 )
48 )
45 configitem(
49 configitem(
46 b'gpg', b'.*', default=None, generic=True,
50 b'gpg',
51 b'.*',
52 default=None,
53 generic=True,
47 )
54 )
48
55
49 # Custom help category
56 # Custom help category
@@ -78,7 +85,11 b' class gpg(object):'
78 fp.close()
85 fp.close()
79 gpgcmd = (
86 gpgcmd = (
80 b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
87 b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
81 % (self.path, sigfile, datafile,)
88 % (
89 self.path,
90 sigfile,
91 datafile,
92 )
82 )
93 )
83 ret = procutil.filter(b"", gpgcmd)
94 ret = procutil.filter(b"", gpgcmd)
84 finally:
95 finally:
@@ -65,7 +65,9 b' configtable = {}'
65 configitem = registrar.configitem(configtable)
65 configitem = registrar.configitem(configtable)
66
66
67 configitem(
67 configitem(
68 b'hgk', b'path', default=b'hgk',
68 b'hgk',
69 b'path',
70 default=b'hgk',
69 )
71 )
70
72
71
73
@@ -247,22 +247,34 b' command = registrar.command(cmdtable)'
247 configtable = {}
247 configtable = {}
248 configitem = registrar.configitem(configtable)
248 configitem = registrar.configitem(configtable)
249 configitem(
249 configitem(
250 b'experimental', b'histedit.autoverb', default=False,
250 b'experimental',
251 b'histedit.autoverb',
252 default=False,
251 )
253 )
252 configitem(
254 configitem(
253 b'histedit', b'defaultrev', default=None,
255 b'histedit',
256 b'defaultrev',
257 default=None,
254 )
258 )
255 configitem(
259 configitem(
256 b'histedit', b'dropmissing', default=False,
260 b'histedit',
261 b'dropmissing',
262 default=False,
257 )
263 )
258 configitem(
264 configitem(
259 b'histedit', b'linelen', default=80,
265 b'histedit',
266 b'linelen',
267 default=80,
260 )
268 )
261 configitem(
269 configitem(
262 b'histedit', b'singletransaction', default=False,
270 b'histedit',
271 b'singletransaction',
272 default=False,
263 )
273 )
264 configitem(
274 configitem(
265 b'ui', b'interface.histedit', default=None,
275 b'ui',
276 b'interface.histedit',
277 default=None,
266 )
278 )
267 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
279 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
268
280
@@ -280,7 +292,7 b' internalactions = set()'
280
292
281
293
282 def geteditcomment(ui, first, last):
294 def geteditcomment(ui, first, last):
283 """ construct the editor comment
295 """construct the editor comment
284 The comment includes::
296 The comment includes::
285 - an intro
297 - an intro
286 - sorted primary commands
298 - sorted primary commands
@@ -477,8 +489,7 b' class histeditaction(object):'
477
489
478 @classmethod
490 @classmethod
479 def fromrule(cls, state, rule):
491 def fromrule(cls, state, rule):
480 """Parses the given rule, returning an instance of the histeditaction.
492 """Parses the given rule, returning an instance of the histeditaction."""
481 """
482 ruleid = rule.strip().split(b' ', 1)[0]
493 ruleid = rule.strip().split(b' ', 1)[0]
483 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
494 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
484 # Check for validation of rule ids and get the rulehash
495 # Check for validation of rule ids and get the rulehash
@@ -544,7 +555,7 b' class histeditaction(object):'
544
555
545 def tostate(self):
556 def tostate(self):
546 """Print an action in format used by histedit state files
557 """Print an action in format used by histedit state files
547 (the first line is a verb, the remainder is the second)
558 (the first line is a verb, the remainder is the second)
548 """
559 """
549 return b"%s\n%s" % (self.verb, node.hex(self.node))
560 return b"%s\n%s" % (self.verb, node.hex(self.node))
550
561
@@ -1178,8 +1189,8 b' class histeditrule(object):'
1178
1189
1179 # ============ EVENTS ===============
1190 # ============ EVENTS ===============
1180 def movecursor(state, oldpos, newpos):
1191 def movecursor(state, oldpos, newpos):
1181 '''Change the rule/changeset that the cursor is pointing to, regardless of
1192 """Change the rule/changeset that the cursor is pointing to, regardless of
1182 current mode (you can switch between patches from the view patch window).'''
1193 current mode (you can switch between patches from the view patch window)."""
1183 state[b'pos'] = newpos
1194 state[b'pos'] = newpos
1184
1195
1185 mode, _ = state[b'mode']
1196 mode, _ = state[b'mode']
@@ -1256,8 +1267,8 b' def cycleaction(state, pos, next=False):'
1256
1267
1257
1268
1258 def changeview(state, delta, unit):
1269 def changeview(state, delta, unit):
1259 '''Change the region of whatever is being viewed (a patch or the list of
1270 """Change the region of whatever is being viewed (a patch or the list of
1260 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
1271 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
1261 mode, _ = state[b'mode']
1272 mode, _ = state[b'mode']
1262 if mode != MODE_PATCH:
1273 if mode != MODE_PATCH:
1263 return
1274 return
@@ -1582,8 +1593,12 b' pgup/K: move patch up, pgdn/J: move patc'
1582 b'mode': (MODE_INIT, MODE_INIT),
1593 b'mode': (MODE_INIT, MODE_INIT),
1583 b'page_height': None,
1594 b'page_height': None,
1584 b'modes': {
1595 b'modes': {
1585 MODE_RULES: {b'line_offset': 0,},
1596 MODE_RULES: {
1597 b'line_offset': 0,
1598 },
1586 MODE_PATCH: {b'line_offset': 0,},
1599 MODE_PATCH: {
1600 b'line_offset': 0,
1601 },
1587 },
1602 },
1588 b'repo': repo,
1603 b'repo': repo,
1589 }
1604 }
@@ -40,10 +40,14 b' configtable = {}'
40 configitem = registrar.configitem(configtable)
40 configitem = registrar.configitem(configtable)
41
41
42 configitem(
42 configitem(
43 b'notify_obsoleted', b'domain', default=None,
43 b'notify_obsoleted',
44 b'domain',
45 default=None,
44 )
46 )
45 configitem(
47 configitem(
46 b'notify_obsoleted', b'messageidseed', default=None,
48 b'notify_obsoleted',
49 b'messageidseed',
50 default=None,
47 )
51 )
48 configitem(
52 configitem(
49 b'notify_obsoleted',
53 b'notify_obsoleted',
@@ -39,10 +39,14 b' configtable = {}'
39 configitem = registrar.configitem(configtable)
39 configitem = registrar.configitem(configtable)
40
40
41 configitem(
41 configitem(
42 b'notify_published', b'domain', default=None,
42 b'notify_published',
43 b'domain',
44 default=None,
43 )
45 )
44 configitem(
46 configitem(
45 b'notify_published', b'messageidseed', default=None,
47 b'notify_published',
48 b'messageidseed',
49 default=None,
46 )
50 )
47 configitem(
51 configitem(
48 b'notify_published',
52 b'notify_published',
@@ -154,37 +154,59 b' configtable = {}'
154 configitem = registrar.configitem(configtable)
154 configitem = registrar.configitem(configtable)
155
155
156 configitem(
156 configitem(
157 b'infinitepush', b'server', default=False,
157 b'infinitepush',
158 b'server',
159 default=False,
158 )
160 )
159 configitem(
161 configitem(
160 b'infinitepush', b'storetype', default=b'',
162 b'infinitepush',
163 b'storetype',
164 default=b'',
161 )
165 )
162 configitem(
166 configitem(
163 b'infinitepush', b'indextype', default=b'',
167 b'infinitepush',
168 b'indextype',
169 default=b'',
164 )
170 )
165 configitem(
171 configitem(
166 b'infinitepush', b'indexpath', default=b'',
172 b'infinitepush',
173 b'indexpath',
174 default=b'',
167 )
175 )
168 configitem(
176 configitem(
169 b'infinitepush', b'storeallparts', default=False,
177 b'infinitepush',
178 b'storeallparts',
179 default=False,
170 )
180 )
171 configitem(
181 configitem(
172 b'infinitepush', b'reponame', default=b'',
182 b'infinitepush',
183 b'reponame',
184 default=b'',
173 )
185 )
174 configitem(
186 configitem(
175 b'scratchbranch', b'storepath', default=b'',
187 b'scratchbranch',
188 b'storepath',
189 default=b'',
176 )
190 )
177 configitem(
191 configitem(
178 b'infinitepush', b'branchpattern', default=b'',
192 b'infinitepush',
193 b'branchpattern',
194 default=b'',
179 )
195 )
180 configitem(
196 configitem(
181 b'infinitepush', b'pushtobundlestore', default=False,
197 b'infinitepush',
198 b'pushtobundlestore',
199 default=False,
182 )
200 )
183 configitem(
201 configitem(
184 b'experimental', b'server-bundlestore-bookmark', default=b'',
202 b'experimental',
203 b'server-bundlestore-bookmark',
204 default=b'',
185 )
205 )
186 configitem(
206 configitem(
187 b'experimental', b'infinitepush-scratchpush', default=False,
207 b'experimental',
208 b'infinitepush-scratchpush',
209 default=False,
188 )
210 )
189
211
190 experimental = b'experimental'
212 experimental = b'experimental'
@@ -249,13 +271,13 b' def _getloglevel(ui):'
249
271
250
272
251 def _tryhoist(ui, remotebookmark):
273 def _tryhoist(ui, remotebookmark):
252 '''returns a bookmark with the hoisted part removed
274 """returns a bookmark with the hoisted part removed
253
275
254 The remotenames extension has a 'hoist' config that allows using remote
276 The remotenames extension has a 'hoist' config that allows using remote
255 bookmarks without specifying the remote path. For example, 'hg update master'
277 bookmarks without specifying the remote path. For example, 'hg update master'
256 works as well as 'hg update remote/master'. We want to allow the same in
278 works as well as 'hg update remote/master'. We want to allow the same in
257 infinitepush.
279 infinitepush.
258 '''
280 """
259
281
260 if common.isremotebooksenabled(ui):
282 if common.isremotebooksenabled(ui):
261 hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
283 hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
@@ -427,11 +449,11 b' def _readbundlerevs(bundlerepo):'
427
449
428
450
429 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
451 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
430 '''Tells remotefilelog to include all changed files in the changegroup
452 """Tells remotefilelog to include all changed files in the changegroup
431
453
432 By default remotefilelog doesn't include file content in the changegroup.
454 By default remotefilelog doesn't include file content in the changegroup.
433 But we need to include it if we are fetching from the bundlestore.
455 But we need to include it if we are fetching from the bundlestore.
434 '''
456 """
435 changedfiles = set()
457 changedfiles = set()
436 cl = bundlerepo.changelog
458 cl = bundlerepo.changelog
437 for r in bundlerevs:
459 for r in bundlerevs:
@@ -457,11 +479,11 b' def _includefilelogstobundle(bundlecaps,'
457
479
458
480
459 def _rebundle(bundlerepo, bundleroots, unknownhead):
481 def _rebundle(bundlerepo, bundleroots, unknownhead):
460 '''
482 """
461 A bundle may include more revisions than the user requested. For example,
483 A bundle may include more revisions than the user requested. For example,
462 the user may ask for one revision while the bundle also contains its
484 the user may ask for one revision while the bundle also contains its
463 descendants. This function filters out all revisions the user did not request.
485 descendants. This function filters out all revisions the user did not request.
464 '''
486 """
465 parts = []
487 parts = []
466
488
467 version = b'02'
489 version = b'02'
@@ -499,10 +521,10 b' def _needsrebundling(head, bundlerepo):'
499
521
500
522
501 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
523 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
502 '''generates the bundle that will be sent to the user
524 """generates the bundle that will be sent to the user
503
525
504 returns a tuple with the raw bundle string and the bundle type
526 returns a tuple with the raw bundle string and the bundle type
505 '''
527 """
506 parts = []
528 parts = []
507 if not _needsrebundling(head, bundlerepo):
529 if not _needsrebundling(head, bundlerepo):
508 with util.posixfile(bundlefile, b"rb") as f:
530 with util.posixfile(bundlefile, b"rb") as f:
@@ -1022,7 +1044,12 b' def storetobundlestore(orig, repo, op, u'
1022 )
1044 )
1023 rpart.addparam(b'return', b'1', mandatory=False)
1045 rpart.addparam(b'return', b'1', mandatory=False)
1024
1046
1025 op.records.add(part.type, {b'return': 1,})
1047 op.records.add(
1048 part.type,
1049 {
1050 b'return': 1,
1051 },
1052 )
1026 if bundlepart:
1053 if bundlepart:
1027 bundler.addpart(bundlepart)
1054 bundler.addpart(bundlepart)
1028
1055
@@ -1112,7 +1139,12 b' def processparts(orig, repo, op, unbundl'
1112 bundle2._processpart(op, part)
1139 bundle2._processpart(op, part)
1113
1140
1114 if handleallparts:
1141 if handleallparts:
1115 op.records.add(part.type, {b'return': 1,})
1142 op.records.add(
1143 part.type,
1144 {
1145 b'return': 1,
1146 },
1147 )
1116 if bundlepart:
1148 if bundlepart:
1117 bundler.addpart(bundlepart)
1149 bundler.addpart(bundlepart)
1118
1150
@@ -1284,11 +1316,11 b' def _maybeaddpushbackpart(op, bookmark, '
1284
1316
1285
1317
1286 def bundle2pushkey(orig, op, part):
1318 def bundle2pushkey(orig, op, part):
1287 '''Wrapper of bundle2.handlepushkey()
1319 """Wrapper of bundle2.handlepushkey()
1288
1320
1289 The only goal is to skip calling the original function if the flag is set.
1321 The only goal is to skip calling the original function if the flag is set.
1290 It's set if an infinitepush push is happening.
1322 It's set if an infinitepush push is happening.
1291 '''
1323 """
1292 if op.records[scratchbranchparttype + b'_skippushkey']:
1324 if op.records[scratchbranchparttype + b'_skippushkey']:
1293 if op.reply is not None:
1325 if op.reply is not None:
1294 rpart = op.reply.newpart(b'reply:pushkey')
1326 rpart = op.reply.newpart(b'reply:pushkey')
@@ -1300,11 +1332,11 b' def bundle2pushkey(orig, op, part):'
1300
1332
1301
1333
1302 def bundle2handlephases(orig, op, part):
1334 def bundle2handlephases(orig, op, part):
1303 '''Wrapper of bundle2.handlephases()
1335 """Wrapper of bundle2.handlephases()
1304
1336
1305 The only goal is to skip calling the original function if the flag is set.
1337 The only goal is to skip calling the original function if the flag is set.
1306 It's set if an infinitepush push is happening.
1338 It's set if an infinitepush push is happening.
1307 '''
1339 """
1308
1340
1309 if op.records[scratchbranchparttype + b'_skipphaseheads']:
1341 if op.records[scratchbranchparttype + b'_skipphaseheads']:
1310 return
1342 return
@@ -1313,11 +1345,11 b' def bundle2handlephases(orig, op, part):'
1313
1345
1314
1346
1315 def _asyncsavemetadata(root, nodes):
1347 def _asyncsavemetadata(root, nodes):
1316 '''starts a separate process that fills metadata for the nodes
1348 """starts a separate process that fills metadata for the nodes
1317
1349
1318 This function creates a separate process and doesn't wait for its
1350 This function creates a separate process and doesn't wait for its
1319 completion. This was done to avoid slowing down pushes
1351 completion. This was done to avoid slowing down pushes
1320 '''
1352 """
1321
1353
1322 maxnodes = 50
1354 maxnodes = 50
1323 if len(nodes) > maxnodes:
1355 if len(nodes) > maxnodes:
@@ -90,11 +90,11 b' def _validaterevset(repo, revset, bookma'
90
90
91
91
92 def _handlelfs(repo, missing):
92 def _handlelfs(repo, missing):
93 '''Special case if lfs is enabled
93 """Special case if lfs is enabled
94
94
95 If lfs is enabled then we need to call the prepush hook
95 If lfs is enabled then we need to call the prepush hook
96 to make sure large files are uploaded to lfs
96 to make sure large files are uploaded to lfs
97 '''
97 """
98 try:
98 try:
99 lfsmod = extensions.find(b'lfs')
99 lfsmod = extensions.find(b'lfs')
100 lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
100 lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
@@ -47,8 +47,7 b' class indexapi(object):'
47 raise NotImplementedError()
47 raise NotImplementedError()
48
48
49 def deletebookmarks(self, patterns):
49 def deletebookmarks(self, patterns):
50 """Accepts list of bookmarks and deletes them.
50 """Accepts list of bookmarks and deletes them."""
51 """
52 raise NotImplementedError()
51 raise NotImplementedError()
53
52
54 def getbundle(self, node):
53 def getbundle(self, node):
@@ -28,9 +28,9 b' def _convertbookmarkpattern(pattern):'
28
28
29
29
30 class sqlindexapi(indexapi.indexapi):
30 class sqlindexapi(indexapi.indexapi):
31 '''
31 """
32 Sql backend for infinitepush index. See schema.sql
32 Sql backend for infinitepush index. See schema.sql
33 '''
33 """
34
34
35 def __init__(
35 def __init__(
36 self,
36 self,
@@ -158,13 +158,14 b' configtable = {}'
158 configitem = registrar.configitem(configtable)
158 configitem = registrar.configitem(configtable)
159
159
160 configitem(
160 configitem(
161 b'keywordset', b'svn', default=False,
161 b'keywordset',
162 b'svn',
163 default=False,
162 )
164 )
163 # date like in cvs' $Date
165 # date like in cvs' $Date
164 @templatefilter(b'utcdate', intype=templateutil.date)
166 @templatefilter(b'utcdate', intype=templateutil.date)
165 def utcdate(date):
167 def utcdate(date):
166 '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
168 """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
167 '''
168 dateformat = b'%Y/%m/%d %H:%M:%S'
169 dateformat = b'%Y/%m/%d %H:%M:%S'
169 return dateutil.datestr((date[0], 0), dateformat)
170 return dateutil.datestr((date[0], 0), dateformat)
170
171
@@ -172,18 +173,18 b' def utcdate(date):'
172 # date like in svn's $Date
173 # date like in svn's $Date
173 @templatefilter(b'svnisodate', intype=templateutil.date)
174 @templatefilter(b'svnisodate', intype=templateutil.date)
174 def svnisodate(date):
175 def svnisodate(date):
175 '''Date. Returns a date in this format: "2009-08-18 13:00:13
176 """Date. Returns a date in this format: "2009-08-18 13:00:13
176 +0200 (Tue, 18 Aug 2009)".
177 +0200 (Tue, 18 Aug 2009)".
177 '''
178 """
178 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
179 return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
179
180
180
181
181 # date like in svn's $Id
182 # date like in svn's $Id
182 @templatefilter(b'svnutcdate', intype=templateutil.date)
183 @templatefilter(b'svnutcdate', intype=templateutil.date)
183 def svnutcdate(date):
184 def svnutcdate(date):
184 '''Date. Returns a UTC-date in this format: "2009-08-18
185 """Date. Returns a UTC-date in this format: "2009-08-18
185 11:00:13Z".
186 11:00:13Z".
186 '''
187 """
187 dateformat = b'%Y-%m-%d %H:%M:%SZ'
188 dateformat = b'%Y-%m-%d %H:%M:%SZ'
188 return dateutil.datestr((date[0], 0), dateformat)
189 return dateutil.datestr((date[0], 0), dateformat)
189
190
@@ -221,25 +222,25 b' def _defaultkwmaps(ui):'
221
222
222
223
223 def _shrinktext(text, subfunc):
224 def _shrinktext(text, subfunc):
224 '''Helper for keyword expansion removal in text.
225 """Helper for keyword expansion removal in text.
225 Depending on subfunc also returns number of substitutions.'''
226 Depending on subfunc also returns number of substitutions."""
226 return subfunc(br'$\1$', text)
227 return subfunc(br'$\1$', text)
227
228
228
229
229 def _preselect(wstatus, changed):
230 def _preselect(wstatus, changed):
230 '''Retrieves modified and added files from a working directory state
231 """Retrieves modified and added files from a working directory state
231 and returns the subset of each contained in given changed files
232 and returns the subset of each contained in given changed files
232 retrieved from a change context.'''
233 retrieved from a change context."""
233 modified = [f for f in wstatus.modified if f in changed]
234 modified = [f for f in wstatus.modified if f in changed]
234 added = [f for f in wstatus.added if f in changed]
235 added = [f for f in wstatus.added if f in changed]
235 return modified, added
236 return modified, added
236
237
237
238
238 class kwtemplater(object):
239 class kwtemplater(object):
239 '''
240 """
240 Sets up keyword templates, corresponding keyword regex, and
241 Sets up keyword templates, corresponding keyword regex, and
241 provides keyword substitution functions.
242 provides keyword substitution functions.
242 '''
243 """
243
244
244 def __init__(self, ui, repo, inc, exc):
245 def __init__(self, ui, repo, inc, exc):
245 self.ui = ui
246 self.ui = ui
@@ -304,8 +305,8 b' class kwtemplater(object):'
304 return data
305 return data
305
306
306 def iskwfile(self, cand, ctx):
307 def iskwfile(self, cand, ctx):
307 '''Returns subset of candidates which are configured for keyword
308 """Returns subset of candidates which are configured for keyword
308 expansion but are not symbolic links.'''
309 expansion but are not symbolic links."""
309 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
310 return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
310
311
311 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
312 def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
@@ -374,18 +375,18 b' class kwtemplater(object):'
374 return lines
375 return lines
375
376
376 def wread(self, fname, data):
377 def wread(self, fname, data):
377 '''If in restricted mode returns data read from wdir with
378 """If in restricted mode returns data read from wdir with
378 keyword substitutions removed.'''
379 keyword substitutions removed."""
379 if self.restrict:
380 if self.restrict:
380 return self.shrink(fname, data)
381 return self.shrink(fname, data)
381 return data
382 return data
382
383
383
384
384 class kwfilelog(filelog.filelog):
385 class kwfilelog(filelog.filelog):
385 '''
386 """
386 Subclass of filelog to hook into its read, add, cmp methods.
387 Subclass of filelog to hook into its read, add, cmp methods.
387 Keywords are "stored" unexpanded, and processed on reading.
388 Keywords are "stored" unexpanded, and processed on reading.
388 '''
389 """
389
390
390 def __init__(self, opener, kwt, path):
391 def __init__(self, opener, kwt, path):
391 super(kwfilelog, self).__init__(opener, path)
392 super(kwfilelog, self).__init__(opener, path)
@@ -411,8 +412,8 b' class kwfilelog(filelog.filelog):'
411
412
412
413
413 def _status(ui, repo, wctx, kwt, *pats, **opts):
414 def _status(ui, repo, wctx, kwt, *pats, **opts):
414 '''Bails out if [keyword] configuration is not active.
415 """Bails out if [keyword] configuration is not active.
415 Returns status of working directory.'''
416 Returns status of working directory."""
416 if kwt:
417 if kwt:
417 opts = pycompat.byteskwargs(opts)
418 opts = pycompat.byteskwargs(opts)
418 return repo.status(
419 return repo.status(
@@ -448,7 +449,7 b' def _kwfwrite(ui, repo, expand, *pats, *'
448 optionalrepo=True,
449 optionalrepo=True,
449 )
450 )
450 def demo(ui, repo, *args, **opts):
451 def demo(ui, repo, *args, **opts):
451 '''print [keywordmaps] configuration and an expansion example
452 """print [keywordmaps] configuration and an expansion example
452
453
453 Show current, custom, or default keyword template maps and their
454 Show current, custom, or default keyword template maps and their
454 expansions.
455 expansions.
@@ -459,7 +460,7 b' def demo(ui, repo, *args, **opts):'
459 Use -d/--default to disable current configuration.
460 Use -d/--default to disable current configuration.
460
461
461 See :hg:`help templates` for information on templates and filters.
462 See :hg:`help templates` for information on templates and filters.
462 '''
463 """
463
464
464 def demoitems(section, items):
465 def demoitems(section, items):
465 ui.write(b'[%s]\n' % section)
466 ui.write(b'[%s]\n' % section)
@@ -547,12 +548,12 b' def demo(ui, repo, *args, **opts):'
547 inferrepo=True,
548 inferrepo=True,
548 )
549 )
549 def expand(ui, repo, *pats, **opts):
550 def expand(ui, repo, *pats, **opts):
550 '''expand keywords in the working directory
551 """expand keywords in the working directory
551
552
552 Run after (re)enabling keyword expansion.
553 Run after (re)enabling keyword expansion.
553
554
554 kwexpand refuses to run if given files contain local changes.
555 kwexpand refuses to run if given files contain local changes.
555 '''
556 """
556 # 3rd argument sets expansion to True
557 # 3rd argument sets expansion to True
557 _kwfwrite(ui, repo, True, *pats, **opts)
558 _kwfwrite(ui, repo, True, *pats, **opts)
558
559
@@ -569,7 +570,7 b' def expand(ui, repo, *pats, **opts):'
569 inferrepo=True,
570 inferrepo=True,
570 )
571 )
571 def files(ui, repo, *pats, **opts):
572 def files(ui, repo, *pats, **opts):
572 '''show files configured for keyword expansion
573 """show files configured for keyword expansion
573
574
574 List which files in the working directory are matched by the
575 List which files in the working directory are matched by the
575 [keyword] configuration patterns.
576 [keyword] configuration patterns.
@@ -588,7 +589,7 b' def files(ui, repo, *pats, **opts):'
588 k = keyword expansion candidate (not tracked)
589 k = keyword expansion candidate (not tracked)
589 I = ignored
590 I = ignored
590 i = ignored (not tracked)
591 i = ignored (not tracked)
591 '''
592 """
592 kwt = getattr(repo, '_keywordkwt', None)
593 kwt = getattr(repo, '_keywordkwt', None)
593 wctx = repo[None]
594 wctx = repo[None]
594 status = _status(ui, repo, wctx, kwt, *pats, **opts)
595 status = _status(ui, repo, wctx, kwt, *pats, **opts)
@@ -634,12 +635,12 b' def files(ui, repo, *pats, **opts):'
634 inferrepo=True,
635 inferrepo=True,
635 )
636 )
636 def shrink(ui, repo, *pats, **opts):
637 def shrink(ui, repo, *pats, **opts):
637 '''revert expanded keywords in the working directory
638 """revert expanded keywords in the working directory
638
639
639 Must be run before changing/disabling active keywords.
640 Must be run before changing/disabling active keywords.
640
641
641 kwshrink refuses to run if given files contain local changes.
642 kwshrink refuses to run if given files contain local changes.
642 '''
643 """
643 # 3rd argument sets expansion to False
644 # 3rd argument sets expansion to False
644 _kwfwrite(ui, repo, False, *pats, **opts)
645 _kwfwrite(ui, repo, False, *pats, **opts)
645
646
@@ -648,8 +649,8 b' def shrink(ui, repo, *pats, **opts):'
648
649
649
650
650 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
651 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
651 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
652 """Monkeypatch/wrap patch.patchfile.__init__ to avoid
652 rejects or conflicts due to expanded keywords in working dir.'''
653 rejects or conflicts due to expanded keywords in working dir."""
653 orig(self, ui, gp, backend, store, eolmode)
654 orig(self, ui, gp, backend, store, eolmode)
654 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
655 kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
655 if kwt:
656 if kwt:
@@ -702,7 +703,7 b' def kw_amend(orig, ui, repo, old, extra,'
702
703
703
704
704 def kw_copy(orig, ui, repo, pats, opts, rename=False):
705 def kw_copy(orig, ui, repo, pats, opts, rename=False):
705 '''Wraps cmdutil.copy so that copy/rename destinations do not
706 """Wraps cmdutil.copy so that copy/rename destinations do not
706 contain expanded keywords.
707 contain expanded keywords.
707 Note that the source of a regular file destination may also be a
708 Note that the source of a regular file destination may also be a
708 symlink:
709 symlink:
@@ -710,7 +711,7 b' def kw_copy(orig, ui, repo, pats, opts, '
710 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
711 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
711 For the latter we have to follow the symlink to find out whether its
712 For the latter we have to follow the symlink to find out whether its
712 target is configured for expansion and we therefore must unexpand the
713 target is configured for expansion and we therefore must unexpand the
713 keywords in the destination.'''
714 keywords in the destination."""
714 kwt = getattr(repo, '_keywordkwt', None)
715 kwt = getattr(repo, '_keywordkwt', None)
715 if kwt is None:
716 if kwt is None:
716 return orig(ui, repo, pats, opts, rename)
717 return orig(ui, repo, pats, opts, rename)
@@ -722,9 +723,9 b' def kw_copy(orig, ui, repo, pats, opts, '
722 cwd = repo.getcwd()
723 cwd = repo.getcwd()
723
724
724 def haskwsource(dest):
725 def haskwsource(dest):
725 '''Returns true if dest is a regular file and configured for
726 """Returns true if dest is a regular file and configured for
726 expansion or a symlink which points to a file configured for
727 expansion or a symlink which points to a file configured for
727 expansion. '''
728 expansion."""
728 source = repo.dirstate.copied(dest)
729 source = repo.dirstate.copied(dest)
729 if b'l' in wctx.flags(source):
730 if b'l' in wctx.flags(source):
730 source = pathutil.canonpath(
731 source = pathutil.canonpath(
@@ -785,12 +786,12 b' def kwfilectx_cmp(orig, self, fctx):'
785
786
786
787
787 def uisetup(ui):
788 def uisetup(ui):
788 ''' Monkeypatches dispatch._parse to retrieve user command.
789 """Monkeypatches dispatch._parse to retrieve user command.
789 Overrides file method to return kwfilelog instead of filelog
790 Overrides file method to return kwfilelog instead of filelog
790 if file matches user configuration.
791 if file matches user configuration.
791 Wraps commit to overwrite configured files with updated
792 Wraps commit to overwrite configured files with updated
792 keyword substitutions.
793 keyword substitutions.
793 Monkeypatches patch and webcommands.'''
794 Monkeypatches patch and webcommands."""
794
795
795 def kwdispatch_parse(orig, ui, args):
796 def kwdispatch_parse(orig, ui, args):
796 '''Monkeypatch dispatch._parse to obtain running hg command.'''
797 '''Monkeypatch dispatch._parse to obtain running hg command.'''
@@ -136,13 +136,19 b' eh.merge(overrides.eh)'
136 eh.merge(proto.eh)
136 eh.merge(proto.eh)
137
137
138 eh.configitem(
138 eh.configitem(
139 b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
139 b'largefiles',
140 b'minsize',
141 default=eh.configitem.dynamicdefault,
140 )
142 )
141 eh.configitem(
143 eh.configitem(
142 b'largefiles', b'patterns', default=list,
144 b'largefiles',
145 b'patterns',
146 default=list,
143 )
147 )
144 eh.configitem(
148 eh.configitem(
145 b'largefiles', b'usercache', default=None,
149 b'largefiles',
150 b'usercache',
151 default=None,
146 )
152 )
147
153
148 cmdtable = eh.cmdtable
154 cmdtable = eh.cmdtable
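
The exploded configitem() calls are the other visible behavior change: black 20.8b1's "magic trailing comma" treats a trailing comma inside brackets as a request to keep one element per line instead of repacking the call onto a single line. Sketch of the effect on the first call above:

    # input: the call would fit on fewer lines, but it ends with a trailing comma
    eh.configitem(
        b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
    )

    # output: black explodes it to one argument per line
    eh.configitem(
        b'largefiles',
        b'minsize',
        default=eh.configitem.dynamicdefault,
    )
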
@@ -17,8 +17,8 b' from . import lfutil'
17
17
18
18
19 class StoreError(Exception):
19 class StoreError(Exception):
20 '''Raised when there is a problem getting files from or putting
20 """Raised when there is a problem getting files from or putting
21 files to a central store.'''
21 files to a central store."""
22
22
23 def __init__(self, filename, hash, url, detail):
23 def __init__(self, filename, hash, url, detail):
24 self.filename = filename
24 self.filename = filename
@@ -49,19 +49,19 b' class basestore(object):'
49 raise NotImplementedError(b'abstract method')
49 raise NotImplementedError(b'abstract method')
50
50
51 def exists(self, hashes):
51 def exists(self, hashes):
52 '''Check to see if the store contains the given hashes. Given an
52 """Check to see if the store contains the given hashes. Given an
53 iterable of hashes it returns a mapping from hash to bool.'''
53 iterable of hashes it returns a mapping from hash to bool."""
54 raise NotImplementedError(b'abstract method')
54 raise NotImplementedError(b'abstract method')
55
55
56 def get(self, files):
56 def get(self, files):
57 '''Get the specified largefiles from the store and write to local
57 """Get the specified largefiles from the store and write to local
58 files under repo.root. files is a list of (filename, hash)
58 files under repo.root. files is a list of (filename, hash)
59 tuples. Return (success, missing), lists of files successfully
59 tuples. Return (success, missing), lists of files successfully
60 downloaded and those not found in the store. success is a list
60 downloaded and those not found in the store. success is a list
61 of (filename, hash) tuples; missing is a list of filenames that
61 of (filename, hash) tuples; missing is a list of filenames that
62 we could not get. (The detailed error message will already have
62 we could not get. (The detailed error message will already have
63 been presented to the user, so missing is just supplied as a
63 been presented to the user, so missing is just supplied as a
64 summary.)'''
64 summary.)"""
65 success = []
65 success = []
66 missing = []
66 missing = []
67 ui = self.ui
67 ui = self.ui
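
The (success, missing) contract of get() lets a caller report partial failure without inspecting error text, since detailed errors were already shown. A minimal caller sketch; store and ui are assumed names for a concrete basestore subclass and its ui object:

    files = [(b'big.dat', b'deadbeef' * 5)]  # (filename, hash) pairs
    success, missing = store.get(files)
    for filename, hash in success:
        ui.note(b'fetched %s (%s)\n' % (filename, hash))
    for filename in missing:
        ui.warn(b'could not get largefile %s\n' % filename)
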
@@ -123,9 +123,9 b' class basestore(object):'
123 return True
123 return True
124
124
125 def verify(self, revs, contents=False):
125 def verify(self, revs, contents=False):
126 '''Verify the existence (and, optionally, contents) of every big
126 """Verify the existence (and, optionally, contents) of every big
127 file revision referenced by every changeset in revs.
127 file revision referenced by every changeset in revs.
128 Return 0 if all is well, non-zero on any errors.'''
128 Return 0 if all is well, non-zero on any errors."""
129
129
130 self.ui.status(
130 self.ui.status(
131 _(b'searching %d changesets for largefiles\n') % len(revs)
131 _(b'searching %d changesets for largefiles\n') % len(revs)
@@ -163,17 +163,17 b' class basestore(object):'
163 return int(failed)
163 return int(failed)
164
164
165 def _getfile(self, tmpfile, filename, hash):
165 def _getfile(self, tmpfile, filename, hash):
166 '''Fetch one revision of one file from the store and write it
166 """Fetch one revision of one file from the store and write it
167 to tmpfile. Compute the hash of the file on-the-fly as it
167 to tmpfile. Compute the hash of the file on-the-fly as it
168 downloads and return the hash. Close tmpfile. Raise
168 downloads and return the hash. Close tmpfile. Raise
169 StoreError if unable to download the file (e.g. it does not
169 StoreError if unable to download the file (e.g. it does not
170 exist in the store).'''
170 exist in the store)."""
171 raise NotImplementedError(b'abstract method')
171 raise NotImplementedError(b'abstract method')
172
172
173 def _verifyfiles(self, contents, filestocheck):
173 def _verifyfiles(self, contents, filestocheck):
174 '''Perform the actual verification of files in the store.
174 """Perform the actual verification of files in the store.
175 'contents' controls verification of content hash.
175 'contents' controls verification of content hash.
176 'filestocheck' is a list of files to check.
176 'filestocheck' is a list of files to check.
177 Returns _true_ if any problems are found!
177 Returns _true_ if any problems are found!
178 '''
178 """
179 raise NotImplementedError(b'abstract method')
179 raise NotImplementedError(b'abstract method')
@@ -66,7 +66,7 b' eh = exthelper.exthelper()'
66 inferrepo=True,
66 inferrepo=True,
67 )
67 )
68 def lfconvert(ui, src, dest, *pats, **opts):
68 def lfconvert(ui, src, dest, *pats, **opts):
69 '''convert a normal repository to a largefiles repository
69 """convert a normal repository to a largefiles repository
70
70
71 Convert repository SOURCE to a new repository DEST, identical to
71 Convert repository SOURCE to a new repository DEST, identical to
72 SOURCE except that certain files will be converted as largefiles:
72 SOURCE except that certain files will be converted as largefiles:
@@ -82,7 +82,7 b' def lfconvert(ui, src, dest, *pats, **op'
82 repository.
82 repository.
83
83
84 Use --to-normal to convert largefiles back to normal files; after
84 Use --to-normal to convert largefiles back to normal files; after
85 this, the DEST repository can be used without largefiles at all.'''
85 this, the DEST repository can be used without largefiles at all."""
86
86
87 opts = pycompat.byteskwargs(opts)
87 opts = pycompat.byteskwargs(opts)
88 if opts[b'to_normal']:
88 if opts[b'to_normal']:
@@ -393,8 +393,8 b' def _converttags(ui, revmap, data):'
393
393
394
394
395 def _islfile(file, ctx, matcher, size):
395 def _islfile(file, ctx, matcher, size):
396 '''Return true if file should be considered a largefile, i.e.
396 """Return true if file should be considered a largefile, i.e.
397 matcher matches it or it is larger than size.'''
397 matcher matches it or it is larger than size."""
398 # never store special .hg* files as largefiles
398 # never store special .hg* files as largefiles
399 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
399 if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
400 return False
400 return False
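
_islfile() therefore reduces to the .hg* exclusion above plus two tests. An illustrative standalone predicate; treating size as megabytes is an assumption made for the sketch:

    def islfile_sketch(filename, filesize, matcher, size_mb):
        # never store Mercurial's own metadata files as largefiles
        if filename in (b'.hgtags', b'.hgignore', b'.hgsigs'):
            return False
        # largefile if the pattern matcher accepts it or it is big enough
        return (bool(matcher and matcher(filename))
                or filesize > size_mb * 1024 * 1024)
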
@@ -440,11 +440,11 b' def uploadlfiles(ui, rsrc, rdst, files):'
440
440
441
441
442 def verifylfiles(ui, repo, all=False, contents=False):
442 def verifylfiles(ui, repo, all=False, contents=False):
443 '''Verify that every largefile revision in the current changeset
443 """Verify that every largefile revision in the current changeset
444 exists in the central store. With --contents, also verify that
444 exists in the central store. With --contents, also verify that
445 the contents of each local largefile revision are correct (SHA-1 hash
445 the contents of each local largefile revision are correct (SHA-1 hash
446 matches the revision ID). With --all, check every changeset in
446 matches the revision ID). With --all, check every changeset in
447 this repository.'''
447 this repository."""
448 if all:
448 if all:
449 revs = repo.revs(b'all()')
449 revs = repo.revs(b'all()')
450 else:
450 else:
@@ -455,12 +455,12 b' def verifylfiles(ui, repo, all=False, co'
455
455
456
456
457 def cachelfiles(ui, repo, node, filelist=None):
457 def cachelfiles(ui, repo, node, filelist=None):
458 '''cachelfiles ensures that all largefiles needed by the specified revision
458 """cachelfiles ensures that all largefiles needed by the specified revision
459 are present in the repository's largefile cache.
459 are present in the repository's largefile cache.
460
460
461 returns a tuple (cached, missing). cached is the list of files downloaded
461 returns a tuple (cached, missing). cached is the list of files downloaded
462 by this operation; missing is the list of files that were needed but could
462 by this operation; missing is the list of files that were needed but could
463 not be found.'''
463 not be found."""
464 lfiles = lfutil.listlfiles(repo, node)
464 lfiles = lfutil.listlfiles(repo, node)
465 if filelist:
465 if filelist:
466 lfiles = set(lfiles) & set(filelist)
466 lfiles = set(lfiles) & set(filelist)
@@ -502,11 +502,11 b' def downloadlfiles(ui, repo):'
502 def updatelfiles(
502 def updatelfiles(
503 ui, repo, filelist=None, printmessage=None, normallookup=False
503 ui, repo, filelist=None, printmessage=None, normallookup=False
504 ):
504 ):
505 '''Update largefiles according to standins in the working directory
505 """Update largefiles according to standins in the working directory
506
506
507 If ``printmessage`` is other than ``None``, it means "print (or
507 If ``printmessage`` is other than ``None``, it means "print (or
508 ignore, for false) message forcibly".
508 ignore, for false) message forcibly".
509 '''
509 """
510 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
510 statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
511 with repo.wlock():
511 with repo.wlock():
512 lfdirstate = lfutil.openlfdirstate(ui, repo)
512 lfdirstate = lfutil.openlfdirstate(ui, repo)
@@ -80,10 +80,10 b' def link(src, dest):'
80
80
81
81
82 def usercachepath(ui, hash):
82 def usercachepath(ui, hash):
83 '''Return the correct location in the "global" largefiles cache for a file
83 """Return the correct location in the "global" largefiles cache for a file
84 with the given hash.
84 with the given hash.
85 This cache is used for sharing of largefiles across repositories - both
85 This cache is used for sharing of largefiles across repositories - both
86 to preserve download bandwidth and storage space.'''
86 to preserve download bandwidth and storage space."""
87 return os.path.join(_usercachedir(ui), hash)
87 return os.path.join(_usercachedir(ui), hash)
88
88
89
89
@@ -143,9 +143,9 b' def inusercache(ui, hash):'
143
143
144
144
145 def findfile(repo, hash):
145 def findfile(repo, hash):
146 '''Return store path of the largefile with the specified hash.
146 """Return store path of the largefile with the specified hash.
147 As a side effect, the file might be linked from user cache.
147 As a side effect, the file might be linked from user cache.
148 Return None if the file can't be found locally.'''
148 Return None if the file can't be found locally."""
149 path, exists = findstorepath(repo, hash)
149 path, exists = findstorepath(repo, hash)
150 if exists:
150 if exists:
151 repo.ui.note(_(b'found %s in store\n') % hash)
151 repo.ui.note(_(b'found %s in store\n') % hash)
@@ -191,10 +191,10 b' class largefilesdirstate(dirstate.dirsta'
191
191
192
192
193 def openlfdirstate(ui, repo, create=True):
193 def openlfdirstate(ui, repo, create=True):
194 '''
194 """
195 Return a dirstate object that tracks largefiles: i.e. its root is
195 Return a dirstate object that tracks largefiles: i.e. its root is
196 the repo root, but it is saved in .hg/largefiles/dirstate.
196 the repo root, but it is saved in .hg/largefiles/dirstate.
197 '''
197 """
198 vfs = repo.vfs
198 vfs = repo.vfs
199 lfstoredir = longname
199 lfstoredir = longname
200 opener = vfsmod.vfs(vfs.join(lfstoredir))
200 opener = vfsmod.vfs(vfs.join(lfstoredir))
@@ -245,8 +245,8 b' def lfdirstatestatus(lfdirstate, repo):'
245
245
246
246
247 def listlfiles(repo, rev=None, matcher=None):
247 def listlfiles(repo, rev=None, matcher=None):
248 '''return a list of largefiles in the working copy or the
248 """return a list of largefiles in the working copy or the
249 specified changeset'''
249 specified changeset"""
250
250
251 if matcher is None:
251 if matcher is None:
252 matcher = getstandinmatcher(repo)
252 matcher = getstandinmatcher(repo)
@@ -265,18 +265,18 b' def instore(repo, hash, forcelocal=False'
265
265
266
266
267 def storepath(repo, hash, forcelocal=False):
267 def storepath(repo, hash, forcelocal=False):
268 '''Return the correct location in the repository largefiles store for a
268 """Return the correct location in the repository largefiles store for a
269 file with the given hash.'''
269 file with the given hash."""
270 if not forcelocal and repo.shared():
270 if not forcelocal and repo.shared():
271 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
271 return repo.vfs.reljoin(repo.sharedpath, longname, hash)
272 return repo.vfs.join(longname, hash)
272 return repo.vfs.join(longname, hash)
273
273
274
274
275 def findstorepath(repo, hash):
275 def findstorepath(repo, hash):
276 '''Search through the local store path(s) to find the file for the given
276 """Search through the local store path(s) to find the file for the given
277 hash. If the file is not found, its path in the primary store is returned.
277 hash. If the file is not found, its path in the primary store is returned.
278 The return value is a tuple of (path, exists(path)).
278 The return value is a tuple of (path, exists(path)).
279 '''
279 """
280 # For shared repos, the primary store is in the share source. But for
280 # For shared repos, the primary store is in the share source. But for
281 # backward compatibility, force a lookup in the local store if it wasn't
281 # backward compatibility, force a lookup in the local store if it wasn't
282 # found in the share source.
282 # found in the share source.
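
The comment describes a two-step lookup. A sketch of that documented fallback, reusing the storepath() helper from the previous hunk and a plain os.path.exists() in place of the real vfs check:

    import os

    def findstorepath_sketch(repo, hash):
        # primary location: the share-aware store path
        primary = storepath(repo, hash)
        if os.path.exists(primary):
            return primary, True
        # backward compatibility: retry against the repo-local store
        local = storepath(repo, hash, forcelocal=True)
        if os.path.exists(local):
            return local, True
        return primary, False  # not found: still report the primary path
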
@@ -291,11 +291,11 b' def findstorepath(repo, hash):'
291
291
292
292
293 def copyfromcache(repo, hash, filename):
293 def copyfromcache(repo, hash, filename):
294 '''Copy the specified largefile from the repo or system cache to
294 """Copy the specified largefile from the repo or system cache to
295 filename in the repository. Return true on success or false if the
295 filename in the repository. Return true on success or false if the
296 file was not found in either cache (which should not happen:
296 file was not found in either cache (which should not happen:
297 this is meant to be called only after ensuring that the needed
297 this is meant to be called only after ensuring that the needed
298 largefile exists in the cache).'''
298 largefile exists in the cache)."""
299 wvfs = repo.wvfs
299 wvfs = repo.wvfs
300 path = findfile(repo, hash)
300 path = findfile(repo, hash)
301 if path is None:
301 if path is None:
@@ -354,8 +354,8 b' def copytostoreabsolute(repo, file, hash'
354
354
355
355
356 def linktousercache(repo, hash):
356 def linktousercache(repo, hash):
357 '''Link / copy the largefile with the specified hash from the store
357 """Link / copy the largefile with the specified hash from the store
358 to the cache.'''
358 to the cache."""
359 path = usercachepath(repo.ui, hash)
359 path = usercachepath(repo.ui, hash)
360 link(storepath(repo, hash), path)
360 link(storepath(repo, hash), path)
361
361
@@ -380,9 +380,9 b' def getstandinmatcher(repo, rmatcher=Non'
380
380
381
381
382 def composestandinmatcher(repo, rmatcher):
382 def composestandinmatcher(repo, rmatcher):
383 '''Return a matcher that accepts standins corresponding to the
383 """Return a matcher that accepts standins corresponding to the
384 files accepted by rmatcher. Pass the list of files in the matcher
384 files accepted by rmatcher. Pass the list of files in the matcher
385 as the paths specified by the user.'''
385 as the paths specified by the user."""
386 smatcher = getstandinmatcher(repo, rmatcher)
386 smatcher = getstandinmatcher(repo, rmatcher)
387 isstandin = smatcher.matchfn
387 isstandin = smatcher.matchfn
388
388
@@ -395,8 +395,8 b' def composestandinmatcher(repo, rmatcher'
395
395
396
396
397 def standin(filename):
397 def standin(filename):
398 '''Return the repo-relative path to the standin for the specified big
398 """Return the repo-relative path to the standin for the specified big
399 file.'''
399 file."""
400 # Notes:
400 # Notes:
401 # 1) Some callers want an absolute path, but for instance addlargefiles
401 # 1) Some callers want an absolute path, but for instance addlargefiles
402 # needs it repo-relative so it can be passed to repo[None].add(). So
402 # needs it repo-relative so it can be passed to repo[None].add(). So
@@ -408,8 +408,8 b' def standin(filename):'
408
408
409
409
410 def isstandin(filename):
410 def isstandin(filename):
411 '''Return true if filename is a big file standin. filename must be
411 """Return true if filename is a big file standin. filename must be
412 in Mercurial's internal form (slash-separated).'''
412 in Mercurial's internal form (slash-separated)."""
413 return filename.startswith(shortnameslash)
413 return filename.startswith(shortnameslash)
414
414
415
415
@@ -439,9 +439,9 b' def updatestandin(repo, lfile, standin):'
439
439
440
440
441 def readasstandin(fctx):
441 def readasstandin(fctx):
442 '''read hex hash from given filectx of standin file
442 """read hex hash from given filectx of standin file
443
443
444 This encapsulates how "standin" data is stored into storage layer.'''
444 This encapsulates how "standin" data is stored into storage layer."""
445 return fctx.data().strip()
445 return fctx.data().strip()
446
446
447
447
@@ -451,8 +451,8 b' def writestandin(repo, standin, hash, ex'
451
451
452
452
453 def copyandhash(instream, outfile):
453 def copyandhash(instream, outfile):
454 '''Read bytes from instream (iterable) and write them to outfile,
454 """Read bytes from instream (iterable) and write them to outfile,
455 computing the SHA-1 hash of the data along the way. Return the hash.'''
455 computing the SHA-1 hash of the data along the way. Return the hash."""
456 hasher = hashutil.sha1(b'')
456 hasher = hashutil.sha1(b'')
457 for data in instream:
457 for data in instream:
458 hasher.update(data)
458 hasher.update(data)
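
copyandhash() is the standard stream-while-hashing pattern, reading the data only once. A self-contained sketch using hashlib.sha1 in place of the hashutil wrapper:

    import hashlib

    def copy_and_hash(instream, outfile):
        # update the digest with each chunk as it is written out
        hasher = hashlib.sha1()
        for data in instream:
            hasher.update(data)
            outfile.write(data)
        outfile.close()
        return hasher.hexdigest().encode('ascii')
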
@@ -635,11 +635,11 b' def getlfilestoupload(repo, missing, add'
635
635
636
636
637 def updatestandinsbymatch(repo, match):
637 def updatestandinsbymatch(repo, match):
638 '''Update standins in the working directory according to specified match
638 """Update standins in the working directory according to specified match
639
639
640 This returns a (possibly modified) ``match`` object to be used for
640 This returns a (possibly modified) ``match`` object to be used for
641 the subsequent commit process.
641 the subsequent commit process.
642 '''
642 """
643
643
644 ui = repo.ui
644 ui = repo.ui
645
645
@@ -741,7 +741,7 b' def updatestandinsbymatch(repo, match):'
741
741
742
742
743 class automatedcommithook(object):
743 class automatedcommithook(object):
744 '''Stateful hook to update standins at the 1st commit of resuming
744 """Stateful hook to update standins at the 1st commit of resuming
745
745
746 For efficiency, updating standins in the working directory should
746 For efficiency, updating standins in the working directory should
747 be avoided while automated committing (like rebase, transplant and
747 be avoided while automated committing (like rebase, transplant and
@@ -750,7 +750,7 b' class automatedcommithook(object):'
750 But the 1st commit of resuming automated committing (e.g. ``rebase
750 But the 1st commit of resuming automated committing (e.g. ``rebase
751 --continue``) should update them, because largefiles may be
751 --continue``) should update them, because largefiles may be
752 modified manually.
752 modified manually.
753 '''
753 """
754
754
755 def __init__(self, resuming):
755 def __init__(self, resuming):
756 self.resuming = resuming
756 self.resuming = resuming
@@ -764,14 +764,14 b' class automatedcommithook(object):'
764
764
765
765
766 def getstatuswriter(ui, repo, forcibly=None):
766 def getstatuswriter(ui, repo, forcibly=None):
767 '''Return the function to write largefiles specific status out
767 """Return the function to write largefiles specific status out
768
768
769 If ``forcibly`` is ``None``, this returns the last element of
769 If ``forcibly`` is ``None``, this returns the last element of
770 ``repo._lfstatuswriters`` as "default" writer function.
770 ``repo._lfstatuswriters`` as "default" writer function.
771
771
772 Otherwise, this returns the function to always write out (or
772 Otherwise, this returns the function to always write out (or
773 ignore if ``not forcibly``) status.
773 ignore if ``not forcibly``) status.
774 '''
774 """
775 if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
775 if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
776 return repo._lfstatuswriters[-1]
776 return repo._lfstatuswriters[-1]
777 else:
777 else:
@@ -20,9 +20,9 b' from . import ('
20
20
21
21
22 class localstore(basestore.basestore):
22 class localstore(basestore.basestore):
23 '''localstore first attempts to grab files out of the store in the remote
23 """localstore first attempts to grab files out of the store in the remote
24 Mercurial repository. Failing that, it attempts to grab the files from
24 Mercurial repository. Failing that, it attempts to grab the files from
25 the user cache.'''
25 the user cache."""
26
26
27 def __init__(self, ui, repo, remote):
27 def __init__(self, ui, repo, remote):
28 self.remote = remote.local()
28 self.remote = remote.local()
@@ -58,8 +58,8 b" MERGE_ACTION_LARGEFILE_MARK_REMOVED = b'"
58
58
59
59
60 def composelargefilematcher(match, manifest):
60 def composelargefilematcher(match, manifest):
61 '''create a matcher that matches only the largefiles in the original
61 """create a matcher that matches only the largefiles in the original
62 matcher'''
62 matcher"""
63 m = copy.copy(match)
63 m = copy.copy(match)
64 lfile = lambda f: lfutil.standin(f) in manifest
64 lfile = lambda f: lfutil.standin(f) in manifest
65 m._files = [lf for lf in m._files if lfile(lf)]
65 m._files = [lf for lf in m._files if lfile(lf)]
@@ -586,11 +586,17 b' def overridecalculateupdates('
586 mresult.addfile(lfile, b'k', None, b'replaces standin')
586 mresult.addfile(lfile, b'k', None, b'replaces standin')
587 if branchmerge:
587 if branchmerge:
588 mresult.addfile(
588 mresult.addfile(
589 standin, b'k', None, b'replaced by non-standin',
589 standin,
590 b'k',
591 None,
592 b'replaced by non-standin',
590 )
593 )
591 else:
594 else:
592 mresult.addfile(
595 mresult.addfile(
593 standin, b'r', None, b'replaced by non-standin',
596 standin,
597 b'r',
598 None,
599 b'replaced by non-standin',
594 )
600 )
595 elif lm in (b'g', b'dc') and sm != b'r':
601 elif lm in (b'g', b'dc') and sm != b'r':
596 if lm == b'dc':
602 if lm == b'dc':
@@ -610,7 +616,10 b' def overridecalculateupdates('
610 if branchmerge:
616 if branchmerge:
611 # largefile can be restored from standin safely
617 # largefile can be restored from standin safely
612 mresult.addfile(
618 mresult.addfile(
613 lfile, b'k', None, b'replaced by standin',
619 lfile,
620 b'k',
621 None,
622 b'replaced by standin',
614 )
623 )
615 mresult.addfile(standin, b'k', None, b'replaces standin')
624 mresult.addfile(standin, b'k', None, b'replaces standin')
616 else:
625 else:
@@ -628,7 +637,10 b' def overridecalculateupdates('
628 else: # pick remote normal file
637 else: # pick remote normal file
629 mresult.addfile(lfile, b'g', largs, b'replaces standin')
638 mresult.addfile(lfile, b'g', largs, b'replaces standin')
630 mresult.addfile(
639 mresult.addfile(
631 standin, b'r', None, b'replaced by non-standin',
640 standin,
641 b'r',
642 None,
643 b'replaced by non-standin',
632 )
644 )
633
645
634 return mresult
646 return mresult
@@ -39,8 +39,8 b' httpoldcallstream = None'
39
39
40
40
41 def putlfile(repo, proto, sha):
41 def putlfile(repo, proto, sha):
42 '''Server command for putting a largefile into a repository's local store
42 """Server command for putting a largefile into a repository's local store
43 and into the user cache.'''
43 and into the user cache."""
44 with proto.mayberedirectstdio() as output:
44 with proto.mayberedirectstdio() as output:
45 path = lfutil.storepath(repo, sha)
45 path = lfutil.storepath(repo, sha)
46 util.makedirs(os.path.dirname(path))
46 util.makedirs(os.path.dirname(path))
@@ -69,8 +69,8 b' def putlfile(repo, proto, sha):'
69
69
70
70
71 def getlfile(repo, proto, sha):
71 def getlfile(repo, proto, sha):
72 '''Server command for retrieving a largefile from the repository-local
72 """Server command for retrieving a largefile from the repository-local
73 cache or user cache.'''
73 cache or user cache."""
74 filename = lfutil.findfile(repo, sha)
74 filename = lfutil.findfile(repo, sha)
75 if not filename:
75 if not filename:
76 raise error.Abort(
76 raise error.Abort(
@@ -93,12 +93,12 b' def getlfile(repo, proto, sha):'
93
93
94
94
95 def statlfile(repo, proto, sha):
95 def statlfile(repo, proto, sha):
96 '''Server command for checking if a largefile is present - returns '2\n' if
96 """Server command for checking if a largefile is present - returns '2\n' if
97 the largefile is missing, '0\n' if it seems to be in good condition.
97 the largefile is missing, '0\n' if it seems to be in good condition.
98
98
99 The value 1 is reserved for mismatched checksum, but that is too expensive
99 The value 1 is reserved for mismatched checksum, but that is too expensive
100 to be verified on every stat and must be caught by running 'hg verify'
100 to be verified on every stat and must be caught by running 'hg verify'
101 server side.'''
101 server side."""
102 filename = lfutil.findfile(repo, sha)
102 filename = lfutil.findfile(repo, sha)
103 if not filename:
103 if not filename:
104 return wireprototypes.bytesresponse(b'2\n')
104 return wireprototypes.bytesresponse(b'2\n')
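
On the wire the reply is a single status digit plus a newline. A sketch of a hypothetical client-side helper for decoding it, using the codes from the docstring (0 available, 2 missing, 1 reserved for a checksum mismatch):

    def decode_statlfile(reply):
        # reply looks like b'0\n' or b'2\n'
        code = int(reply.rstrip(b'\n'))
        return {0: 'available', 1: 'corrupt', 2: 'missing'}.get(code, 'unknown')
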
@@ -194,8 +194,8 b' def _capabilities(orig, repo, proto):'
194
194
195
195
196 def heads(orig, repo, proto):
196 def heads(orig, repo, proto):
197 '''Wrap server command - largefile capable clients will know to call
197 """Wrap server command - largefile capable clients will know to call
198 lheads instead'''
198 lheads instead"""
199 if lfutil.islfilesrepo(repo):
199 if lfutil.islfilesrepo(repo):
200 return wireprototypes.ooberror(LARGEFILES_REQUIRED_MSG)
200 return wireprototypes.ooberror(LARGEFILES_REQUIRED_MSG)
201
201
@@ -146,8 +146,8 b' class remotestore(basestore.basestore):'
146 raise NotImplementedError(b'abstract method')
146 raise NotImplementedError(b'abstract method')
147
147
148 def _stat(self, hashes):
148 def _stat(self, hashes):
149 '''Get information about availability of files specified by
149 """Get information about availability of files specified by
150 hashes in the remote store. Return a dictionary mapping hashes
150 hashes in the remote store. Return a dictionary mapping hashes
151 to return codes, where 0 means the file is available and other
151 to return codes, where 0 means the file is available and other
152 values mean it is not.'''
152 values mean it is not."""
153 raise NotImplementedError(b'abstract method')
153 raise NotImplementedError(b'abstract method')
@@ -360,7 +360,7 b' def reposetup(ui, repo):'
360 # TODO: _subdirlfs should be moved into "lfutil.py", because
360 # TODO: _subdirlfs should be moved into "lfutil.py", because
361 # it is referred only from "lfutil.updatestandinsbymatch"
361 # it is referred only from "lfutil.updatestandinsbymatch"
362 def _subdirlfs(self, files, lfiles):
362 def _subdirlfs(self, files, lfiles):
363 '''
363 """
364 Adjust matched file list
364 Adjust matched file list
365 If we pass a directory to commit whose only committable files
365 If we pass a directory to commit whose only committable files
366 are largefiles, the core commit code aborts before finding
366 are largefiles, the core commit code aborts before finding
@@ -370,7 +370,7 b' def reposetup(ui, repo):'
370 we explicitly add the largefiles to the match list and remove
370 we explicitly add the largefiles to the match list and remove
371 the directory.
371 the directory.
372 In other cases, we leave the match list unmodified.
372 In other cases, we leave the match list unmodified.
373 '''
373 """
374 actualfiles = []
374 actualfiles = []
375 dirs = []
375 dirs = []
376 regulars = []
376 regulars = []
@@ -30,13 +30,23 b' class wirestore(remotestore.remotestore)'
30 return self.remote.getlfile(hash)
30 return self.remote.getlfile(hash)
31
31
32 def _stat(self, hashes):
32 def _stat(self, hashes):
33 '''For each hash, return 0 if it is available, other values if not.
33 """For each hash, return 0 if it is available, other values if not.
34 It is usually 2 if the largefile is missing, but might be 1 the server
34 It is usually 2 if the largefile is missing, but might be 1 if the server
34 It is usually 2 if the largefile is missing, but might be 1 if the server
35 has a corrupted copy."""
36
36
37 with self.remote.commandexecutor() as e:
37 with self.remote.commandexecutor() as e:
38 fs = []
38 fs = []
39 for hash in hashes:
39 for hash in hashes:
40 fs.append((hash, e.callcommand(b'statlfile', {b'sha': hash,})))
40 fs.append(
41 (
42 hash,
43 e.callcommand(
44 b'statlfile',
45 {
46 b'sha': hash,
47 },
48 ),
49 )
50 )
41
51
42 return {hash: f.result() for hash, f in fs}
52 return {hash: f.result() for hash, f in fs}
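
The reformatting also makes the batching shape of _stat() easier to read: every statlfile call is queued on the command executor before any future is resolved, which lets a capable peer pipeline the requests. The same logic, condensed:

    with self.remote.commandexecutor() as e:
        # queue one request per hash, then resolve all futures afterwards
        fs = [(hash, e.callcommand(b'statlfile', {b'sha': hash}))
              for hash in hashes]
    return {hash: f.result() for hash, f in fs}
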
@@ -172,33 +172,51 b' reposetup = eh.finalreposetup'
172 templatekeyword = eh.templatekeyword
172 templatekeyword = eh.templatekeyword
173
173
174 eh.configitem(
174 eh.configitem(
175 b'experimental', b'lfs.serve', default=True,
175 b'experimental',
176 b'lfs.serve',
177 default=True,
176 )
178 )
177 eh.configitem(
179 eh.configitem(
178 b'experimental', b'lfs.user-agent', default=None,
180 b'experimental',
181 b'lfs.user-agent',
182 default=None,
179 )
183 )
180 eh.configitem(
184 eh.configitem(
181 b'experimental', b'lfs.disableusercache', default=False,
185 b'experimental',
186 b'lfs.disableusercache',
187 default=False,
182 )
188 )
183 eh.configitem(
189 eh.configitem(
184 b'experimental', b'lfs.worker-enable', default=True,
190 b'experimental',
191 b'lfs.worker-enable',
192 default=True,
185 )
193 )
186
194
187 eh.configitem(
195 eh.configitem(
188 b'lfs', b'url', default=None,
196 b'lfs',
197 b'url',
198 default=None,
189 )
199 )
190 eh.configitem(
200 eh.configitem(
191 b'lfs', b'usercache', default=None,
201 b'lfs',
202 b'usercache',
203 default=None,
192 )
204 )
193 # Deprecated
205 # Deprecated
194 eh.configitem(
206 eh.configitem(
195 b'lfs', b'threshold', default=None,
207 b'lfs',
208 b'threshold',
209 default=None,
196 )
210 )
197 eh.configitem(
211 eh.configitem(
198 b'lfs', b'track', default=b'none()',
212 b'lfs',
213 b'track',
214 default=b'none()',
199 )
215 )
200 eh.configitem(
216 eh.configitem(
201 b'lfs', b'retry', default=5,
217 b'lfs',
218 b'retry',
219 default=5,
202 )
220 )
203
221
204 lfsprocessor = (
222 lfsprocessor = (
@@ -96,8 +96,7 b' class nullvfs(lfsvfs):'
96
96
97
97
98 class lfsuploadfile(httpconnectionmod.httpsendfile):
98 class lfsuploadfile(httpconnectionmod.httpsendfile):
99 """a file-like object that supports keepalive.
99 """a file-like object that supports keepalive."""
100 """
101
100
102 def __init__(self, ui, filename):
101 def __init__(self, ui, filename):
103 super(lfsuploadfile, self).__init__(ui, filename, b'rb')
102 super(lfsuploadfile, self).__init__(ui, filename, b'rb')
@@ -258,9 +257,9 b' class local(object):'
258
257
259
258
260 def _urlerrorreason(urlerror):
259 def _urlerrorreason(urlerror):
261 '''Create a friendly message for the given URLError to be used in an
260 """Create a friendly message for the given URLError to be used in an
262 LfsRemoteError message.
261 LfsRemoteError message.
263 '''
262 """
264 inst = urlerror
263 inst = urlerror
265
264
266 if isinstance(urlerror.reason, Exception):
265 if isinstance(urlerror.reason, Exception):
@@ -338,7 +337,10 b' class _gitlfsremote(object):'
338 ]
337 ]
339 requestdata = pycompat.bytesurl(
338 requestdata = pycompat.bytesurl(
340 json.dumps(
339 json.dumps(
341 {'objects': objects, 'operation': pycompat.strurl(action),}
340 {
341 'objects': objects,
342 'operation': pycompat.strurl(action),
343 }
342 )
344 )
343 )
345 )
344 url = b'%s/objects/batch' % self.baseurl
346 url = b'%s/objects/batch' % self.baseurl
@@ -381,10 +381,10 b' def candownload(repo):'
381
381
382
382
383 def uploadblobsfromrevs(repo, revs):
383 def uploadblobsfromrevs(repo, revs):
384 '''upload lfs blobs introduced by revs
384 """upload lfs blobs introduced by revs
385
385
386 Note: also used by other extensions, e.g. infinitepush; avoid renaming.
386 Note: also used by other extensions, e.g. infinitepush; avoid renaming.
387 '''
387 """
388 if _canskipupload(repo):
388 if _canskipupload(repo):
389 return
389 return
390 pointers = extractpointers(repo, revs)
390 pointers = extractpointers(repo, revs)
@@ -125,16 +125,24 b' configtable = {}'
125 configitem = registrar.configitem(configtable)
125 configitem = registrar.configitem(configtable)
126
126
127 configitem(
127 configitem(
128 b'mq', b'git', default=b'auto',
128 b'mq',
129 b'git',
130 default=b'auto',
129 )
131 )
130 configitem(
132 configitem(
131 b'mq', b'keepchanges', default=False,
133 b'mq',
134 b'keepchanges',
135 default=False,
132 )
136 )
133 configitem(
137 configitem(
134 b'mq', b'plain', default=False,
138 b'mq',
139 b'plain',
140 default=False,
135 )
141 )
136 configitem(
142 configitem(
137 b'mq', b'secret', default=False,
143 b'mq',
144 b'secret',
145 default=False,
138 )
146 )
139
147
140 # force load strip extension formerly included in mq and import some utility
148 # force load strip extension formerly included in mq and import some utility
@@ -156,8 +164,8 b' strip = strip.strip'
156
164
157
165
158 def checksubstate(repo, baserev=None):
166 def checksubstate(repo, baserev=None):
159 '''return list of subrepos at a different revision than substate.
167 """return list of subrepos at a different revision than substate.
160 Abort if any subrepos have uncommitted changes.'''
168 Abort if any subrepos have uncommitted changes."""
161 inclsubs = []
169 inclsubs = []
162 wctx = repo[None]
170 wctx = repo[None]
163 if baserev:
171 if baserev:
@@ -449,9 +457,9 b' class patchheader(object):'
449 __str__ = encoding.strmethod(__bytes__)
457 __str__ = encoding.strmethod(__bytes__)
450
458
451 def _delmsg(self):
459 def _delmsg(self):
452 '''Remove existing message, keeping the rest of the comments fields.
460 """Remove existing message, keeping the rest of the comments fields.
453 If comments contains 'subject: ', message will prepend
461 If comments contains 'subject: ', message will prepend
454 the field and a blank line.'''
462 the field and a blank line."""
455 if self.message:
463 if self.message:
456 subj = b'subject: ' + self.message[0].lower()
464 subj = b'subject: ' + self.message[0].lower()
457 for i in pycompat.xrange(len(self.comments)):
465 for i in pycompat.xrange(len(self.comments)):
@@ -949,8 +957,8 b' class queue(object):'
949 return (0, head)
957 return (0, head)
950
958
951 def patch(self, repo, patchfile):
959 def patch(self, repo, patchfile):
952 '''Apply patchfile to the working directory.
960 """Apply patchfile to the working directory.
953 patchfile: name of patch file'''
961 patchfile: name of patch file"""
954 files = set()
962 files = set()
955 try:
963 try:
956 fuzz = patchmod.patch(
964 fuzz = patchmod.patch(
@@ -1363,7 +1371,7 b' class queue(object):'
1363
1371
1364 def new(self, repo, patchfn, *pats, **opts):
1372 def new(self, repo, patchfn, *pats, **opts):
1365 """options:
1373 """options:
1366 msg: a string or a no-argument function returning a string
1374 msg: a string or a no-argument function returning a string
1367 """
1375 """
1368 opts = pycompat.byteskwargs(opts)
1376 opts = pycompat.byteskwargs(opts)
1369 msg = opts.get(b'msg')
1377 msg = opts.get(b'msg')
@@ -1718,7 +1726,10 b' class queue(object):'
1718 except: # re-raises
1726 except: # re-raises
1719 self.ui.warn(_(b'cleaning up working directory...\n'))
1727 self.ui.warn(_(b'cleaning up working directory...\n'))
1720 cmdutil.revert(
1728 cmdutil.revert(
1721 self.ui, repo, repo[b'.'], no_backup=True,
1729 self.ui,
1730 repo,
1731 repo[b'.'],
1732 no_backup=True,
1722 )
1733 )
1723 # only remove unknown files that we know we touched or
1734 # only remove unknown files that we know we touched or
1724 # created while patching
1735 # created while patching
@@ -2823,7 +2834,7 b' def init(ui, repo, **opts):'
2823 norepo=True,
2834 norepo=True,
2824 )
2835 )
2825 def clone(ui, source, dest=None, **opts):
2836 def clone(ui, source, dest=None, **opts):
2826 '''clone main and patch repository at same time
2837 """clone main and patch repository at same time
2827
2838
2828 If source is local, destination will have no patches applied. If
2839 If source is local, destination will have no patches applied. If
2828 source is remote, this command cannot check if patches are
2839 source is remote, this command cannot check if patches are
@@ -2838,7 +2849,7 b' def clone(ui, source, dest=None, **opts)'
2838 would be created by :hg:`init --mq`.
2849 would be created by :hg:`init --mq`.
2839
2850
2840 Return 0 on success.
2851 Return 0 on success.
2841 '''
2852 """
2842 opts = pycompat.byteskwargs(opts)
2853 opts = pycompat.byteskwargs(opts)
2843
2854
2844 def patchdir(repo):
2855 def patchdir(repo):
@@ -2937,7 +2948,10 b' def commit(ui, repo, *pats, **opts):'
2937
2948
2938 @command(
2949 @command(
2939 b"qseries",
2950 b"qseries",
2940 [(b'm', b'missing', None, _(b'print patches not in series')),] + seriesopts,
2951 [
2952 (b'm', b'missing', None, _(b'print patches not in series')),
2953 ]
2954 + seriesopts,
2941 _(b'hg qseries [-ms]'),
2955 _(b'hg qseries [-ms]'),
2942 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
2956 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
2943 )
2957 )
@@ -3282,9 +3296,9 b' def fold(ui, repo, *files, **opts):'
3282 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3296 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3283 )
3297 )
3284 def goto(ui, repo, patch, **opts):
3298 def goto(ui, repo, patch, **opts):
3285 '''push or pop patches until named patch is at top of stack
3299 """push or pop patches until named patch is at top of stack
3286
3300
3287 Returns 0 on success.'''
3301 Returns 0 on success."""
3288 opts = pycompat.byteskwargs(opts)
3302 opts = pycompat.byteskwargs(opts)
3289 opts = fixkeepchangesopts(ui, opts)
3303 opts = fixkeepchangesopts(ui, opts)
3290 q = repo.mq
3304 q = repo.mq
@@ -3321,7 +3335,7 b' def goto(ui, repo, patch, **opts):'
3321 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3335 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3322 )
3336 )
3323 def guard(ui, repo, *args, **opts):
3337 def guard(ui, repo, *args, **opts):
3324 '''set or print guards for a patch
3338 """set or print guards for a patch
3325
3339
3326 Guards control whether a patch can be pushed. A patch with no
3340 Guards control whether a patch can be pushed. A patch with no
3327 guards is always pushed. A patch with a positive guard ("+foo") is
3341 guards is always pushed. A patch with a positive guard ("+foo") is
@@ -3341,7 +3355,7 b' def guard(ui, repo, *args, **opts):'
3341 hg qguard other.patch -- +2.6.17 -stable
3355 hg qguard other.patch -- +2.6.17 -stable
3342
3356
3343 Returns 0 on success.
3357 Returns 0 on success.
3344 '''
3358 """
3345
3359
3346 def status(idx):
3360 def status(idx):
3347 guards = q.seriesguards[idx] or [b'unguarded']
3361 guards = q.seriesguards[idx] or [b'unguarded']
@@ -3712,7 +3726,7 b' def save(ui, repo, **opts):'
3712 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3726 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3713 )
3727 )
3714 def select(ui, repo, *args, **opts):
3728 def select(ui, repo, *args, **opts):
3715 '''set or print guarded patches to push
3729 """set or print guarded patches to push
3716
3730
3717 Use the :hg:`qguard` command to set or print guards on patch, then use
3731 Use the :hg:`qguard` command to set or print guards on patch, then use
3718 qselect to tell mq which guards to use. A patch will be pushed if
3732 qselect to tell mq which guards to use. A patch will be pushed if
@@ -3744,7 +3758,7 b' def select(ui, repo, *args, **opts):'
3744 Use -s/--series to print a list of all guards in the series file
3758 Use -s/--series to print a list of all guards in the series file
3745 (no other arguments needed). Use -v for more information.
3759 (no other arguments needed). Use -v for more information.
3746
3760
3747 Returns 0 on success.'''
3761 Returns 0 on success."""
3748
3762
3749 q = repo.mq
3763 q = repo.mq
3750 opts = pycompat.byteskwargs(opts)
3764 opts = pycompat.byteskwargs(opts)
@@ -3888,7 +3902,7 b' def finish(ui, repo, *revrange, **opts):'
3888 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3902 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
3889 )
3903 )
3890 def qqueue(ui, repo, name=None, **opts):
3904 def qqueue(ui, repo, name=None, **opts):
3891 '''manage multiple patch queues
3905 """manage multiple patch queues
3892
3906
3893 Supports switching between different patch queues, as well as creating
3907 Supports switching between different patch queues, as well as creating
3894 new patch queues and deleting existing ones.
3908 new patch queues and deleting existing ones.
@@ -3907,7 +3921,7 b' def qqueue(ui, repo, name=None, **opts):'
3907 active queue.
3921 active queue.
3908
3922
3909 Returns 0 on success.
3923 Returns 0 on success.
3910 '''
3924 """
3911 q = repo.mq
3925 q = repo.mq
3912 _defaultqueue = b'patches'
3926 _defaultqueue = b'patches'
3913 _allqueues = b'patches.queues'
3927 _allqueues = b'patches.queues'
@@ -4250,8 +4264,7 b' revsetpredicate = registrar.revsetpredic'
4250
4264
4251 @revsetpredicate(b'mq()')
4265 @revsetpredicate(b'mq()')
4252 def revsetmq(repo, subset, x):
4266 def revsetmq(repo, subset, x):
4253 """Changesets managed by MQ.
4267 """Changesets managed by MQ."""
4254 """
4255 revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
4268 revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
4256 applied = {repo[r.node].rev() for r in repo.mq.applied}
4269 applied = {repo[r.node].rev() for r in repo.mq.applied}
4257 return smartset.baseset([r for r in subset if r in applied])
4270 return smartset.baseset([r for r in subset if r in applied])
@@ -78,7 +78,14 b' def getbundlechangegrouppart_narrow('
78
78
79
79
80 def generateellipsesbundle2(
80 def generateellipsesbundle2(
81 bundler, repo, include, exclude, version, common, heads, depth,
81 bundler,
82 repo,
83 include,
84 exclude,
85 version,
86 common,
87 heads,
88 depth,
82 ):
89 ):
83 match = narrowspec.match(repo.root, include=include, exclude=exclude)
90 match = narrowspec.match(repo.root, include=include, exclude=exclude)
84 if depth is not None:
91 if depth is not None:
@@ -113,7 +120,13 b' def generateellipsesbundle2('
113
120
114
121
115 def generate_ellipses_bundle2_for_widening(
122 def generate_ellipses_bundle2_for_widening(
116 bundler, repo, oldmatch, newmatch, version, common, known,
123 bundler,
124 repo,
125 oldmatch,
126 newmatch,
127 version,
128 common,
129 known,
117 ):
130 ):
118 common = set(common or [nullid])
131 common = set(common or [nullid])
119 # Steps:
132 # Steps:
@@ -120,7 +120,13 b' def narrow_widen('
120 )
120 )
121 else:
121 else:
122 narrowbundle2.generate_ellipses_bundle2_for_widening(
122 narrowbundle2.generate_ellipses_bundle2_for_widening(
123 bundler, repo, oldmatch, newmatch, cgversion, common, known,
123 bundler,
124 repo,
125 oldmatch,
126 newmatch,
127 cgversion,
128 common,
129 known,
124 )
130 )
125 except error.Abort as exc:
131 except error.Abort as exc:
126 bundler = bundle2.bundle20(repo.ui)
132 bundler = bundle2.bundle20(repo.ui)
@@ -190,64 +190,104 b' configtable = {}'
190 configitem = registrar.configitem(configtable)
190 configitem = registrar.configitem(configtable)
191
191
192 configitem(
192 configitem(
193 b'notify', b'changegroup', default=None,
193 b'notify',
194 b'changegroup',
195 default=None,
194 )
196 )
195 configitem(
197 configitem(
196 b'notify', b'config', default=None,
198 b'notify',
199 b'config',
200 default=None,
197 )
201 )
198 configitem(
202 configitem(
199 b'notify', b'diffstat', default=True,
203 b'notify',
204 b'diffstat',
205 default=True,
200 )
206 )
201 configitem(
207 configitem(
202 b'notify', b'domain', default=None,
208 b'notify',
209 b'domain',
210 default=None,
203 )
211 )
204 configitem(
212 configitem(
205 b'notify', b'messageidseed', default=None,
213 b'notify',
214 b'messageidseed',
215 default=None,
206 )
216 )
207 configitem(
217 configitem(
208 b'notify', b'fromauthor', default=None,
218 b'notify',
219 b'fromauthor',
220 default=None,
209 )
221 )
210 configitem(
222 configitem(
211 b'notify', b'incoming', default=None,
223 b'notify',
224 b'incoming',
225 default=None,
212 )
226 )
213 configitem(
227 configitem(
214 b'notify', b'maxdiff', default=300,
228 b'notify',
229 b'maxdiff',
230 default=300,
215 )
231 )
216 configitem(
232 configitem(
217 b'notify', b'maxdiffstat', default=-1,
233 b'notify',
234 b'maxdiffstat',
235 default=-1,
218 )
236 )
219 configitem(
237 configitem(
220 b'notify', b'maxsubject', default=67,
238 b'notify',
239 b'maxsubject',
240 default=67,
221 )
241 )
222 configitem(
242 configitem(
223 b'notify', b'mbox', default=None,
243 b'notify',
244 b'mbox',
245 default=None,
224 )
246 )
225 configitem(
247 configitem(
226 b'notify', b'merge', default=True,
248 b'notify',
249 b'merge',
250 default=True,
227 )
251 )
228 configitem(
252 configitem(
229 b'notify', b'outgoing', default=None,
253 b'notify',
254 b'outgoing',
255 default=None,
230 )
256 )
231 configitem(
257 configitem(
232 b'notify', b'reply-to-predecessor', default=False,
258 b'notify',
259 b'reply-to-predecessor',
260 default=False,
233 )
261 )
234 configitem(
262 configitem(
235 b'notify', b'sources', default=b'serve',
263 b'notify',
264 b'sources',
265 default=b'serve',
236 )
266 )
237 configitem(
267 configitem(
238 b'notify', b'showfunc', default=None,
268 b'notify',
269 b'showfunc',
270 default=None,
239 )
271 )
240 configitem(
272 configitem(
241 b'notify', b'strip', default=0,
273 b'notify',
274 b'strip',
275 default=0,
242 )
276 )
243 configitem(
277 configitem(
244 b'notify', b'style', default=None,
278 b'notify',
279 b'style',
280 default=None,
245 )
281 )
246 configitem(
282 configitem(
247 b'notify', b'template', default=None,
283 b'notify',
284 b'template',
285 default=None,
248 )
286 )
249 configitem(
287 configitem(
250 b'notify', b'test', default=True,
288 b'notify',
289 b'test',
290 default=True,
251 )
291 )
252
292
253 # template for single changeset can include email headers.
293 # template for single changeset can include email headers.
@@ -539,10 +579,10 b' class notifier(object):'
539
579
540
580
541 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
581 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
542 '''send email notifications to interested subscribers.
582 """send email notifications to interested subscribers.
543
583
544 if used as a changegroup hook, send one email for all changesets in
584 if used as a changegroup hook, send one email for all changesets in
545 the changegroup; else send one email per changeset.'''
585 the changegroup; else send one email per changeset."""
546
586
547 n = notifier(ui, repo, hooktype)
587 n = notifier(ui, repo, hooktype)
548 ctx = repo.unfiltered()[node]
588 ctx = repo.unfiltered()[node]
@@ -41,7 +41,9 b' configtable = {}'
41 configitem = registrar.configitem(configtable)
41 configitem = registrar.configitem(configtable)
42
42
43 configitem(
43 configitem(
44 b'pager', b'attend', default=lambda: attended,
44 b'pager',
45 b'attend',
46 default=lambda: attended,
45 )
47 )
46
48
47
49
@@ -110,34 +110,54 b' configtable = {}'
110 configitem = registrar.configitem(configtable)
110 configitem = registrar.configitem(configtable)
111
111
112 configitem(
112 configitem(
113 b'patchbomb', b'bundletype', default=None,
113 b'patchbomb',
114 b'bundletype',
115 default=None,
114 )
116 )
115 configitem(
117 configitem(
116 b'patchbomb', b'bcc', default=None,
118 b'patchbomb',
119 b'bcc',
120 default=None,
117 )
121 )
118 configitem(
122 configitem(
119 b'patchbomb', b'cc', default=None,
123 b'patchbomb',
124 b'cc',
125 default=None,
120 )
126 )
121 configitem(
127 configitem(
122 b'patchbomb', b'confirm', default=False,
128 b'patchbomb',
129 b'confirm',
130 default=False,
123 )
131 )
124 configitem(
132 configitem(
125 b'patchbomb', b'flagtemplate', default=None,
133 b'patchbomb',
134 b'flagtemplate',
135 default=None,
126 )
136 )
127 configitem(
137 configitem(
128 b'patchbomb', b'from', default=None,
138 b'patchbomb',
139 b'from',
140 default=None,
129 )
141 )
130 configitem(
142 configitem(
131 b'patchbomb', b'intro', default=b'auto',
143 b'patchbomb',
144 b'intro',
145 default=b'auto',
132 )
146 )
133 configitem(
147 configitem(
134 b'patchbomb', b'publicurl', default=None,
148 b'patchbomb',
149 b'publicurl',
150 default=None,
135 )
151 )
136 configitem(
152 configitem(
137 b'patchbomb', b'reply-to', default=None,
153 b'patchbomb',
154 b'reply-to',
155 default=None,
138 )
156 )
139 configitem(
157 configitem(
140 b'patchbomb', b'to', default=None,
158 b'patchbomb',
159 b'to',
160 default=None,
141 )
161 )
142
162
143 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
163 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -148,8 +168,7 b" testedwith = b'ships-with-hg-core'"
148
168
149
169
150 def _addpullheader(seq, ctx):
170 def _addpullheader(seq, ctx):
151 """Add a header pointing to a public URL where the changeset is available
171 """Add a header pointing to a public URL where the changeset is available"""
152 """
153 repo = ctx.repo()
172 repo = ctx.repo()
154 # experimental config: patchbomb.publicurl
173 # experimental config: patchbomb.publicurl
155 # waiting for some logic that check that the changeset are available on the
174 # waiting for some logic that check that the changeset are available on the
@@ -656,7 +675,7 b' emailopts = ['
656 helpcategory=command.CATEGORY_IMPORT_EXPORT,
675 helpcategory=command.CATEGORY_IMPORT_EXPORT,
657 )
676 )
658 def email(ui, repo, *revs, **opts):
677 def email(ui, repo, *revs, **opts):
659 '''send changesets by email
678 """send changesets by email
660
679
661 By default, diffs are sent in the format generated by
680 By default, diffs are sent in the format generated by
662 :hg:`export`, one per message. The series starts with a "[PATCH 0
681 :hg:`export`, one per message. The series starts with a "[PATCH 0
@@ -739,7 +758,7 b' def email(ui, repo, *revs, **opts):'
739
758
740 Before using this command, you will need to enable email in your
759 Before using this command, you will need to enable email in your
741 hgrc. See the [email] section in hgrc(5) for details.
760 hgrc. See the [email] section in hgrc(5) for details.
742 '''
761 """
743 opts = pycompat.byteskwargs(opts)
762 opts = pycompat.byteskwargs(opts)
744
763
745 _charsets = mail._charsets(ui)
764 _charsets = mail._charsets(ui)
@@ -108,33 +108,51 b' uisetup = eh.finaluisetup'
108
108
109 # developer config: phabricator.batchsize
109 # developer config: phabricator.batchsize
110 eh.configitem(
110 eh.configitem(
111 b'phabricator', b'batchsize', default=12,
111 b'phabricator',
112 b'batchsize',
113 default=12,
112 )
114 )
113 eh.configitem(
115 eh.configitem(
114 b'phabricator', b'callsign', default=None,
116 b'phabricator',
117 b'callsign',
118 default=None,
115 )
119 )
116 eh.configitem(
120 eh.configitem(
117 b'phabricator', b'curlcmd', default=None,
121 b'phabricator',
122 b'curlcmd',
123 default=None,
118 )
124 )
119 # developer config: phabricator.debug
125 # developer config: phabricator.debug
120 eh.configitem(
126 eh.configitem(
121 b'phabricator', b'debug', default=False,
127 b'phabricator',
128 b'debug',
129 default=False,
122 )
130 )
123 # developer config: phabricator.repophid
131 # developer config: phabricator.repophid
124 eh.configitem(
132 eh.configitem(
125 b'phabricator', b'repophid', default=None,
133 b'phabricator',
134 b'repophid',
135 default=None,
126 )
136 )
127 eh.configitem(
137 eh.configitem(
128 b'phabricator', b'url', default=None,
138 b'phabricator',
139 b'url',
140 default=None,
129 )
141 )
130 eh.configitem(
142 eh.configitem(
131 b'phabsend', b'confirm', default=False,
143 b'phabsend',
144 b'confirm',
145 default=False,
132 )
146 )
133 eh.configitem(
147 eh.configitem(
134 b'phabimport', b'secret', default=False,
148 b'phabimport',
149 b'secret',
150 default=False,
135 )
151 )
136 eh.configitem(
152 eh.configitem(
137 b'phabimport', b'obsolete', default=False,
153 b'phabimport',
154 b'obsolete',
155 default=False,
138 )
156 )
139
157
140 colortable = {
158 colortable = {
@@ -166,8 +184,7 b' colortable = {'
166
184
167 @eh.wrapfunction(localrepo, "loadhgrc")
185 @eh.wrapfunction(localrepo, "loadhgrc")
168 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
186 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
169 """Load ``.arcconfig`` content into a ui instance on repository open.
187 """Load ``.arcconfig`` content into a ui instance on repository open."""
170 """
171 result = False
188 result = False
172 arcconfig = {}
189 arcconfig = {}
173
190
@@ -633,8 +650,7 b' class DiffFileType(object):'
633
650
634 @attr.s
651 @attr.s
635 class phabhunk(dict):
652 class phabhunk(dict):
636 """Represents a Differential hunk, which is owned by a Differential change
653 """Represents a Differential hunk, which is owned by a Differential change"""
637 """
638
654
639 oldOffset = attr.ib(default=0) # camelcase-required
655 oldOffset = attr.ib(default=0) # camelcase-required
640 oldLength = attr.ib(default=0) # camelcase-required
656 oldLength = attr.ib(default=0) # camelcase-required
@@ -1512,7 +1528,9 b' def phabsend(ui, repo, *revs, **opts):'
1512 mapping.get(old.p2().node(), (old.p2(),))[0],
1528 mapping.get(old.p2().node(), (old.p2(),))[0],
1513 ]
1529 ]
1514 newdesc = rewriteutil.update_hash_refs(
1530 newdesc = rewriteutil.update_hash_refs(
1515 repo, newdesc, mapping,
1531 repo,
1532 newdesc,
1533 mapping,
1516 )
1534 )
1517 new = context.metadataonlyctx(
1535 new = context.metadataonlyctx(
1518 repo,
1536 repo,
@@ -2227,7 +2245,10 b' def template_review(context, mapping):'
2227 m = _differentialrevisiondescre.search(ctx.description())
2245 m = _differentialrevisiondescre.search(ctx.description())
2228 if m:
2246 if m:
2229 return templateutil.hybriddict(
2247 return templateutil.hybriddict(
2230 {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
2248 {
2249 b'url': m.group('url'),
2250 b'id': b"D%s" % m.group('id'),
2251 }
2231 )
2252 )
2232 else:
2253 else:
2233 tags = ctx.repo().nodetags(ctx.node())
2254 tags = ctx.repo().nodetags(ctx.node())
@@ -2238,14 +2259,18 b' def template_review(context, mapping):'
2238 url += b'/'
2259 url += b'/'
2239 url += t
2260 url += t
2240
2261
2241 return templateutil.hybriddict({b'url': url, b'id': t,})
2262 return templateutil.hybriddict(
2263 {
2264 b'url': url,
2265 b'id': t,
2266 }
2267 )
2242 return None
2268 return None
2243
2269
2244
2270
2245 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2271 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
2246 def template_status(context, mapping):
2272 def template_status(context, mapping):
2247 """:phabstatus: String. Status of Phabricator differential.
2273 """:phabstatus: String. Status of Phabricator differential."""
2248 """
2249 ctx = context.resource(mapping, b'ctx')
2274 ctx = context.resource(mapping, b'ctx')
2250 repo = context.resource(mapping, b'repo')
2275 repo = context.resource(mapping, b'repo')
2251 ui = context.resource(mapping, b'ui')
2276 ui = context.resource(mapping, b'ui')
@@ -2259,7 +2284,10 b' def template_status(context, mapping):'
2259 for drev in drevs:
2284 for drev in drevs:
2260 if int(drev[b'id']) == drevid:
2285 if int(drev[b'id']) == drevid:
2261 return templateutil.hybriddict(
2286 return templateutil.hybriddict(
2262 {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
2287 {
2288 b'url': drev[b'uri'],
2289 b'status': drev[b'statusName'],
2290 }
2263 )
2291 )
2264 return None
2292 return None
2265
2293
@@ -67,7 +67,7 b" testedwith = b'ships-with-hg-core'"
67 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
67 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
68 )
68 )
69 def purge(ui, repo, *dirs, **opts):
69 def purge(ui, repo, *dirs, **opts):
70 '''removes files not tracked by Mercurial
70 """removes files not tracked by Mercurial
71
71
72 Delete files not known to Mercurial. This is useful to test local
72 Delete files not known to Mercurial. This is useful to test local
73 and uncommitted changes in an otherwise-clean source tree.
73 and uncommitted changes in an otherwise-clean source tree.
@@ -95,7 +95,7 b' def purge(ui, repo, *dirs, **opts):'
95 you forgot to add to the repository. If you only want to print the
95 you forgot to add to the repository. If you only want to print the
96 list of files that this program would delete, use the --print
96 list of files that this program would delete, use the --print
97 option.
97 option.
98 '''
98 """
99 opts = pycompat.byteskwargs(opts)
99 opts = pycompat.byteskwargs(opts)
100 cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
100 cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
101
101
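
The purge hunks change nothing but the docstring delimiters: 20.8b1 normalizes '''-quoted docstrings to the """ form that PEP 257 recommends, leaving the text itself untouched. Sketch:

    # Before: single-quote delimiters.
    def purge_sketch():
        '''removes files not tracked by Mercurial'''

    # After: double-quote delimiters, identical text.
    def purge_sketch():
        """removes files not tracked by Mercurial"""
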
@@ -507,10 +507,10 b' class rebaseruntime(object):'
507 ui.note(_(b'rebase merging completed\n'))
507 ui.note(_(b'rebase merging completed\n'))
508
508
509 def _concludenode(self, rev, editor, commitmsg=None):
509 def _concludenode(self, rev, editor, commitmsg=None):
510 '''Commit the wd changes with parents p1 and p2.
510 """Commit the wd changes with parents p1 and p2.
511
511
512 Reuse commit info from rev but also store useful information in extra.
512 Reuse commit info from rev but also store useful information in extra.
513 Return node of committed revision.'''
513 Return node of committed revision."""
514 repo = self.repo
514 repo = self.repo
515 ctx = repo[rev]
515 ctx = repo[rev]
516 if commitmsg is None:
516 if commitmsg is None:
@@ -1135,7 +1135,11 b' def _dryrunrebase(ui, repo, action, opts'
1135 overrides = {(b'rebase', b'singletransaction'): True}
1135 overrides = {(b'rebase', b'singletransaction'): True}
1136 with ui.configoverride(overrides, b'rebase'):
1136 with ui.configoverride(overrides, b'rebase'):
1137 _origrebase(
1137 _origrebase(
1138 ui, repo, action, opts, rbsrt,
1138 ui,
1139 repo,
1140 action,
1141 opts,
1142 rbsrt,
1139 )
1143 )
1140 except error.ConflictResolutionRequired:
1144 except error.ConflictResolutionRequired:
1141 ui.status(_(b'hit a merge conflict\n'))
1145 ui.status(_(b'hit a merge conflict\n'))
@@ -1447,8 +1451,8 b' def externalparent(repo, state, destance'
1447
1451
1448
1452
1449 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1453 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
1450 '''Commit the memory changes with parents p1 and p2.
1454 """Commit the memory changes with parents p1 and p2.
1451 Return node of committed revision.'''
1455 Return node of committed revision."""
1452 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1456 # By convention, ``extra['branch']`` (set by extrafn) clobbers
1453 # ``branch`` (used when passing ``--keepbranches``).
1457 # ``branch`` (used when passing ``--keepbranches``).
1454 branch = None
1458 branch = None
@@ -1475,8 +1479,8 b' def commitmemorynode(repo, wctx, editor,'
1475
1479
1476
1480
1477 def commitnode(repo, editor, extra, user, date, commitmsg):
1481 def commitnode(repo, editor, extra, user, date, commitmsg):
1478 '''Commit the wd changes with parents p1 and p2.
1482 """Commit the wd changes with parents p1 and p2.
1479 Return node of committed revision.'''
1483 Return node of committed revision."""
1480 dsguard = util.nullcontextmanager()
1484 dsguard = util.nullcontextmanager()
1481 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1485 if not repo.ui.configbool(b'rebase', b'singletransaction'):
1482 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
1486 dsguard = dirstateguard.dirstateguard(repo, b'rebase')
@@ -1965,11 +1969,11 b' def sortsource(destmap):'
1965
1969
1966
1970
1967 def buildstate(repo, destmap, collapse):
1971 def buildstate(repo, destmap, collapse):
1968 '''Define which revisions are going to be rebased and where
1972 """Define which revisions are going to be rebased and where
1969
1973
1970 repo: repo
1974 repo: repo
1971 destmap: {srcrev: destrev}
1975 destmap: {srcrev: destrev}
1972 '''
1976 """
1973 rebaseset = destmap.keys()
1977 rebaseset = destmap.keys()
1974 originalwd = repo[b'.'].rev()
1978 originalwd = repo[b'.'].rev()
1975
1979
@@ -39,7 +39,7 b" testedwith = b'ships-with-hg-core'"
39 helpcategory=command.CATEGORY_COMMITTING,
39 helpcategory=command.CATEGORY_COMMITTING,
40 )
40 )
41 def record(ui, repo, *pats, **opts):
41 def record(ui, repo, *pats, **opts):
42 '''interactively select changes to commit
42 """interactively select changes to commit
43
43
44 If a list of files is omitted, all changes reported by :hg:`status`
44 If a list of files is omitted, all changes reported by :hg:`status`
45 will be candidates for recording.
45 will be candidates for recording.
@@ -65,7 +65,7 b' def record(ui, repo, *pats, **opts):'
65
65
66 ? - display help
66 ? - display help
67
67
68 This command is not available when committing a merge.'''
68 This command is not available when committing a merge."""
69
69
70 if not ui.interactive():
70 if not ui.interactive():
71 raise error.Abort(
71 raise error.Abort(
@@ -106,11 +106,11 b' def qrefresh(origfn, ui, repo, *pats, **'
106 inferrepo=True,
106 inferrepo=True,
107 )
107 )
108 def qrecord(ui, repo, patch, *pats, **opts):
108 def qrecord(ui, repo, patch, *pats, **opts):
109 '''interactively record a new patch
109 """interactively record a new patch
110
110
111 See :hg:`help qnew` & :hg:`help record` for more information and
111 See :hg:`help qnew` & :hg:`help record` for more information and
112 usage.
112 usage.
113 '''
113 """
114 return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
114 return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
115
115
116
116
@@ -758,10 +758,10 b' def getrenamedfn(orig, repo, endrev=None'
758 rcache = {}
758 rcache = {}
759
759
760 def getrenamed(fn, rev):
760 def getrenamed(fn, rev):
761 '''looks up all renames for a file (up to endrev) the first
761 """looks up all renames for a file (up to endrev) the first
762 time the file is given. It indexes on the changerev and only
762 time the file is given. It indexes on the changerev and only
763 parses the manifest if linkrev != changerev.
763 parses the manifest if linkrev != changerev.
764 Returns rename info for fn at changerev rev.'''
764 Returns rename info for fn at changerev rev."""
765 if rev in rcache.setdefault(fn, {}):
765 if rev in rcache.setdefault(fn, {}):
766 return rcache[fn][rev]
766 return rcache[fn][rev]
767
767
@@ -822,8 +822,7 b' def filelogrevset(orig, repo, subset, x)'
822
822
823 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
823 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
824 def gc(ui, *args, **opts):
824 def gc(ui, *args, **opts):
825 '''garbage collect the client and server filelog caches
825 """garbage collect the client and server filelog caches"""
826 '''
827 cachepaths = set()
826 cachepaths = set()
828
827
829 # get the system client cache
828 # get the system client cache
@@ -1105,7 +1104,9 b' def _fileprefetchhook(repo, revmatches):'
1105
1104
1106 @command(
1105 @command(
1107 b'debugremotefilelog',
1106 b'debugremotefilelog',
1108 [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
1107 [
1108 (b'd', b'decompress', None, _(b'decompress the filelog first')),
1109 ],
1109 _(b'hg debugremotefilelog <path>'),
1110 _(b'hg debugremotefilelog <path>'),
1110 norepo=True,
1111 norepo=True,
1111 )
1112 )
@@ -1115,7 +1116,9 b' def debugremotefilelog(ui, path, **opts)'
1115
1116
1116 @command(
1117 @command(
1117 b'verifyremotefilelog',
1118 b'verifyremotefilelog',
1118 [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
1119 [
1120 (b'd', b'decompress', None, _(b'decompress the filelogs first')),
1121 ],
1119 _(b'hg verifyremotefilelogs <directory>'),
1122 _(b'hg verifyremotefilelogs <directory>'),
1120 norepo=True,
1123 norepo=True,
1121 )
1124 )
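
Collection literals obey the same trailing-comma rule as calls, which is why the one-entry option lists for debugremotefilelog and verifyremotefilelog are exploded: the comma after the single tuple keeps the brackets open. Sketch (option strings shortened, plain str instead of bytes):

    # Trailing comma present: black keeps the list vertical.
    options = [
        ('d', 'decompress', None, 'decompress the filelog first'),
    ]

    # Trailing comma removed: black may fold it back onto one line.
    options = [('d', 'decompress', None, 'decompress the filelog first')]
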
@@ -103,7 +103,7 b' class basestore(object):'
103 def _cleanupdirectory(self, rootdir):
103 def _cleanupdirectory(self, rootdir):
104 """Removes the empty directories and unnecessary files within the root
104 """Removes the empty directories and unnecessary files within the root
105 directory recursively. Note that this method does not remove the root
105 directory recursively. Note that this method does not remove the root
106 directory itself. """
106 directory itself."""
107
107
108 oldfiles = set()
108 oldfiles = set()
109 otherfiles = set()
109 otherfiles = set()
@@ -17,8 +17,7 b' from . import ('
17
17
18
18
19 class ChainIndicies(object):
19 class ChainIndicies(object):
20 """A static class for easy reference to the delta chain indicies.
20 """A static class for easy reference to the delta chain indicies."""
21 """
22
21
23 # The filename of this revision delta
22 # The filename of this revision delta
24 NAME = 0
23 NAME = 0
@@ -73,8 +72,7 b' class unioncontentstore(basestore.baseun'
73
72
74 @basestore.baseunionstore.retriable
73 @basestore.baseunionstore.retriable
75 def getdelta(self, name, node):
74 def getdelta(self, name, node):
76 """Return the single delta entry for the given name/node pair.
75 """Return the single delta entry for the given name/node pair."""
77 """
78 for store in self.stores:
76 for store in self.stores:
79 try:
77 try:
80 return store.getdelta(name, node)
78 return store.getdelta(name, node)
@@ -302,8 +302,7 b' def _getfiles_threaded('
302
302
303
303
304 class fileserverclient(object):
304 class fileserverclient(object):
305 """A client for requesting files from the remote file server.
305 """A client for requesting files from the remote file server."""
306 """
307
306
308 def __init__(self, repo):
307 def __init__(self, repo):
309 ui = repo.ui
308 ui = repo.ui
@@ -568,8 +567,7 b' class fileserverclient(object):'
568 def prefetch(
567 def prefetch(
569 self, fileids, force=False, fetchdata=True, fetchhistory=False
568 self, fileids, force=False, fetchdata=True, fetchhistory=False
570 ):
569 ):
571 """downloads the given file versions to the cache
570 """downloads the given file versions to the cache"""
572 """
573 repo = self.repo
571 repo = self.repo
574 idstocheck = []
572 idstocheck = []
575 for file, id in fileids:
573 for file, id in fileids:
@@ -63,8 +63,8 b' class remotefilectx(context.filectx):'
63 return self.linkrev()
63 return self.linkrev()
64
64
65 def filectx(self, fileid, changeid=None):
65 def filectx(self, fileid, changeid=None):
66 '''opens an arbitrary revision of the file without
66 """opens an arbitrary revision of the file without
67 opening a new filelog'''
67 opening a new filelog"""
68 return remotefilectx(
68 return remotefilectx(
69 self._repo,
69 self._repo,
70 self._path,
70 self._path,
@@ -40,8 +40,7 b' from . import ('
40
40
41
41
42 def setupserver(ui, repo):
42 def setupserver(ui, repo):
43 """Sets up a normal Mercurial repo so it can serve files to shallow repos.
43 """Sets up a normal Mercurial repo so it can serve files to shallow repos."""
44 """
45 onetimesetup(ui)
44 onetimesetup(ui)
46
45
47 # don't send files to shallow clients during pulls
46 # don't send files to shallow clients during pulls
@@ -79,8 +78,7 b' onetime = False'
79
78
80
79
81 def onetimesetup(ui):
80 def onetimesetup(ui):
82 """Configures the wireprotocol for both clients and servers.
81 """Configures the wireprotocol for both clients and servers."""
83 """
84 global onetime
82 global onetime
85 if onetime:
83 if onetime:
86 return
84 return
@@ -281,8 +279,7 b' def _loadfileblob(repo, cachepath, path,'
281
279
282
280
283 def getflogheads(repo, proto, path):
281 def getflogheads(repo, proto, path):
284 """A server api for requesting a filelog's heads
282 """A server api for requesting a filelog's heads"""
285 """
286 flog = repo.file(path)
283 flog = repo.file(path)
287 heads = flog.heads()
284 heads = flog.heads()
288 return b'\n'.join((hex(head) for head in heads if head != nullid))
285 return b'\n'.join((hex(head) for head in heads if head != nullid))
@@ -309,8 +306,7 b' def getfile(repo, proto, file, node):'
309
306
310
307
311 def getfiles(repo, proto):
308 def getfiles(repo, proto):
312 """A server api for requesting particular versions of particular files.
309 """A server api for requesting particular versions of particular files."""
313 """
314 if shallowutil.isenabled(repo):
310 if shallowutil.isenabled(repo):
315 raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
311 raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
316 if not isinstance(proto, _sshv1server):
312 if not isinstance(proto, _sshv1server):
@@ -54,8 +54,7 b' def backgroundrepack(repo, incremental=T'
54
54
55
55
56 def fullrepack(repo, options=None):
56 def fullrepack(repo, options=None):
57 """If ``packsonly`` is True, stores creating only loose objects are skipped.
57 """If ``packsonly`` is True, stores creating only loose objects are skipped."""
58 """
59 if util.safehasattr(repo, 'shareddatastores'):
58 if util.safehasattr(repo, 'shareddatastores'):
60 datasource = contentstore.unioncontentstore(*repo.shareddatastores)
59 datasource = contentstore.unioncontentstore(*repo.shareddatastores)
61 historysource = metadatastore.unionmetadatastore(
60 historysource = metadatastore.unionmetadatastore(
@@ -874,8 +873,7 b' class repackledger(object):'
874
873
875
874
876 class repackentry(object):
875 class repackentry(object):
877 """Simple class representing a single revision entry in the repackledger.
876 """Simple class representing a single revision entry in the repackledger."""
878 """
879
877
880 __slots__ = (
878 __slots__ = (
881 'filename',
879 'filename',
@@ -161,11 +161,11 b' def wraprepo(repo):'
161 return path
161 return path
162
162
163 def maybesparsematch(self, *revs, **kwargs):
163 def maybesparsematch(self, *revs, **kwargs):
164 '''
164 """
165 A wrapper that allows the remotefilelog to invoke sparsematch() if
165 A wrapper that allows the remotefilelog to invoke sparsematch() if
166 this is a sparse repository, or returns None if this is not a
166 this is a sparse repository, or returns None if this is not a
167 sparse repository.
167 sparse repository.
168 '''
168 """
169 if revs:
169 if revs:
170 ret = sparse.matcher(repo, revs=revs)
170 ret = sparse.matcher(repo, revs=revs)
171 else:
171 else:
@@ -217,8 +217,7 b' def wraprepo(repo):'
217 def backgroundprefetch(
217 def backgroundprefetch(
218 self, revs, base=None, repack=False, pats=None, opts=None
218 self, revs, base=None, repack=False, pats=None, opts=None
219 ):
219 ):
220 """Runs prefetch in background with optional repack
220 """Runs prefetch in background with optional repack"""
221 """
222 cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
221 cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
223 if repack:
222 if repack:
224 cmd.append(b'--repack')
223 cmd.append(b'--repack')
@@ -66,13 +66,19 b' templatekeyword = registrar.templatekeyw'
66 revsetpredicate = registrar.revsetpredicate()
66 revsetpredicate = registrar.revsetpredicate()
67
67
68 configitem(
68 configitem(
69 b'remotenames', b'bookmarks', default=True,
69 b'remotenames',
70 b'bookmarks',
71 default=True,
70 )
72 )
71 configitem(
73 configitem(
72 b'remotenames', b'branches', default=True,
74 b'remotenames',
75 b'branches',
76 default=True,
73 )
77 )
74 configitem(
78 configitem(
75 b'remotenames', b'hoistedpeer', default=b'default',
79 b'remotenames',
80 b'hoistedpeer',
81 default=b'default',
76 )
82 )
77
83
78
84
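
A side effect visible in the configitem hunks is that the trailing comma becomes a layout control the author opts into: keeping the comma locks in the vertical form, while dropping it would have let black keep these short registrations on a single line. A hedged usage sketch with a stand-in for registrar.configitem (signature assumed for illustration):

    def configitem(section, name, default=None):
        # stand-in with an assumed signature, for illustration only
        return (section, name, default)

    # Comma kept: stays exploded under black 20.8b1.
    configitem(
        'remotenames',
        'bookmarks',
        default=True,
    )

    # Comma dropped: black folds the call back onto one line.
    configitem('remotenames', 'bookmarks', default=True)
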
@@ -142,8 +142,7 b' def extsetup(ui):'
142
142
143 @command(b'debugexpandscheme', norepo=True)
143 @command(b'debugexpandscheme', norepo=True)
144 def expandscheme(ui, url, **opts):
144 def expandscheme(ui, url, **opts):
145 """given a repo path, provide the scheme-expanded path
145 """given a repo path, provide the scheme-expanded path"""
146 """
147 repo = hg._peerlookup(url)
146 repo = hg._peerlookup(url)
148 if isinstance(repo, ShortRepository):
147 if isinstance(repo, ShortRepository):
149 url = repo.resolve(url)
148 url = repo.resolve(url)
@@ -75,7 +75,12 b" testedwith = b'ships-with-hg-core'"
75 [
75 [
76 (b'U', b'noupdate', None, _(b'do not create a working directory')),
76 (b'U', b'noupdate', None, _(b'do not create a working directory')),
77 (b'B', b'bookmarks', None, _(b'also share bookmarks')),
77 (b'B', b'bookmarks', None, _(b'also share bookmarks')),
78 (b'', b'relative', None, _(b'point to source using a relative path'),),
78 (
79 b'',
80 b'relative',
81 None,
82 _(b'point to source using a relative path'),
83 ),
79 ],
84 ],
80 _(b'[-U] [-B] SOURCE [DEST]'),
85 _(b'[-U] [-B] SOURCE [DEST]'),
81 helpcategory=command.CATEGORY_REPO_CREATION,
86 helpcategory=command.CATEGORY_REPO_CREATION,
@@ -62,10 +62,14 b' configtable = {}'
62 configitem = registrar.configitem(configtable)
62 configitem = registrar.configitem(configtable)
63
63
64 configitem(
64 configitem(
65 b'transplant', b'filter', default=None,
65 b'transplant',
66 b'filter',
67 default=None,
66 )
68 )
67 configitem(
69 configitem(
68 b'transplant', b'log', default=None,
70 b'transplant',
71 b'log',
72 default=None,
69 )
73 )
70
74
71
75
@@ -140,8 +144,8 b' class transplanter(object):'
140 self.getcommiteditor = getcommiteditor
144 self.getcommiteditor = getcommiteditor
141
145
142 def applied(self, repo, node, parent):
146 def applied(self, repo, node, parent):
143 '''returns True if a node is already an ancestor of parent
147 """returns True if a node is already an ancestor of parent
144 or is parent or has already been transplanted'''
148 or is parent or has already been transplanted"""
145 if hasnode(repo, parent):
149 if hasnode(repo, parent):
146 parentrev = repo.changelog.rev(parent)
150 parentrev = repo.changelog.rev(parent)
147 if hasnode(repo, node):
151 if hasnode(repo, node):
@@ -682,7 +686,7 b' def browserevs(ui, repo, nodes, opts):'
682 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
686 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
683 )
687 )
684 def transplant(ui, repo, *revs, **opts):
688 def transplant(ui, repo, *revs, **opts):
685 '''transplant changesets from another branch
689 """transplant changesets from another branch
686
690
687 Selected changesets will be applied on top of the current working
691 Selected changesets will be applied on top of the current working
688 directory with the log of the original changeset. The changesets
692 directory with the log of the original changeset. The changesets
@@ -731,7 +735,7 b' def transplant(ui, repo, *revs, **opts):'
731 If a changeset application fails, you can fix the merge by hand
735 If a changeset application fails, you can fix the merge by hand
732 and then resume where you left off by calling :hg:`transplant
736 and then resume where you left off by calling :hg:`transplant
733 --continue/-c`.
737 --continue/-c`.
734 '''
738 """
735 with repo.wlock():
739 with repo.wlock():
736 return _dotransplant(ui, repo, *revs, **opts)
740 return _dotransplant(ui, repo, *revs, **opts)
737
741
@@ -743,9 +747,9 b' def _dotransplant(ui, repo, *revs, **opt'
743 yield node
747 yield node
744
748
745 def transplantwalk(repo, dest, heads, match=util.always):
749 def transplantwalk(repo, dest, heads, match=util.always):
746 '''Yield all nodes that are ancestors of a head but not ancestors
750 """Yield all nodes that are ancestors of a head but not ancestors
747 of dest.
751 of dest.
748 If no heads are specified, the heads of repo will be used.'''
752 If no heads are specified, the heads of repo will be used."""
749 if not heads:
753 if not heads:
750 heads = repo.heads()
754 heads = repo.heads()
751 ancestors = []
755 ancestors = []
@@ -886,8 +890,7 b' revsetpredicate = registrar.revsetpredic'
886
890
887 @revsetpredicate(b'transplanted([set])')
891 @revsetpredicate(b'transplanted([set])')
888 def revsettransplanted(repo, subset, x):
892 def revsettransplanted(repo, subset, x):
889 """Transplanted changesets in set, or all transplanted changesets.
893 """Transplanted changesets in set, or all transplanted changesets."""
890 """
891 if x:
894 if x:
892 s = revset.getset(repo, subset, x)
895 s = revset.getset(repo, subset, x)
893 else:
896 else:
@@ -43,10 +43,14 b' configtable = {}'
43 configitem = registrar.configitem(configtable)
43 configitem = registrar.configitem(configtable)
44
44
45 configitem(
45 configitem(
46 b'experimental', b'uncommitondirtywdir', default=False,
46 b'experimental',
47 b'uncommitondirtywdir',
48 default=False,
47 )
49 )
48 configitem(
50 configitem(
49 b'experimental', b'uncommit.keep', default=False,
51 b'experimental',
52 b'uncommit.keep',
53 default=False,
50 )
54 )
51
55
52 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
56 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -70,7 +70,9 b' configitem = registrar.configitem(config'
70 # Encoding.encoding may be updated by --encoding option.
70 # Encoding.encoding may be updated by --encoding option.
71 # Use a lambda do delay the resolution.
71 # Use a lambda do delay the resolution.
72 configitem(
72 configitem(
73 b'win32mbcs', b'encoding', default=lambda: encoding.encoding,
73 b'win32mbcs',
74 b'encoding',
75 default=lambda: encoding.encoding,
74 )
76 )
75
77
76 _encoding = None # see extsetup
78 _encoding = None # see extsetup
@@ -62,7 +62,9 b' configtable = {}'
62 configitem = registrar.configitem(configtable)
62 configitem = registrar.configitem(configtable)
63
63
64 configitem(
64 configitem(
65 b'win32text', b'warn', default=True,
65 b'win32text',
66 b'warn',
67 default=True,
66 )
68 )
67
69
68 # regexp for single LF without CR preceding.
70 # regexp for single LF without CR preceding.
@@ -33,8 +33,7 b' def levelchecker(level, msgidpat):'
33
33
34
34
35 def match(checker, pe):
35 def match(checker, pe):
36 """Examine whether POEntry "pe" is target of specified checker or not
36 """Examine whether POEntry "pe" is target of specified checker or not"""
37 """
38 if not checker.match(pe.msgid):
37 if not checker.match(pe.msgid):
39 return
38 return
40 # examine suppression by translator comment
39 # examine suppression by translator comment
@@ -148,11 +148,11 b' def ancestors(pfunc, *orignodes):'
148
148
149
149
150 class incrementalmissingancestors(object):
150 class incrementalmissingancestors(object):
151 '''persistent state used to calculate missing ancestors incrementally
151 """persistent state used to calculate missing ancestors incrementally
152
152
153 Although similar in spirit to lazyancestors below, this is a separate class
153 Although similar in spirit to lazyancestors below, this is a separate class
154 because trying to support contains and missingancestors operations with the
154 because trying to support contains and missingancestors operations with the
155 same internal data structures adds needless complexity.'''
155 same internal data structures adds needless complexity."""
156
156
157 def __init__(self, pfunc, bases):
157 def __init__(self, pfunc, bases):
158 self.bases = set(bases)
158 self.bases = set(bases)
@@ -198,12 +198,12 b' class incrementalmissingancestors(object'
198 break
198 break
199
199
200 def missingancestors(self, revs):
200 def missingancestors(self, revs):
201 '''return all the ancestors of revs that are not ancestors of self.bases
201 """return all the ancestors of revs that are not ancestors of self.bases
202
202
203 This may include elements from revs.
203 This may include elements from revs.
204
204
205 Equivalent to the revset (::revs - ::self.bases). Revs are returned in
205 Equivalent to the revset (::revs - ::self.bases). Revs are returned in
206 revision number order, which is a topological order.'''
206 revision number order, which is a topological order."""
207 revsvisit = set(revs)
207 revsvisit = set(revs)
208 basesvisit = self.bases
208 basesvisit = self.bases
209 pfunc = self.pfunc
209 pfunc = self.pfunc
@@ -37,8 +37,8 b' stringio = util.stringio'
37
37
38
38
39 def tidyprefix(dest, kind, prefix):
39 def tidyprefix(dest, kind, prefix):
40 '''choose prefix to use for names in archive. make sure prefix is
40 """choose prefix to use for names in archive. make sure prefix is
41 safe for consumers.'''
41 safe for consumers."""
42
42
43 if prefix:
43 if prefix:
44 prefix = util.normpath(prefix)
44 prefix = util.normpath(prefix)
@@ -132,8 +132,8 b' def buildmetadata(ctx):'
132
132
133
133
134 class tarit(object):
134 class tarit(object):
135 '''write archive to tar file or stream. can write uncompressed,
135 """write archive to tar file or stream. can write uncompressed,
136 or compress with gzip or bzip2.'''
136 or compress with gzip or bzip2."""
137
137
138 if pycompat.ispy3:
138 if pycompat.ispy3:
139 GzipFileWithTime = gzip.GzipFile # camelcase-required
139 GzipFileWithTime = gzip.GzipFile # camelcase-required
@@ -185,8 +185,10 b' class tarit(object):'
185 mtime=mtime,
185 mtime=mtime,
186 )
186 )
187 self.fileobj = gzfileobj
187 self.fileobj = gzfileobj
188 return tarfile.TarFile.taropen( # pytype: disable=attribute-error
188 return (
189 name, pycompat.sysstr(mode), gzfileobj
189 tarfile.TarFile.taropen( # pytype: disable=attribute-error
190 name, pycompat.sysstr(mode), gzfileobj
191 )
190 )
192 )
191 else:
193 else:
192 try:
194 try:
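
The taropen hunk is subtler: the returned call already spans several lines, and 20.8b1 wraps the whole return value in an extra pair of parentheses while the pytype pragma stays glued to the call it silences. The sketch below reproduces the shape; whether every multi-line return gets this treatment is inferred from this hunk, not from black's documentation:

    def open_archive(opener, name, mode, fileobj):
        # the comment stays attached to the call line it annotates
        return (
            opener(  # pragma-style comment survives in place
                name, mode, fileobj
            )
        )
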
@@ -224,8 +226,8 b' class tarit(object):'
224
226
225
227
226 class zipit(object):
228 class zipit(object):
227 '''write archive to zip file or stream. can write uncompressed,
229 """write archive to zip file or stream. can write uncompressed,
228 or compressed with deflate.'''
230 or compressed with deflate."""
229
231
230 def __init__(self, dest, mtime, compress=True):
232 def __init__(self, dest, mtime, compress=True):
231 if isinstance(dest, bytes):
233 if isinstance(dest, bytes):
@@ -316,7 +318,7 b' def archive('
316 mtime=None,
318 mtime=None,
317 subrepos=False,
319 subrepos=False,
318 ):
320 ):
319 '''create archive of repo as it was at node.
321 """create archive of repo as it was at node.
320
322
321 dest can be name of directory, name of archive file, or file
323 dest can be name of directory, name of archive file, or file
322 object to write archive to.
324 object to write archive to.
@@ -333,7 +335,7 b' def archive('
333 mtime is the modified time, in seconds, or None to use the changeset time.
335 mtime is the modified time, in seconds, or None to use the changeset time.
334
336
335 subrepos tells whether to include subrepos.
337 subrepos tells whether to include subrepos.
336 '''
338 """
337
339
338 if kind == b'txz' and not pycompat.ispy3:
340 if kind == b'txz' and not pycompat.ispy3:
339 raise error.Abort(_(b'xz compression is only available in Python 3'))
341 raise error.Abort(_(b'xz compression is only available in Python 3'))
@@ -189,8 +189,7 b' class bmstore(object):'
189 return self._nodemap.get(node, [])
189 return self._nodemap.get(node, [])
190
190
191 def applychanges(self, repo, tr, changes):
191 def applychanges(self, repo, tr, changes):
192 """Apply a list of changes to bookmarks
192 """Apply a list of changes to bookmarks"""
193 """
194 bmchanges = tr.changes.get(b'bookmarks')
193 bmchanges = tr.changes.get(b'bookmarks')
195 for name, node in changes:
194 for name, node in changes:
196 old = self._refmap.get(name)
195 old = self._refmap.get(name)
@@ -422,8 +421,8 b' def headsforactive(repo):'
422
421
423
422
424 def calculateupdate(ui, repo):
423 def calculateupdate(ui, repo):
425 '''Return a tuple (activemark, movemarkfrom) indicating the active bookmark
424 """Return a tuple (activemark, movemarkfrom) indicating the active bookmark
426 and where to move the active bookmark from, if needed.'''
425 and where to move the active bookmark from, if needed."""
427 checkout, movemarkfrom = None, None
426 checkout, movemarkfrom = None, None
428 activemark = repo._activebookmark
427 activemark = repo._activebookmark
429 if isactivewdirparent(repo):
428 if isactivewdirparent(repo):
@@ -509,7 +508,7 b' def pushbookmark(repo, key, old, new):'
509
508
510
509
511 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
510 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
512 '''Compare bookmarks between srcmarks and dstmarks
511 """Compare bookmarks between srcmarks and dstmarks
513
512
514 This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
513 This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
515 differ, invalid)", each are list of bookmarks below:
514 differ, invalid)", each are list of bookmarks below:
@@ -532,7 +531,7 b' def comparebookmarks(repo, srcmarks, dst'
532
531
533 If "targets" is specified, only bookmarks listed in it are
532 If "targets" is specified, only bookmarks listed in it are
534 examined.
533 examined.
535 '''
534 """
536
535
537 if targets:
536 if targets:
538 bset = set(targets)
537 bset = set(targets)
@@ -585,14 +584,14 b' def comparebookmarks(repo, srcmarks, dst'
585
584
586
585
587 def _diverge(ui, b, path, localmarks, remotenode):
586 def _diverge(ui, b, path, localmarks, remotenode):
588 '''Return appropriate diverged bookmark for specified ``path``
587 """Return appropriate diverged bookmark for specified ``path``
589
588
590 This returns None, if it is failed to assign any divergent
589 This returns None, if it is failed to assign any divergent
591 bookmark name.
590 bookmark name.
592
591
593 This reuses already existing one with "@number" suffix, if it
592 This reuses already existing one with "@number" suffix, if it
594 refers ``remotenode``.
593 refers ``remotenode``.
595 '''
594 """
596 if b == b'@':
595 if b == b'@':
597 b = b''
596 b = b''
598 # try to use an @pathalias suffix
597 # try to use an @pathalias suffix
@@ -762,13 +761,17 b' def updatefromremote(ui, repo, remotemar'
762
761
763
762
764 def incoming(ui, repo, peer):
763 def incoming(ui, repo, peer):
765 '''Show bookmarks incoming from other to repo
764 """Show bookmarks incoming from other to repo"""
766 '''
767 ui.status(_(b"searching for changed bookmarks\n"))
765 ui.status(_(b"searching for changed bookmarks\n"))
768
766
769 with peer.commandexecutor() as e:
767 with peer.commandexecutor() as e:
770 remotemarks = unhexlifybookmarks(
768 remotemarks = unhexlifybookmarks(
771 e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
769 e.callcommand(
770 b'listkeys',
771 {
772 b'namespace': b'bookmarks',
773 },
774 ).result()
772 )
775 )
773
776
774 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
777 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -813,8 +816,7 b' def incoming(ui, repo, peer):'
813
816
814
817
815 def outgoing(ui, repo, other):
818 def outgoing(ui, repo, other):
816 '''Show bookmarks outgoing from repo to other
819 """Show bookmarks outgoing from repo to other"""
817 '''
818 ui.status(_(b"searching for changed bookmarks\n"))
820 ui.status(_(b"searching for changed bookmarks\n"))
819
821
820 remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
822 remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
@@ -863,13 +865,18 b' def outgoing(ui, repo, other):'
863
865
864
866
865 def summary(repo, peer):
867 def summary(repo, peer):
866 '''Compare bookmarks between repo and other for "hg summary" output
868 """Compare bookmarks between repo and other for "hg summary" output
867
869
868 This returns "(# of incoming, # of outgoing)" tuple.
870 This returns "(# of incoming, # of outgoing)" tuple.
869 '''
871 """
870 with peer.commandexecutor() as e:
872 with peer.commandexecutor() as e:
871 remotemarks = unhexlifybookmarks(
873 remotemarks = unhexlifybookmarks(
872 e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
874 e.callcommand(
875 b'listkeys',
876 {
877 b'namespace': b'bookmarks',
878 },
879 ).result()
873 )
880 )
874
881
875 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
882 r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -41,7 +41,17 b' if pycompat.TYPE_CHECKING:'
41 )
41 )
42
42
43 assert any(
43 assert any(
44 (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union,)
44 (
45 Any,
46 Callable,
47 Dict,
48 Iterable,
49 List,
50 Optional,
51 Set,
52 Tuple,
53 Union,
54 )
45 )
55 )
46
56
47 subsettable = repoviewutil.subsettable
57 subsettable = repoviewutil.subsettable
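
The exploded tuple above feeds an assert that appears to exist only to mark the conditionally imported typing names as used, so linters do not flag the TYPE_CHECKING-only imports; black merely re-wraps the tuple because of its trailing comma. A sketch of the idiom:

    from typing import Any, Callable, Optional

    # Keep-alive: referencing the names in a cheap assert silences
    # "imported but unused" warnings for typing-only imports.
    assert any(
        (
            Any,
            Callable,
            Optional,
        )
    )
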
@@ -139,8 +149,7 b' class BranchMapCache(object):'
139
149
140
150
141 def _unknownnode(node):
151 def _unknownnode(node):
142 """ raises ValueError when branchcache found a node which does not exists
152 """raises ValueError when branchcache found a node which does not exists"""
143 """
144 raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
153 raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
145
154
146
155
@@ -183,9 +192,9 b' class branchcache(object):'
183 hasnode=None,
192 hasnode=None,
184 ):
193 ):
185 # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
194 # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
186 """ hasnode is a function which can be used to verify whether changelog
195 """hasnode is a function which can be used to verify whether changelog
187 has a given node or not. If it's not provided, we assume that every node
196 has a given node or not. If it's not provided, we assume that every node
188 we have exists in changelog """
197 we have exists in changelog"""
189 self.tipnode = tipnode
198 self.tipnode = tipnode
190 self.tiprev = tiprev
199 self.tiprev = tiprev
191 self.filteredhash = filteredhash
200 self.filteredhash = filteredhash
@@ -304,7 +313,7 b' class branchcache(object):'
304 return bcache
313 return bcache
305
314
306 def load(self, repo, lineiter):
315 def load(self, repo, lineiter):
307 """ fully loads the branchcache by reading from the file using the line
316 """fully loads the branchcache by reading from the file using the line
308 iterator passed"""
317 iterator passed"""
309 for line in lineiter:
318 for line in lineiter:
310 line = line.rstrip(b'\n')
319 line = line.rstrip(b'\n')
@@ -340,8 +349,8 b' class branchcache(object):'
340 return False
349 return False
341
350
342 def _branchtip(self, heads):
351 def _branchtip(self, heads):
343 '''Return tuple with last open head in heads and false,
352 """Return tuple with last open head in heads and false,
344 otherwise return last closed head and true.'''
353 otherwise return last closed head and true."""
345 tip = heads[-1]
354 tip = heads[-1]
346 closed = True
355 closed = True
347 for h in reversed(heads):
356 for h in reversed(heads):
@@ -352,9 +361,9 b' class branchcache(object):'
352 return tip, closed
361 return tip, closed
353
362
354 def branchtip(self, branch):
363 def branchtip(self, branch):
355 '''Return the tipmost open head on branch head, otherwise return the
364 """Return the tipmost open head on branch head, otherwise return the
356 tipmost closed head on branch.
365 tipmost closed head on branch.
357 Raise KeyError for unknown branch.'''
366 Raise KeyError for unknown branch."""
358 return self._branchtip(self[branch])[0]
367 return self._branchtip(self[branch])[0]
359
368
360 def iteropen(self, nodes):
369 def iteropen(self, nodes):
@@ -489,7 +489,12 b' def processparts(repo, op, unbundler):'
489
489
490 def _processchangegroup(op, cg, tr, source, url, **kwargs):
490 def _processchangegroup(op, cg, tr, source, url, **kwargs):
491 ret = cg.apply(op.repo, tr, source, url, **kwargs)
491 ret = cg.apply(op.repo, tr, source, url, **kwargs)
492 op.records.add(b'changegroup', {b'return': ret,})
492 op.records.add(
493 b'changegroup',
494 {
495 b'return': ret,
496 },
497 )
493 return ret
498 return ret
494
499
495
500
@@ -1647,8 +1652,7 b' def bundle2caps(remote):'
1647
1652
1648
1653
1649 def obsmarkersversion(caps):
1654 def obsmarkersversion(caps):
1650 """extract the list of supported obsmarkers versions from a bundle2caps dict
1655 """extract the list of supported obsmarkers versions from a bundle2caps dict"""
1651 """
1652 obscaps = caps.get(b'obsmarkers', ())
1656 obscaps = caps.get(b'obsmarkers', ())
1653 return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
1657 return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
1654
1658
@@ -328,8 +328,7 b' class bundlerepository(object):'
328 self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
328 self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
329
329
330 def _writetempbundle(self, readfn, suffix, header=b''):
330 def _writetempbundle(self, readfn, suffix, header=b''):
331 """Write a temporary file to disk
331 """Write a temporary file to disk"""
332 """
333 fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
332 fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
334 self.tempfile = temp
333 self.tempfile = temp
335
334
@@ -530,7 +529,7 b' class bundletransactionmanager(object):'
530 def getremotechanges(
529 def getremotechanges(
531 ui, repo, peer, onlyheads=None, bundlename=None, force=False
530 ui, repo, peer, onlyheads=None, bundlename=None, force=False
532 ):
531 ):
533 '''obtains a bundle of changes incoming from peer
532 """obtains a bundle of changes incoming from peer
534
533
535 "onlyheads" restricts the returned changes to those reachable from the
534 "onlyheads" restricts the returned changes to those reachable from the
536 specified heads.
535 specified heads.
@@ -548,7 +547,7 b' def getremotechanges('
548 "cleanupfn" must be called without arguments when you're done processing
547 "cleanupfn" must be called without arguments when you're done processing
549 the changes; it closes both the original "peer" and the one returned
548 the changes; it closes both the original "peer" and the one returned
550 here.
549 here.
551 '''
550 """
552 tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
551 tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
553 common, incoming, rheads = tmp
552 common, incoming, rheads = tmp
554 if not incoming:
553 if not incoming:
@@ -611,7 +610,10 b' def getremotechanges('
611 with peer.commandexecutor() as e:
610 with peer.commandexecutor() as e:
612 cg = e.callcommand(
611 cg = e.callcommand(
613 b'changegroup',
612 b'changegroup',
614 {b'nodes': incoming, b'source': b'incoming',},
613 {
614 b'nodes': incoming,
615 b'source': b'incoming',
616 },
615 ).result()
617 ).result()
616
618
617 rheads = None
619 rheads = None
@@ -655,7 +657,10 b' def getremotechanges('
655
657
656 with peer.commandexecutor() as e:
658 with peer.commandexecutor() as e:
657 remotephases = e.callcommand(
659 remotephases = e.callcommand(
658 b'listkeys', {b'namespace': b'phases',}
660 b'listkeys',
661 {
662 b'namespace': b'phases',
663 },
659 ).result()
664 ).result()
660
665
661 pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
666 pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
@@ -91,8 +91,8 b' def stripdesc(desc):'
91
91
92
92
93 class appender(object):
93 class appender(object):
94 '''the changelog index must be updated last on disk, so we use this class
94 """the changelog index must be updated last on disk, so we use this class
95 to delay writes to it'''
95 to delay writes to it"""
96
96
97 def __init__(self, vfs, name, mode, buf):
97 def __init__(self, vfs, name, mode, buf):
98 self.data = buf
98 self.data = buf
@@ -399,7 +399,7 b' def filterchunks(ui, originalhunks, usec'
399
399
400
400
401 def recordfilter(ui, originalhunks, match, operation=None):
401 def recordfilter(ui, originalhunks, match, operation=None):
402 """ Prompts the user to filter the originalhunks and return a list of
402 """Prompts the user to filter the originalhunks and return a list of
403 selected hunks.
403 selected hunks.
404 *operation* is used for to build ui messages to indicate the user what
404 *operation* is used for to build ui messages to indicate the user what
405 kind of filtering they are doing: reverting, committing, shelving, etc.
405 kind of filtering they are doing: reverting, committing, shelving, etc.
@@ -1078,7 +1078,7 b' def findrepo(p):'
1078
1078
1079
1079
1080 def bailifchanged(repo, merge=True, hint=None):
1080 def bailifchanged(repo, merge=True, hint=None):
1081 """ enforce the precondition that working directory must be clean.
1081 """enforce the precondition that working directory must be clean.
1082
1082
1083 'merge' can be set to false if a pending uncommitted merge should be
1083 'merge' can be set to false if a pending uncommitted merge should be
1084 ignored (such as when 'update --check' runs).
1084 ignored (such as when 'update --check' runs).
@@ -2184,7 +2184,7 b' def export('
2184 opts=None,
2184 opts=None,
2185 match=None,
2185 match=None,
2186 ):
2186 ):
2187 '''export changesets as hg patches
2187 """export changesets as hg patches
2188
2188
2189 Args:
2189 Args:
2190 repo: The repository from which we're exporting revisions.
2190 repo: The repository from which we're exporting revisions.
@@ -2205,7 +2205,7 b' def export('
2205 fntemplate specified: Each rev is written to a unique file named using
2205 fntemplate specified: Each rev is written to a unique file named using
2206 the given template.
2206 the given template.
2207 Otherwise: All revs will be written to basefm.
2207 Otherwise: All revs will be written to basefm.
2208 '''
2208 """
2209 _prefetchchangedfiles(repo, revs, match)
2209 _prefetchchangedfiles(repo, revs, match)
2210
2210
2211 if not fntemplate:
2211 if not fntemplate:
@@ -3476,7 +3476,8 b' def revert(ui, repo, ctx, *pats, **opts)'
3476 repo, [f for sublist in oplist for f in sublist]
3476 repo, [f for sublist in oplist for f in sublist]
3477 )
3477 )
3478 prefetch(
3478 prefetch(
3479 repo, [(ctx.rev(), matchfiles)],
3479 repo,
3480 [(ctx.rev(), matchfiles)],
3480 )
3481 )
3481 match = scmutil.match(repo[None], pats)
3482 match = scmutil.match(repo[None], pats)
3482 _performrevert(
3483 _performrevert(
@@ -3724,10 +3725,10 b' summaryremotehooks = util.hooks()'
3724
3725
3725
3726
3726 def checkunfinished(repo, commit=False, skipmerge=False):
3727 def checkunfinished(repo, commit=False, skipmerge=False):
3727 '''Look for an unfinished multistep operation, like graft, and abort
3728 """Look for an unfinished multistep operation, like graft, and abort
3728 if found. It's probably good to check this right before
3729 if found. It's probably good to check this right before
3729 bailifchanged().
3730 bailifchanged().
3730 '''
3731 """
3731 # Check for non-clearable states first, so things like rebase will take
3732 # Check for non-clearable states first, so things like rebase will take
3732 # precedence over update.
3733 # precedence over update.
3733 for state in statemod._unfinishedstates:
3734 for state in statemod._unfinishedstates:
@@ -3753,9 +3754,9 b' def checkunfinished(repo, commit=False, '
3753
3754
3754
3755
3755 def clearunfinished(repo):
3756 def clearunfinished(repo):
3756 '''Check for unfinished operations (as above), and clear the ones
3757 """Check for unfinished operations (as above), and clear the ones
3757 that are clearable.
3758 that are clearable.
3758 '''
3759 """
3759 for state in statemod._unfinishedstates:
3760 for state in statemod._unfinishedstates:
3760 if state._reportonly:
3761 if state._reportonly:
3761 continue
3762 continue
@@ -3770,8 +3771,8 b' def clearunfinished(repo):'
3770
3771
3771
3772
3772 def getunfinishedstate(repo):
3773 def getunfinishedstate(repo):
3773 ''' Checks for unfinished operations and returns statecheck object
3774 """Checks for unfinished operations and returns statecheck object
3774 for it'''
3775 for it"""
3775 for state in statemod._unfinishedstates:
3776 for state in statemod._unfinishedstates:
3776 if state.isunfinished(repo):
3777 if state.isunfinished(repo):
3777 return state
3778 return state
@@ -3779,7 +3780,7 b' def getunfinishedstate(repo):'
3779
3780
3780
3781
3781 def howtocontinue(repo):
3782 def howtocontinue(repo):
3782 '''Check for an unfinished operation and return the command to finish
3783 """Check for an unfinished operation and return the command to finish
3783 it.
3784 it.
3784
3785
3785 statemod._unfinishedstates list is checked for an unfinished operation
3786 statemod._unfinishedstates list is checked for an unfinished operation
@@ -3788,7 +3789,7 b' def howtocontinue(repo):'
3788
3789
3789 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3790 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3790 a boolean.
3791 a boolean.
3791 '''
3792 """
3792 contmsg = _(b"continue: %s")
3793 contmsg = _(b"continue: %s")
3793 for state in statemod._unfinishedstates:
3794 for state in statemod._unfinishedstates:
3794 if not state._continueflag:
3795 if not state._continueflag:
@@ -3801,13 +3802,13 b' def howtocontinue(repo):'
3801
3802
3802
3803
3803 def checkafterresolved(repo):
3804 def checkafterresolved(repo):
3804 '''Inform the user about the next action after completing hg resolve
3805 """Inform the user about the next action after completing hg resolve
3805
3806
3806 If there's a an unfinished operation that supports continue flag,
3807 If there's a an unfinished operation that supports continue flag,
3807 howtocontinue will yield repo.ui.warn as the reporter.
3808 howtocontinue will yield repo.ui.warn as the reporter.
3808
3809
3809 Otherwise, it will yield repo.ui.note.
3810 Otherwise, it will yield repo.ui.note.
3810 '''
3811 """
3811 msg, warning = howtocontinue(repo)
3812 msg, warning = howtocontinue(repo)
3812 if msg is not None:
3813 if msg is not None:
3813 if warning:
3814 if warning:
@@ -3817,14 +3818,14 b' def checkafterresolved(repo):'
3817
3818
3818
3819
3819 def wrongtooltocontinue(repo, task):
3820 def wrongtooltocontinue(repo, task):
3820 '''Raise an abort suggesting how to properly continue if there is an
3821 """Raise an abort suggesting how to properly continue if there is an
3821 active task.
3822 active task.
3822
3823
3823 Uses howtocontinue() to find the active task.
3824 Uses howtocontinue() to find the active task.
3824
3825
3825 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3826 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3826 a hint.
3827 a hint.
3827 '''
3828 """
3828 after = howtocontinue(repo)
3829 after = howtocontinue(repo)
3829 hint = None
3830 hint = None
3830 if after[1]:
3831 if after[1]:
@@ -605,7 +605,7 b' def annotate(ui, repo, *pats, **opts):'
605 helpcategory=command.CATEGORY_IMPORT_EXPORT,
605 helpcategory=command.CATEGORY_IMPORT_EXPORT,
606 )
606 )
607 def archive(ui, repo, dest, **opts):
607 def archive(ui, repo, dest, **opts):
608 '''create an unversioned archive of a repository revision
608 """create an unversioned archive of a repository revision
609
609
610 By default, the revision used is the parent of the working
610 By default, the revision used is the parent of the working
611 directory; use -r/--rev to specify a different revision.
611 directory; use -r/--rev to specify a different revision.
@@ -644,7 +644,7 b' def archive(ui, repo, dest, **opts):'
644 removed.
644 removed.
645
645
646 Returns 0 on success.
646 Returns 0 on success.
647 '''
647 """
648
648
649 opts = pycompat.byteskwargs(opts)
649 opts = pycompat.byteskwargs(opts)
650 rev = opts.get(b'rev')
650 rev = opts.get(b'rev')
@@ -718,7 +718,7 b' def archive(ui, repo, dest, **opts):'
718 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
718 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
719 )
719 )
720 def backout(ui, repo, node=None, rev=None, **opts):
720 def backout(ui, repo, node=None, rev=None, **opts):
721 '''reverse effect of earlier changeset
721 """reverse effect of earlier changeset
722
722
723 Prepare a new changeset with the effect of REV undone in the
723 Prepare a new changeset with the effect of REV undone in the
724 current working directory. If no conflicts were encountered,
724 current working directory. If no conflicts were encountered,
@@ -768,7 +768,7 b' def backout(ui, repo, node=None, rev=Non'
768
768
769 Returns 0 on success, 1 if nothing to backout or there are unresolved
769 Returns 0 on success, 1 if nothing to backout or there are unresolved
770 files.
770 files.
771 '''
771 """
772 with repo.wlock(), repo.lock():
772 with repo.wlock(), repo.lock():
773 return _dobackout(ui, repo, node, rev, **opts)
773 return _dobackout(ui, repo, node, rev, **opts)
774
774
@@ -1166,7 +1166,7 b' def bisect('
1166 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1166 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1167 )
1167 )
1168 def bookmark(ui, repo, *names, **opts):
1168 def bookmark(ui, repo, *names, **opts):
1169 '''create a new bookmark or list existing bookmarks
1169 """create a new bookmark or list existing bookmarks
1170
1170
1171 Bookmarks are labels on changesets to help track lines of development.
1171 Bookmarks are labels on changesets to help track lines of development.
1172 Bookmarks are unversioned and can be moved, renamed and deleted.
1172 Bookmarks are unversioned and can be moved, renamed and deleted.
@@ -1224,7 +1224,7 b' def bookmark(ui, repo, *names, **opts):'
1224 - print only the active bookmark name::
1224 - print only the active bookmark name::
1225
1225
1226 hg book -ql .
1226 hg book -ql .
1227 '''
1227 """
1228 opts = pycompat.byteskwargs(opts)
1228 opts = pycompat.byteskwargs(opts)
1229 force = opts.get(b'force')
1229 force = opts.get(b'force')
1230 rev = opts.get(b'rev')
1230 rev = opts.get(b'rev')
@@ -2804,7 +2804,9 b' def files(ui, repo, *pats, **opts):'
2804
2804
2805 @command(
2805 @command(
2806 b'forget',
2806 b'forget',
2807 [(b'i', b'interactive', None, _(b'use interactive mode')),]
2807 [
2808 (b'i', b'interactive', None, _(b'use interactive mode')),
2809 ]
2808 + walkopts
2810 + walkopts
2809 + dryrunopts,
2811 + dryrunopts,
2810 _(b'[OPTION]... FILE...'),
2812 _(b'[OPTION]... FILE...'),
@@ -2904,7 +2906,7 b' def forget(ui, repo, *pats, **opts):'
2904 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2906 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2905 )
2907 )
2906 def graft(ui, repo, *revs, **opts):
2908 def graft(ui, repo, *revs, **opts):
2907 '''copy changes from other branches onto the current branch
2909 """copy changes from other branches onto the current branch
2908
2910
2909 This command uses Mercurial's merge logic to copy individual
2911 This command uses Mercurial's merge logic to copy individual
2910 changes from other branches without merging branches in the
2912 changes from other branches without merging branches in the
@@ -2997,7 +2999,7 b' def graft(ui, repo, *revs, **opts):'
2997 See :hg:`help revisions` for more about specifying revisions.
2999 See :hg:`help revisions` for more about specifying revisions.
2998
3000
2999 Returns 0 on successful completion, 1 if there are unresolved files.
3001 Returns 0 on successful completion, 1 if there are unresolved files.
3000 '''
3002 """
3001 with repo.wlock():
3003 with repo.wlock():
3002 return _dograft(ui, repo, *revs, **opts)
3004 return _dograft(ui, repo, *revs, **opts)
3003
3005
@@ -5261,7 +5263,12 b' def postincoming(ui, repo, modheads, opt'
5261 None,
5263 None,
5262 _(b'run even when remote repository is unrelated'),
5264 _(b'run even when remote repository is unrelated'),
5263 ),
5265 ),
5264 (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
5266 (
5267 b'',
5268 b'confirm',
5269 None,
5270 _(b'confirm pull before applying changes'),
5271 ),
5265 (
5272 (
5266 b'r',
5273 b'r',
5267 b'rev',
5274 b'rev',
@@ -5518,7 +5525,9 b' def push(ui, repo, dest=None, **opts):'
5518
5525
5519 if opts.get(b'all_bookmarks'):
5526 if opts.get(b'all_bookmarks'):
5520 cmdutil.check_incompatible_arguments(
5527 cmdutil.check_incompatible_arguments(
5521 opts, b'all_bookmarks', [b'bookmark', b'rev'],
5528 opts,
5529 b'all_bookmarks',
5530 [b'bookmark', b'rev'],
5522 )
5531 )
5523 opts[b'bookmark'] = list(repo._bookmarks)
5532 opts[b'bookmark'] = list(repo._bookmarks)
5524
5533
@@ -5608,7 +5617,9 b' def push(ui, repo, dest=None, **opts):'
5608
5617
5609 @command(
5618 @command(
5610 b'recover',
5619 b'recover',
5611 [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
5620 [
5621 (b'', b'verify', False, b"run `hg verify` after successful recover"),
5622 ],
5612 helpcategory=command.CATEGORY_MAINTENANCE,
5623 helpcategory=command.CATEGORY_MAINTENANCE,
5613 )
5624 )
5614 def recover(ui, repo, **opts):
5625 def recover(ui, repo, **opts):
@@ -6448,7 +6459,7 b' def serve(ui, repo, **opts):'
6448 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6459 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
6449 )
6460 )
6450 def shelve(ui, repo, *pats, **opts):
6461 def shelve(ui, repo, *pats, **opts):
6451 '''save and set aside changes from the working directory
6462 """save and set aside changes from the working directory
6452
6463
6453 Shelving takes files that "hg status" reports as not clean, saves
6464 Shelving takes files that "hg status" reports as not clean, saves
6454 the modifications to a bundle (a shelved change), and reverts the
6465 the modifications to a bundle (a shelved change), and reverts the
@@ -6479,7 +6490,7 b' def shelve(ui, repo, *pats, **opts):'
6479
6490
6480 To delete specific shelved changes, use ``--delete``. To delete
6491 To delete specific shelved changes, use ``--delete``. To delete
6481 all shelved changes, use ``--cleanup``.
6492 all shelved changes, use ``--cleanup``.
6482 '''
6493 """
6483 opts = pycompat.byteskwargs(opts)
6494 opts = pycompat.byteskwargs(opts)
6484 allowables = [
6495 allowables = [
6485 (b'addremove', {b'create'}), # 'create' is pseudo action
6496 (b'addremove', {b'create'}), # 'create' is pseudo action
@@ -7707,8 +7718,7 b' def version_(ui, **opts):'
7707
7718
7708
7719
7709 def loadcmdtable(ui, name, cmdtable):
7720 def loadcmdtable(ui, name, cmdtable):
7710 """Load command functions from specified cmdtable
7721 """Load command functions from specified cmdtable"""
7711 """
7712 overrides = [cmd for cmd in cmdtable if cmd in table]
7722 overrides = [cmd for cmd in cmdtable if cmd in table]
7713 if overrides:
7723 if overrides:
7714 ui.warn(
7724 ui.warn(
@@ -316,8 +316,8 b' class server(object):'
316 return -1
316 return -1
317
317
318 def runcommand(self):
318 def runcommand(self):
319 """ reads a list of \0 terminated arguments, executes
319 """reads a list of \0 terminated arguments, executes
320 and writes the return code to the result channel """
320 and writes the return code to the result channel"""
321 from . import dispatch # avoid cycle
321 from . import dispatch # avoid cycle
322
322
323 args = self._readlist()
323 args = self._readlist()
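
Two more docstring cleanups combine in the runcommand hunk: the padding space just inside the opening quotes is removed, and the space before the closing quotes goes with it. Sketch (text shortened):

    # Before: text padded away from both delimiters.
    def runcommand_sketch():
        """ reads a list of arguments, executes
        and writes the return code to the result channel """

    # After: padding inside the quotes is stripped.
    def runcommand_sketch():
        """reads a list of arguments, executes
        and writes the return code to the result channel"""
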
@@ -98,7 +98,11 b' def commitctx(repo, ctx, error=False, or'
98 )
98 )
99 xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
99 xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
100 repo.hook(
100 repo.hook(
101 b'pretxncommit', throw=True, node=hex(n), parent1=xp1, parent2=xp2,
101 b'pretxncommit',
102 throw=True,
103 node=hex(n),
104 parent1=xp1,
105 parent2=xp2,
102 )
106 )
103 # set the new commit is proper phase
107 # set the new commit is proper phase
104 targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
108 targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
@@ -154,10 +158,10 b' def _prepare_files(tr, ctx, error=False,'
154
158
155
159
156 def _get_salvaged(repo, ms, ctx):
160 def _get_salvaged(repo, ms, ctx):
157 """ returns a list of salvaged files
161 """returns a list of salvaged files
158
162
159 returns empty list if config option which process salvaged files are
163 returns empty list if config option which process salvaged files are
160 not enabled """
164 not enabled"""
161 salvaged = []
165 salvaged = []
162 copy_sd = repo.filecopiesmode == b'changeset-sidedata'
166 copy_sd = repo.filecopiesmode == b'changeset-sidedata'
163 if copy_sd and len(ctx.parents()) > 1:
167 if copy_sd and len(ctx.parents()) > 1:
@@ -238,7 +242,14 b' def _process_files(tr, ctx, ms, files, e'
238
242
239
243
240 def _filecommit(
244 def _filecommit(
241 repo, fctx, manifest1, manifest2, linkrev, tr, includecopymeta, ms,
245 repo,
246 fctx,
247 manifest1,
248 manifest2,
249 linkrev,
250 tr,
251 includecopymeta,
252 ms,
242 ):
253 ):
243 """
254 """
244 commit an individual file as part of a larger transaction
255 commit an individual file as part of a larger transaction
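
def headers follow the same rule as call sites: the trailing comma in _filecommit's old single-line parameter list makes 20.8b1 put one parameter per line. A shortened, hypothetical signature shows the shape:

    # Trailing comma in the parameter list pins the def open:
    def filecommit_sketch(
        repo,
        fctx,
        linkrev,
    ):
        return (repo, fctx, linkrev)
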
@@ -208,9 +208,11 b' class config(object):'
208 def read(self, path, fp=None, sections=None, remap=None):
208 def read(self, path, fp=None, sections=None, remap=None):
209 if not fp:
209 if not fp:
210 fp = util.posixfile(path, b'rb')
210 fp = util.posixfile(path, b'rb')
211 assert getattr(fp, 'mode', 'rb') == 'rb', (
211 assert (
212 b'config files must be opened in binary mode, got fp=%r mode=%r'
212 getattr(fp, 'mode', 'rb') == 'rb'
213 % (fp, fp.mode,)
213 ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
214 fp,
215 fp.mode,
214 )
216 )
215
217
216 dir = os.path.dirname(path)
218 dir = os.path.dirname(path)
This diff has been collapsed as it changes many lines (3311 lines changed).
@@ -133,78 +133,127 b' coreconfigitem = getitemregister(coreite'
133
133
134 def _registerdiffopts(section, configprefix=b''):
134 def _registerdiffopts(section, configprefix=b''):
135 coreconfigitem(
135 coreconfigitem(
136 section, configprefix + b'nodates', default=False,
136 section,
137 configprefix + b'nodates',
138 default=False,
137 )
139 )
138 coreconfigitem(
140 coreconfigitem(
139 section, configprefix + b'showfunc', default=False,
141 section,
142 configprefix + b'showfunc',
143 default=False,
140 )
144 )
141 coreconfigitem(
145 coreconfigitem(
142 section, configprefix + b'unified', default=None,
146 section,
147 configprefix + b'unified',
148 default=None,
143 )
149 )
144 coreconfigitem(
150 coreconfigitem(
145 section, configprefix + b'git', default=False,
151 section,
152 configprefix + b'git',
153 default=False,
146 )
154 )
147 coreconfigitem(
155 coreconfigitem(
148 section, configprefix + b'ignorews', default=False,
156 section,
157 configprefix + b'ignorews',
158 default=False,
149 )
159 )
150 coreconfigitem(
160 coreconfigitem(
151 section, configprefix + b'ignorewsamount', default=False,
161 section,
162 configprefix + b'ignorewsamount',
163 default=False,
152 )
164 )
153 coreconfigitem(
165 coreconfigitem(
154 section, configprefix + b'ignoreblanklines', default=False,
166 section,
167 configprefix + b'ignoreblanklines',
168 default=False,
155 )
169 )
156 coreconfigitem(
170 coreconfigitem(
157 section, configprefix + b'ignorewseol', default=False,
171 section,
172 configprefix + b'ignorewseol',
173 default=False,
158 )
174 )
159 coreconfigitem(
175 coreconfigitem(
160 section, configprefix + b'nobinary', default=False,
176 section,
177 configprefix + b'nobinary',
178 default=False,
161 )
179 )
162 coreconfigitem(
180 coreconfigitem(
163 section, configprefix + b'noprefix', default=False,
181 section,
182 configprefix + b'noprefix',
183 default=False,
164 )
184 )
165 coreconfigitem(
185 coreconfigitem(
166 section, configprefix + b'word-diff', default=False,
186 section,
187 configprefix + b'word-diff',
188 default=False,
167 )
189 )
168
190
169
191
170 coreconfigitem(
192 coreconfigitem(
171 b'alias', b'.*', default=dynamicdefault, generic=True,
193 b'alias',
172 )
194 b'.*',
173 coreconfigitem(
195 default=dynamicdefault,
174 b'auth', b'cookiefile', default=None,
196 generic=True,
197 )
198 coreconfigitem(
199 b'auth',
200 b'cookiefile',
201 default=None,
175 )
202 )
176 _registerdiffopts(section=b'annotate')
203 _registerdiffopts(section=b'annotate')
177 # bookmarks.pushing: internal hack for discovery
204 # bookmarks.pushing: internal hack for discovery
178 coreconfigitem(
205 coreconfigitem(
179 b'bookmarks', b'pushing', default=list,
206 b'bookmarks',
207 b'pushing',
208 default=list,
180 )
209 )
181 # bundle.mainreporoot: internal hack for bundlerepo
210 # bundle.mainreporoot: internal hack for bundlerepo
182 coreconfigitem(
211 coreconfigitem(
183 b'bundle', b'mainreporoot', default=b'',
212 b'bundle',
184 )
213 b'mainreporoot',
185 coreconfigitem(
214 default=b'',
186 b'censor', b'policy', default=b'abort', experimental=True,
215 )
187 )
216 coreconfigitem(
188 coreconfigitem(
217 b'censor',
189 b'chgserver', b'idletimeout', default=3600,
218 b'policy',
190 )
219 default=b'abort',
191 coreconfigitem(
220 experimental=True,
192 b'chgserver', b'skiphash', default=False,
221 )
193 )
222 coreconfigitem(
194 coreconfigitem(
223 b'chgserver',
195 b'cmdserver', b'log', default=None,
224 b'idletimeout',
196 )
225 default=3600,
197 coreconfigitem(
226 )
198 b'cmdserver', b'max-log-files', default=7,
227 coreconfigitem(
199 )
228 b'chgserver',
200 coreconfigitem(
229 b'skiphash',
201 b'cmdserver', b'max-log-size', default=b'1 MB',
230 default=False,
202 )
231 )
203 coreconfigitem(
232 coreconfigitem(
204 b'cmdserver', b'max-repo-cache', default=0, experimental=True,
233 b'cmdserver',
205 )
234 b'log',
206 coreconfigitem(
235 default=None,
207 b'cmdserver', b'message-encodings', default=list,
236 )
237 coreconfigitem(
238 b'cmdserver',
239 b'max-log-files',
240 default=7,
241 )
242 coreconfigitem(
243 b'cmdserver',
244 b'max-log-size',
245 default=b'1 MB',
246 )
247 coreconfigitem(
248 b'cmdserver',
249 b'max-repo-cache',
250 default=0,
251 experimental=True,
252 )
253 coreconfigitem(
254 b'cmdserver',
255 b'message-encodings',
256 default=list,
208 )
257 )
209 coreconfigitem(
258 coreconfigitem(
210 b'cmdserver',
259 b'cmdserver',
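Everything from here down in the collapsed configitems.py diff is the same mechanical transformation: black 20.8b1's "magic trailing comma" forces any call that already ends in a trailing comma onto one argument per line. A small way to reproduce the effect, assuming the black package is installed; format_str and FileMode are black's public API, and string normalization is disabled here to match how this tree keeps its single-quoted b'...' literals:

    import black

    src = "coreconfigitem(b'pager', b'ignore', default=list,)\n"
    mode = black.FileMode(string_normalization=False)
    print(black.format_str(src, mode=mode))
    # Because of the trailing comma, black emits roughly:
    #     coreconfigitem(
    #         b'pager',
    #         b'ignore',
    #         default=list,
    #     )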
@@ -212,16 +261,25 b' coreconfigitem('
212 default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
261 default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
213 )
262 )
214 coreconfigitem(
263 coreconfigitem(
215 b'cmdserver', b'shutdown-on-interrupt', default=True,
264 b'cmdserver',
216 )
265 b'shutdown-on-interrupt',
217 coreconfigitem(
266 default=True,
218 b'color', b'.*', default=None, generic=True,
267 )
219 )
268 coreconfigitem(
220 coreconfigitem(
269 b'color',
221 b'color', b'mode', default=b'auto',
270 b'.*',
222 )
271 default=None,
223 coreconfigitem(
272 generic=True,
224 b'color', b'pagermode', default=dynamicdefault,
273 )
274 coreconfigitem(
275 b'color',
276 b'mode',
277 default=b'auto',
278 )
279 coreconfigitem(
280 b'color',
281 b'pagermode',
282 default=dynamicdefault,
225 )
283 )
226 coreconfigitem(
284 coreconfigitem(
227 b'command-templates',
285 b'command-templates',
@@ -230,7 +288,10 b' coreconfigitem('
230 alias=[(b'ui', b'graphnodetemplate')],
288 alias=[(b'ui', b'graphnodetemplate')],
231 )
289 )
232 coreconfigitem(
290 coreconfigitem(
233 b'command-templates', b'log', default=None, alias=[(b'ui', b'logtemplate')],
291 b'command-templates',
292 b'log',
293 default=None,
294 alias=[(b'ui', b'logtemplate')],
234 )
295 )
235 coreconfigitem(
296 coreconfigitem(
236 b'command-templates',
297 b'command-templates',
@@ -252,7 +313,9 b' coreconfigitem('
252 alias=[(b'ui', b'pre-merge-tool-output-template')],
313 alias=[(b'ui', b'pre-merge-tool-output-template')],
253 )
314 )
254 coreconfigitem(
315 coreconfigitem(
255 b'command-templates', b'oneline-summary', default=None,
316 b'command-templates',
317 b'oneline-summary',
318 default=None,
256 )
319 )
257 coreconfigitem(
320 coreconfigitem(
258 b'command-templates',
321 b'command-templates',
@@ -262,327 +325,546 b' coreconfigitem('
262 )
325 )
263 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
326 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
264 coreconfigitem(
327 coreconfigitem(
265 b'commands', b'commit.post-status', default=False,
328 b'commands',
266 )
329 b'commit.post-status',
267 coreconfigitem(
330 default=False,
268 b'commands', b'grep.all-files', default=False, experimental=True,
331 )
269 )
332 coreconfigitem(
270 coreconfigitem(
333 b'commands',
271 b'commands', b'merge.require-rev', default=False,
334 b'grep.all-files',
272 )
335 default=False,
273 coreconfigitem(
336 experimental=True,
274 b'commands', b'push.require-revs', default=False,
337 )
275 )
338 coreconfigitem(
276 coreconfigitem(
339 b'commands',
277 b'commands', b'resolve.confirm', default=False,
340 b'merge.require-rev',
278 )
341 default=False,
279 coreconfigitem(
342 )
280 b'commands', b'resolve.explicit-re-merge', default=False,
343 coreconfigitem(
281 )
344 b'commands',
282 coreconfigitem(
345 b'push.require-revs',
283 b'commands', b'resolve.mark-check', default=b'none',
346 default=False,
347 )
348 coreconfigitem(
349 b'commands',
350 b'resolve.confirm',
351 default=False,
352 )
353 coreconfigitem(
354 b'commands',
355 b'resolve.explicit-re-merge',
356 default=False,
357 )
358 coreconfigitem(
359 b'commands',
360 b'resolve.mark-check',
361 default=b'none',
284 )
362 )
285 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
363 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
286 coreconfigitem(
364 coreconfigitem(
287 b'commands', b'show.aliasprefix', default=list,
365 b'commands',
288 )
366 b'show.aliasprefix',
289 coreconfigitem(
367 default=list,
290 b'commands', b'status.relative', default=False,
368 )
291 )
369 coreconfigitem(
292 coreconfigitem(
370 b'commands',
293 b'commands', b'status.skipstates', default=[], experimental=True,
371 b'status.relative',
294 )
372 default=False,
295 coreconfigitem(
373 )
296 b'commands', b'status.terse', default=b'',
374 coreconfigitem(
297 )
375 b'commands',
298 coreconfigitem(
376 b'status.skipstates',
299 b'commands', b'status.verbose', default=False,
377 default=[],
300 )
378 experimental=True,
301 coreconfigitem(
379 )
302 b'commands', b'update.check', default=None,
380 coreconfigitem(
303 )
381 b'commands',
304 coreconfigitem(
382 b'status.terse',
305 b'commands', b'update.requiredest', default=False,
383 default=b'',
306 )
384 )
307 coreconfigitem(
385 coreconfigitem(
308 b'committemplate', b'.*', default=None, generic=True,
386 b'commands',
309 )
387 b'status.verbose',
310 coreconfigitem(
388 default=False,
311 b'convert', b'bzr.saverev', default=True,
389 )
312 )
390 coreconfigitem(
313 coreconfigitem(
391 b'commands',
314 b'convert', b'cvsps.cache', default=True,
392 b'update.check',
315 )
393 default=None,
316 coreconfigitem(
394 )
317 b'convert', b'cvsps.fuzz', default=60,
395 coreconfigitem(
318 )
396 b'commands',
319 coreconfigitem(
397 b'update.requiredest',
320 b'convert', b'cvsps.logencoding', default=None,
398 default=False,
321 )
399 )
322 coreconfigitem(
400 coreconfigitem(
323 b'convert', b'cvsps.mergefrom', default=None,
401 b'committemplate',
324 )
402 b'.*',
325 coreconfigitem(
403 default=None,
326 b'convert', b'cvsps.mergeto', default=None,
404 generic=True,
327 )
405 )
328 coreconfigitem(
406 coreconfigitem(
329 b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
407 b'convert',
330 )
408 b'bzr.saverev',
331 coreconfigitem(
409 default=True,
332 b'convert', b'git.extrakeys', default=list,
410 )
333 )
411 coreconfigitem(
334 coreconfigitem(
412 b'convert',
335 b'convert', b'git.findcopiesharder', default=False,
413 b'cvsps.cache',
336 )
414 default=True,
337 coreconfigitem(
415 )
338 b'convert', b'git.remoteprefix', default=b'remote',
416 coreconfigitem(
339 )
417 b'convert',
340 coreconfigitem(
418 b'cvsps.fuzz',
341 b'convert', b'git.renamelimit', default=400,
419 default=60,
342 )
420 )
343 coreconfigitem(
421 coreconfigitem(
344 b'convert', b'git.saverev', default=True,
422 b'convert',
345 )
423 b'cvsps.logencoding',
346 coreconfigitem(
424 default=None,
347 b'convert', b'git.similarity', default=50,
425 )
348 )
426 coreconfigitem(
349 coreconfigitem(
427 b'convert',
350 b'convert', b'git.skipsubmodules', default=False,
428 b'cvsps.mergefrom',
351 )
429 default=None,
352 coreconfigitem(
430 )
353 b'convert', b'hg.clonebranches', default=False,
431 coreconfigitem(
354 )
432 b'convert',
355 coreconfigitem(
433 b'cvsps.mergeto',
356 b'convert', b'hg.ignoreerrors', default=False,
434 default=None,
357 )
435 )
358 coreconfigitem(
436 coreconfigitem(
359 b'convert', b'hg.preserve-hash', default=False,
437 b'convert',
360 )
438 b'git.committeractions',
361 coreconfigitem(
439 default=lambda: [b'messagedifferent'],
362 b'convert', b'hg.revs', default=None,
440 )
363 )
441 coreconfigitem(
364 coreconfigitem(
442 b'convert',
365 b'convert', b'hg.saverev', default=False,
443 b'git.extrakeys',
366 )
444 default=list,
367 coreconfigitem(
445 )
368 b'convert', b'hg.sourcename', default=None,
446 coreconfigitem(
369 )
447 b'convert',
370 coreconfigitem(
448 b'git.findcopiesharder',
371 b'convert', b'hg.startrev', default=None,
449 default=False,
372 )
450 )
373 coreconfigitem(
451 coreconfigitem(
374 b'convert', b'hg.tagsbranch', default=b'default',
452 b'convert',
375 )
453 b'git.remoteprefix',
376 coreconfigitem(
454 default=b'remote',
377 b'convert', b'hg.usebranchnames', default=True,
455 )
378 )
456 coreconfigitem(
379 coreconfigitem(
457 b'convert',
380 b'convert', b'ignoreancestorcheck', default=False, experimental=True,
458 b'git.renamelimit',
381 )
459 default=400,
382 coreconfigitem(
460 )
383 b'convert', b'localtimezone', default=False,
461 coreconfigitem(
384 )
462 b'convert',
385 coreconfigitem(
463 b'git.saverev',
386 b'convert', b'p4.encoding', default=dynamicdefault,
464 default=True,
387 )
465 )
388 coreconfigitem(
466 coreconfigitem(
389 b'convert', b'p4.startrev', default=0,
467 b'convert',
390 )
468 b'git.similarity',
391 coreconfigitem(
469 default=50,
392 b'convert', b'skiptags', default=False,
470 )
393 )
471 coreconfigitem(
394 coreconfigitem(
472 b'convert',
395 b'convert', b'svn.debugsvnlog', default=True,
473 b'git.skipsubmodules',
396 )
474 default=False,
397 coreconfigitem(
475 )
398 b'convert', b'svn.trunk', default=None,
476 coreconfigitem(
399 )
477 b'convert',
400 coreconfigitem(
478 b'hg.clonebranches',
401 b'convert', b'svn.tags', default=None,
479 default=False,
402 )
480 )
403 coreconfigitem(
481 coreconfigitem(
404 b'convert', b'svn.branches', default=None,
482 b'convert',
405 )
483 b'hg.ignoreerrors',
406 coreconfigitem(
484 default=False,
407 b'convert', b'svn.startrev', default=0,
485 )
408 )
486 coreconfigitem(
409 coreconfigitem(
487 b'convert',
410 b'debug', b'dirstate.delaywrite', default=0,
488 b'hg.preserve-hash',
411 )
489 default=False,
412 coreconfigitem(
490 )
413 b'defaults', b'.*', default=None, generic=True,
491 coreconfigitem(
414 )
492 b'convert',
415 coreconfigitem(
493 b'hg.revs',
416 b'devel', b'all-warnings', default=False,
494 default=None,
417 )
495 )
418 coreconfigitem(
496 coreconfigitem(
419 b'devel', b'bundle2.debug', default=False,
497 b'convert',
420 )
498 b'hg.saverev',
421 coreconfigitem(
499 default=False,
422 b'devel', b'bundle.delta', default=b'',
500 )
423 )
501 coreconfigitem(
424 coreconfigitem(
502 b'convert',
425 b'devel', b'cache-vfs', default=None,
503 b'hg.sourcename',
426 )
504 default=None,
427 coreconfigitem(
505 )
428 b'devel', b'check-locks', default=False,
506 coreconfigitem(
429 )
507 b'convert',
430 coreconfigitem(
508 b'hg.startrev',
431 b'devel', b'check-relroot', default=False,
509 default=None,
432 )
510 )
433 coreconfigitem(
511 coreconfigitem(
434 b'devel', b'default-date', default=None,
512 b'convert',
435 )
513 b'hg.tagsbranch',
436 coreconfigitem(
514 default=b'default',
437 b'devel', b'deprec-warn', default=False,
515 )
438 )
516 coreconfigitem(
439 coreconfigitem(
517 b'convert',
440 b'devel', b'disableloaddefaultcerts', default=False,
518 b'hg.usebranchnames',
441 )
519 default=True,
442 coreconfigitem(
520 )
443 b'devel', b'warn-empty-changegroup', default=False,
521 coreconfigitem(
444 )
522 b'convert',
445 coreconfigitem(
523 b'ignoreancestorcheck',
446 b'devel', b'legacy.exchange', default=list,
524 default=False,
447 )
525 experimental=True,
448 coreconfigitem(
526 )
449 b'devel', b'persistent-nodemap', default=False,
527 coreconfigitem(
450 )
528 b'convert',
451 coreconfigitem(
529 b'localtimezone',
452 b'devel', b'servercafile', default=b'',
530 default=False,
453 )
531 )
454 coreconfigitem(
532 coreconfigitem(
455 b'devel', b'serverexactprotocol', default=b'',
533 b'convert',
456 )
534 b'p4.encoding',
457 coreconfigitem(
535 default=dynamicdefault,
458 b'devel', b'serverrequirecert', default=False,
536 )
459 )
537 coreconfigitem(
460 coreconfigitem(
538 b'convert',
461 b'devel', b'strip-obsmarkers', default=True,
539 b'p4.startrev',
462 )
540 default=0,
463 coreconfigitem(
541 )
464 b'devel', b'warn-config', default=None,
542 coreconfigitem(
465 )
543 b'convert',
466 coreconfigitem(
544 b'skiptags',
467 b'devel', b'warn-config-default', default=None,
545 default=False,
468 )
546 )
469 coreconfigitem(
547 coreconfigitem(
470 b'devel', b'user.obsmarker', default=None,
548 b'convert',
471 )
549 b'svn.debugsvnlog',
472 coreconfigitem(
550 default=True,
473 b'devel', b'warn-config-unknown', default=None,
551 )
474 )
552 coreconfigitem(
475 coreconfigitem(
553 b'convert',
476 b'devel', b'debug.copies', default=False,
554 b'svn.trunk',
477 )
555 default=None,
478 coreconfigitem(
556 )
479 b'devel', b'debug.extensions', default=False,
557 coreconfigitem(
480 )
558 b'convert',
481 coreconfigitem(
559 b'svn.tags',
482 b'devel', b'debug.repo-filters', default=False,
560 default=None,
483 )
561 )
484 coreconfigitem(
562 coreconfigitem(
485 b'devel', b'debug.peer-request', default=False,
563 b'convert',
486 )
564 b'svn.branches',
487 coreconfigitem(
565 default=None,
488 b'devel', b'discovery.randomize', default=True,
566 )
567 coreconfigitem(
568 b'convert',
569 b'svn.startrev',
570 default=0,
571 )
572 coreconfigitem(
573 b'debug',
574 b'dirstate.delaywrite',
575 default=0,
576 )
577 coreconfigitem(
578 b'defaults',
579 b'.*',
580 default=None,
581 generic=True,
582 )
583 coreconfigitem(
584 b'devel',
585 b'all-warnings',
586 default=False,
587 )
588 coreconfigitem(
589 b'devel',
590 b'bundle2.debug',
591 default=False,
592 )
593 coreconfigitem(
594 b'devel',
595 b'bundle.delta',
596 default=b'',
597 )
598 coreconfigitem(
599 b'devel',
600 b'cache-vfs',
601 default=None,
602 )
603 coreconfigitem(
604 b'devel',
605 b'check-locks',
606 default=False,
607 )
608 coreconfigitem(
609 b'devel',
610 b'check-relroot',
611 default=False,
612 )
613 coreconfigitem(
614 b'devel',
615 b'default-date',
616 default=None,
617 )
618 coreconfigitem(
619 b'devel',
620 b'deprec-warn',
621 default=False,
622 )
623 coreconfigitem(
624 b'devel',
625 b'disableloaddefaultcerts',
626 default=False,
627 )
628 coreconfigitem(
629 b'devel',
630 b'warn-empty-changegroup',
631 default=False,
632 )
633 coreconfigitem(
634 b'devel',
635 b'legacy.exchange',
636 default=list,
637 )
638 coreconfigitem(
639 b'devel',
640 b'persistent-nodemap',
641 default=False,
642 )
643 coreconfigitem(
644 b'devel',
645 b'servercafile',
646 default=b'',
647 )
648 coreconfigitem(
649 b'devel',
650 b'serverexactprotocol',
651 default=b'',
652 )
653 coreconfigitem(
654 b'devel',
655 b'serverrequirecert',
656 default=False,
657 )
658 coreconfigitem(
659 b'devel',
660 b'strip-obsmarkers',
661 default=True,
662 )
663 coreconfigitem(
664 b'devel',
665 b'warn-config',
666 default=None,
667 )
668 coreconfigitem(
669 b'devel',
670 b'warn-config-default',
671 default=None,
672 )
673 coreconfigitem(
674 b'devel',
675 b'user.obsmarker',
676 default=None,
677 )
678 coreconfigitem(
679 b'devel',
680 b'warn-config-unknown',
681 default=None,
682 )
683 coreconfigitem(
684 b'devel',
685 b'debug.copies',
686 default=False,
687 )
688 coreconfigitem(
689 b'devel',
690 b'debug.extensions',
691 default=False,
692 )
693 coreconfigitem(
694 b'devel',
695 b'debug.repo-filters',
696 default=False,
697 )
698 coreconfigitem(
699 b'devel',
700 b'debug.peer-request',
701 default=False,
702 )
703 coreconfigitem(
704 b'devel',
705 b'discovery.randomize',
706 default=True,
489 )
707 )
490 _registerdiffopts(section=b'diff')
708 _registerdiffopts(section=b'diff')
491 coreconfigitem(
709 coreconfigitem(
492 b'email', b'bcc', default=None,
710 b'email',
493 )
711 b'bcc',
494 coreconfigitem(
712 default=None,
495 b'email', b'cc', default=None,
713 )
496 )
714 coreconfigitem(
497 coreconfigitem(
715 b'email',
498 b'email', b'charsets', default=list,
716 b'cc',
499 )
717 default=None,
500 coreconfigitem(
718 )
501 b'email', b'from', default=None,
719 coreconfigitem(
502 )
720 b'email',
503 coreconfigitem(
721 b'charsets',
504 b'email', b'method', default=b'smtp',
722 default=list,
505 )
723 )
506 coreconfigitem(
724 coreconfigitem(
507 b'email', b'reply-to', default=None,
725 b'email',
508 )
726 b'from',
509 coreconfigitem(
727 default=None,
510 b'email', b'to', default=None,
728 )
511 )
729 coreconfigitem(
512 coreconfigitem(
730 b'email',
513 b'experimental', b'archivemetatemplate', default=dynamicdefault,
731 b'method',
514 )
732 default=b'smtp',
515 coreconfigitem(
733 )
516 b'experimental', b'auto-publish', default=b'publish',
734 coreconfigitem(
517 )
735 b'email',
518 coreconfigitem(
736 b'reply-to',
519 b'experimental', b'bundle-phases', default=False,
737 default=None,
520 )
738 )
521 coreconfigitem(
739 coreconfigitem(
522 b'experimental', b'bundle2-advertise', default=True,
740 b'email',
523 )
741 b'to',
524 coreconfigitem(
742 default=None,
525 b'experimental', b'bundle2-output-capture', default=False,
743 )
526 )
744 coreconfigitem(
527 coreconfigitem(
745 b'experimental',
528 b'experimental', b'bundle2.pushback', default=False,
746 b'archivemetatemplate',
529 )
747 default=dynamicdefault,
530 coreconfigitem(
748 )
531 b'experimental', b'bundle2lazylocking', default=False,
749 coreconfigitem(
532 )
750 b'experimental',
533 coreconfigitem(
751 b'auto-publish',
534 b'experimental', b'bundlecomplevel', default=None,
752 default=b'publish',
535 )
753 )
536 coreconfigitem(
754 coreconfigitem(
537 b'experimental', b'bundlecomplevel.bzip2', default=None,
755 b'experimental',
538 )
756 b'bundle-phases',
539 coreconfigitem(
757 default=False,
540 b'experimental', b'bundlecomplevel.gzip', default=None,
758 )
541 )
759 coreconfigitem(
542 coreconfigitem(
760 b'experimental',
543 b'experimental', b'bundlecomplevel.none', default=None,
761 b'bundle2-advertise',
544 )
762 default=True,
545 coreconfigitem(
763 )
546 b'experimental', b'bundlecomplevel.zstd', default=None,
764 coreconfigitem(
547 )
765 b'experimental',
548 coreconfigitem(
766 b'bundle2-output-capture',
549 b'experimental', b'changegroup3', default=False,
767 default=False,
550 )
768 )
551 coreconfigitem(
769 coreconfigitem(
552 b'experimental', b'cleanup-as-archived', default=False,
770 b'experimental',
553 )
771 b'bundle2.pushback',
554 coreconfigitem(
772 default=False,
555 b'experimental', b'clientcompressionengines', default=list,
773 )
556 )
774 coreconfigitem(
557 coreconfigitem(
775 b'experimental',
558 b'experimental', b'copytrace', default=b'on',
776 b'bundle2lazylocking',
559 )
777 default=False,
560 coreconfigitem(
778 )
561 b'experimental', b'copytrace.movecandidateslimit', default=100,
779 coreconfigitem(
562 )
780 b'experimental',
563 coreconfigitem(
781 b'bundlecomplevel',
564 b'experimental', b'copytrace.sourcecommitlimit', default=100,
782 default=None,
565 )
783 )
566 coreconfigitem(
784 coreconfigitem(
567 b'experimental', b'copies.read-from', default=b"filelog-only",
785 b'experimental',
568 )
786 b'bundlecomplevel.bzip2',
569 coreconfigitem(
787 default=None,
570 b'experimental', b'copies.write-to', default=b'filelog-only',
788 )
571 )
789 coreconfigitem(
572 coreconfigitem(
790 b'experimental',
573 b'experimental', b'crecordtest', default=None,
791 b'bundlecomplevel.gzip',
574 )
792 default=None,
575 coreconfigitem(
793 )
576 b'experimental', b'directaccess', default=False,
794 coreconfigitem(
577 )
795 b'experimental',
578 coreconfigitem(
796 b'bundlecomplevel.none',
579 b'experimental', b'directaccess.revnums', default=False,
797 default=None,
580 )
798 )
581 coreconfigitem(
799 coreconfigitem(
582 b'experimental', b'editortmpinhg', default=False,
800 b'experimental',
583 )
801 b'bundlecomplevel.zstd',
584 coreconfigitem(
802 default=None,
585 b'experimental', b'evolution', default=list,
803 )
804 coreconfigitem(
805 b'experimental',
806 b'changegroup3',
807 default=False,
808 )
809 coreconfigitem(
810 b'experimental',
811 b'cleanup-as-archived',
812 default=False,
813 )
814 coreconfigitem(
815 b'experimental',
816 b'clientcompressionengines',
817 default=list,
818 )
819 coreconfigitem(
820 b'experimental',
821 b'copytrace',
822 default=b'on',
823 )
824 coreconfigitem(
825 b'experimental',
826 b'copytrace.movecandidateslimit',
827 default=100,
828 )
829 coreconfigitem(
830 b'experimental',
831 b'copytrace.sourcecommitlimit',
832 default=100,
833 )
834 coreconfigitem(
835 b'experimental',
836 b'copies.read-from',
837 default=b"filelog-only",
838 )
839 coreconfigitem(
840 b'experimental',
841 b'copies.write-to',
842 default=b'filelog-only',
843 )
844 coreconfigitem(
845 b'experimental',
846 b'crecordtest',
847 default=None,
848 )
849 coreconfigitem(
850 b'experimental',
851 b'directaccess',
852 default=False,
853 )
854 coreconfigitem(
855 b'experimental',
856 b'directaccess.revnums',
857 default=False,
858 )
859 coreconfigitem(
860 b'experimental',
861 b'editortmpinhg',
862 default=False,
863 )
864 coreconfigitem(
865 b'experimental',
866 b'evolution',
867 default=list,
586 )
868 )
587 coreconfigitem(
869 coreconfigitem(
588 b'experimental',
870 b'experimental',
@@ -591,10 +873,14 b' coreconfigitem('
591 alias=[(b'experimental', b'allowdivergence')],
873 alias=[(b'experimental', b'allowdivergence')],
592 )
874 )
593 coreconfigitem(
875 coreconfigitem(
594 b'experimental', b'evolution.allowunstable', default=None,
876 b'experimental',
595 )
877 b'evolution.allowunstable',
596 coreconfigitem(
878 default=None,
597 b'experimental', b'evolution.createmarkers', default=None,
879 )
880 coreconfigitem(
881 b'experimental',
882 b'evolution.createmarkers',
883 default=None,
598 )
884 )
599 coreconfigitem(
885 coreconfigitem(
600 b'experimental',
886 b'experimental',
@@ -603,109 +889,173 b' coreconfigitem('
603 alias=[(b'experimental', b'effect-flags')],
889 alias=[(b'experimental', b'effect-flags')],
604 )
890 )
605 coreconfigitem(
891 coreconfigitem(
606 b'experimental', b'evolution.exchange', default=None,
892 b'experimental',
607 )
893 b'evolution.exchange',
608 coreconfigitem(
894 default=None,
609 b'experimental', b'evolution.bundle-obsmarker', default=False,
895 )
610 )
896 coreconfigitem(
611 coreconfigitem(
897 b'experimental',
612 b'experimental', b'log.topo', default=False,
898 b'evolution.bundle-obsmarker',
613 )
899 default=False,
614 coreconfigitem(
900 )
615 b'experimental', b'evolution.report-instabilities', default=True,
901 coreconfigitem(
616 )
902 b'experimental',
617 coreconfigitem(
903 b'log.topo',
618 b'experimental', b'evolution.track-operation', default=True,
904 default=False,
905 )
906 coreconfigitem(
907 b'experimental',
908 b'evolution.report-instabilities',
909 default=True,
910 )
911 coreconfigitem(
912 b'experimental',
913 b'evolution.track-operation',
914 default=True,
619 )
915 )
620 # repo-level config to exclude a revset visibility
916 # repo-level config to exclude a revset visibility
621 #
917 #
622 # The target use case is to use `share` to expose different subset of the same
918 # The target use case is to use `share` to expose different subset of the same
623 # repository, especially server side. See also `server.view`.
919 # repository, especially server side. See also `server.view`.
624 coreconfigitem(
920 coreconfigitem(
625 b'experimental', b'extra-filter-revs', default=None,
921 b'experimental',
626 )
922 b'extra-filter-revs',
627 coreconfigitem(
923 default=None,
628 b'experimental', b'maxdeltachainspan', default=-1,
924 )
925 coreconfigitem(
926 b'experimental',
927 b'maxdeltachainspan',
928 default=-1,
629 )
929 )
630 # tracks files which were undeleted (merge might delete them but we explicitly
930 # tracks files which were undeleted (merge might delete them but we explicitly
631 # kept/undeleted them) and creates new filenodes for them
931 # kept/undeleted them) and creates new filenodes for them
632 coreconfigitem(
932 coreconfigitem(
633 b'experimental', b'merge-track-salvaged', default=False,
933 b'experimental',
634 )
934 b'merge-track-salvaged',
635 coreconfigitem(
935 default=False,
636 b'experimental', b'mergetempdirprefix', default=None,
936 )
637 )
937 coreconfigitem(
638 coreconfigitem(
938 b'experimental',
639 b'experimental', b'mmapindexthreshold', default=None,
939 b'mergetempdirprefix',
640 )
940 default=None,
641 coreconfigitem(
941 )
642 b'experimental', b'narrow', default=False,
942 coreconfigitem(
643 )
943 b'experimental',
644 coreconfigitem(
944 b'mmapindexthreshold',
645 b'experimental', b'nonnormalparanoidcheck', default=False,
945 default=None,
646 )
946 )
647 coreconfigitem(
947 coreconfigitem(
648 b'experimental', b'exportableenviron', default=list,
948 b'experimental',
649 )
949 b'narrow',
650 coreconfigitem(
950 default=False,
651 b'experimental', b'extendedheader.index', default=None,
951 )
652 )
952 coreconfigitem(
653 coreconfigitem(
953 b'experimental',
654 b'experimental', b'extendedheader.similarity', default=False,
954 b'nonnormalparanoidcheck',
655 )
955 default=False,
656 coreconfigitem(
956 )
657 b'experimental', b'graphshorten', default=False,
957 coreconfigitem(
658 )
958 b'experimental',
659 coreconfigitem(
959 b'exportableenviron',
660 b'experimental', b'graphstyle.parent', default=dynamicdefault,
960 default=list,
661 )
961 )
662 coreconfigitem(
962 coreconfigitem(
663 b'experimental', b'graphstyle.missing', default=dynamicdefault,
963 b'experimental',
664 )
964 b'extendedheader.index',
665 coreconfigitem(
965 default=None,
666 b'experimental', b'graphstyle.grandparent', default=dynamicdefault,
966 )
667 )
967 coreconfigitem(
668 coreconfigitem(
968 b'experimental',
669 b'experimental', b'hook-track-tags', default=False,
969 b'extendedheader.similarity',
670 )
970 default=False,
671 coreconfigitem(
971 )
672 b'experimental', b'httppeer.advertise-v2', default=False,
972 coreconfigitem(
673 )
973 b'experimental',
674 coreconfigitem(
974 b'graphshorten',
675 b'experimental', b'httppeer.v2-encoder-order', default=None,
975 default=False,
676 )
976 )
677 coreconfigitem(
977 coreconfigitem(
678 b'experimental', b'httppostargs', default=False,
978 b'experimental',
979 b'graphstyle.parent',
980 default=dynamicdefault,
981 )
982 coreconfigitem(
983 b'experimental',
984 b'graphstyle.missing',
985 default=dynamicdefault,
986 )
987 coreconfigitem(
988 b'experimental',
989 b'graphstyle.grandparent',
990 default=dynamicdefault,
991 )
992 coreconfigitem(
993 b'experimental',
994 b'hook-track-tags',
995 default=False,
996 )
997 coreconfigitem(
998 b'experimental',
999 b'httppeer.advertise-v2',
1000 default=False,
1001 )
1002 coreconfigitem(
1003 b'experimental',
1004 b'httppeer.v2-encoder-order',
1005 default=None,
1006 )
1007 coreconfigitem(
1008 b'experimental',
1009 b'httppostargs',
1010 default=False,
679 )
1011 )
680 coreconfigitem(b'experimental', b'nointerrupt', default=False)
1012 coreconfigitem(b'experimental', b'nointerrupt', default=False)
681 coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
1013 coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
682
1014
683 coreconfigitem(
1015 coreconfigitem(
684 b'experimental', b'obsmarkers-exchange-debug', default=False,
1016 b'experimental',
685 )
1017 b'obsmarkers-exchange-debug',
686 coreconfigitem(
1018 default=False,
687 b'experimental', b'remotenames', default=False,
1019 )
688 )
1020 coreconfigitem(
689 coreconfigitem(
1021 b'experimental',
690 b'experimental', b'removeemptydirs', default=True,
1022 b'remotenames',
691 )
1023 default=False,
692 coreconfigitem(
1024 )
693 b'experimental', b'revert.interactive.select-to-keep', default=False,
1025 coreconfigitem(
694 )
1026 b'experimental',
695 coreconfigitem(
1027 b'removeemptydirs',
696 b'experimental', b'revisions.prefixhexnode', default=False,
1028 default=True,
697 )
1029 )
698 coreconfigitem(
1030 coreconfigitem(
699 b'experimental', b'revlogv2', default=None,
1031 b'experimental',
700 )
1032 b'revert.interactive.select-to-keep',
701 coreconfigitem(
1033 default=False,
702 b'experimental', b'revisions.disambiguatewithin', default=None,
1034 )
703 )
1035 coreconfigitem(
704 coreconfigitem(
1036 b'experimental',
705 b'experimental', b'rust.index', default=False,
1037 b'revisions.prefixhexnode',
706 )
1038 default=False,
707 coreconfigitem(
1039 )
708 b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
1040 coreconfigitem(
1041 b'experimental',
1042 b'revlogv2',
1043 default=None,
1044 )
1045 coreconfigitem(
1046 b'experimental',
1047 b'revisions.disambiguatewithin',
1048 default=None,
1049 )
1050 coreconfigitem(
1051 b'experimental',
1052 b'rust.index',
1053 default=False,
1054 )
1055 coreconfigitem(
1056 b'experimental',
1057 b'server.filesdata.recommended-batch-size',
1058 default=50000,
709 )
1059 )
710 coreconfigitem(
1060 coreconfigitem(
711 b'experimental',
1061 b'experimental',
@@ -713,10 +1063,14 b' coreconfigitem('
713 default=100000,
1063 default=100000,
714 )
1064 )
715 coreconfigitem(
1065 coreconfigitem(
716 b'experimental', b'server.stream-narrow-clones', default=False,
1066 b'experimental',
717 )
1067 b'server.stream-narrow-clones',
718 coreconfigitem(
1068 default=False,
719 b'experimental', b'single-head-per-branch', default=False,
1069 )
1070 coreconfigitem(
1071 b'experimental',
1072 b'single-head-per-branch',
1073 default=False,
720 )
1074 )
721 coreconfigitem(
1075 coreconfigitem(
722 b'experimental',
1076 b'experimental',
@@ -724,73 +1078,125 b' coreconfigitem('
724 default=False,
1078 default=False,
725 )
1079 )
726 coreconfigitem(
1080 coreconfigitem(
727 b'experimental', b'sshserver.support-v2', default=False,
1081 b'experimental',
728 )
1082 b'sshserver.support-v2',
729 coreconfigitem(
1083 default=False,
730 b'experimental', b'sparse-read', default=False,
1084 )
731 )
1085 coreconfigitem(
732 coreconfigitem(
1086 b'experimental',
733 b'experimental', b'sparse-read.density-threshold', default=0.50,
1087 b'sparse-read',
734 )
1088 default=False,
735 coreconfigitem(
1089 )
736 b'experimental', b'sparse-read.min-gap-size', default=b'65K',
1090 coreconfigitem(
737 )
1091 b'experimental',
738 coreconfigitem(
1092 b'sparse-read.density-threshold',
739 b'experimental', b'treemanifest', default=False,
1093 default=0.50,
740 )
1094 )
741 coreconfigitem(
1095 coreconfigitem(
742 b'experimental', b'update.atomic-file', default=False,
1096 b'experimental',
743 )
1097 b'sparse-read.min-gap-size',
744 coreconfigitem(
1098 default=b'65K',
745 b'experimental', b'sshpeer.advertise-v2', default=False,
1099 )
746 )
1100 coreconfigitem(
747 coreconfigitem(
1101 b'experimental',
748 b'experimental', b'web.apiserver', default=False,
1102 b'treemanifest',
749 )
1103 default=False,
750 coreconfigitem(
1104 )
751 b'experimental', b'web.api.http-v2', default=False,
1105 coreconfigitem(
752 )
1106 b'experimental',
753 coreconfigitem(
1107 b'update.atomic-file',
754 b'experimental', b'web.api.debugreflect', default=False,
1108 default=False,
755 )
1109 )
756 coreconfigitem(
1110 coreconfigitem(
757 b'experimental', b'worker.wdir-get-thread-safe', default=False,
1111 b'experimental',
758 )
1112 b'sshpeer.advertise-v2',
759 coreconfigitem(
1113 default=False,
760 b'experimental', b'worker.repository-upgrade', default=False,
1114 )
761 )
1115 coreconfigitem(
762 coreconfigitem(
1116 b'experimental',
763 b'experimental', b'xdiff', default=False,
1117 b'web.apiserver',
764 )
1118 default=False,
765 coreconfigitem(
1119 )
766 b'extensions', b'.*', default=None, generic=True,
1120 coreconfigitem(
767 )
1121 b'experimental',
768 coreconfigitem(
1122 b'web.api.http-v2',
769 b'extdata', b'.*', default=None, generic=True,
1123 default=False,
770 )
1124 )
771 coreconfigitem(
1125 coreconfigitem(
772 b'format', b'bookmarks-in-store', default=False,
1126 b'experimental',
773 )
1127 b'web.api.debugreflect',
774 coreconfigitem(
1128 default=False,
775 b'format', b'chunkcachesize', default=None, experimental=True,
1129 )
776 )
1130 coreconfigitem(
777 coreconfigitem(
1131 b'experimental',
778 b'format', b'dotencode', default=True,
1132 b'worker.wdir-get-thread-safe',
779 )
1133 default=False,
780 coreconfigitem(
1134 )
781 b'format', b'generaldelta', default=False, experimental=True,
1135 coreconfigitem(
782 )
1136 b'experimental',
783 coreconfigitem(
1137 b'worker.repository-upgrade',
784 b'format', b'manifestcachesize', default=None, experimental=True,
1138 default=False,
785 )
1139 )
786 coreconfigitem(
1140 coreconfigitem(
787 b'format', b'maxchainlen', default=dynamicdefault, experimental=True,
1141 b'experimental',
788 )
1142 b'xdiff',
789 coreconfigitem(
1143 default=False,
790 b'format', b'obsstore-version', default=None,
1144 )
791 )
1145 coreconfigitem(
792 coreconfigitem(
1146 b'extensions',
793 b'format', b'sparse-revlog', default=True,
1147 b'.*',
1148 default=None,
1149 generic=True,
1150 )
1151 coreconfigitem(
1152 b'extdata',
1153 b'.*',
1154 default=None,
1155 generic=True,
1156 )
1157 coreconfigitem(
1158 b'format',
1159 b'bookmarks-in-store',
1160 default=False,
1161 )
1162 coreconfigitem(
1163 b'format',
1164 b'chunkcachesize',
1165 default=None,
1166 experimental=True,
1167 )
1168 coreconfigitem(
1169 b'format',
1170 b'dotencode',
1171 default=True,
1172 )
1173 coreconfigitem(
1174 b'format',
1175 b'generaldelta',
1176 default=False,
1177 experimental=True,
1178 )
1179 coreconfigitem(
1180 b'format',
1181 b'manifestcachesize',
1182 default=None,
1183 experimental=True,
1184 )
1185 coreconfigitem(
1186 b'format',
1187 b'maxchainlen',
1188 default=dynamicdefault,
1189 experimental=True,
1190 )
1191 coreconfigitem(
1192 b'format',
1193 b'obsstore-version',
1194 default=None,
1195 )
1196 coreconfigitem(
1197 b'format',
1198 b'sparse-revlog',
1199 default=True,
794 )
1200 )
795 coreconfigitem(
1201 coreconfigitem(
796 b'format',
1202 b'format',
@@ -799,13 +1205,19 b' coreconfigitem('
799 alias=[(b'experimental', b'format.compression')],
1205 alias=[(b'experimental', b'format.compression')],
800 )
1206 )
801 coreconfigitem(
1207 coreconfigitem(
802 b'format', b'usefncache', default=True,
1208 b'format',
803 )
1209 b'usefncache',
804 coreconfigitem(
1210 default=True,
805 b'format', b'usegeneraldelta', default=True,
1211 )
806 )
1212 coreconfigitem(
807 coreconfigitem(
1213 b'format',
808 b'format', b'usestore', default=True,
1214 b'usegeneraldelta',
1215 default=True,
1216 )
1217 coreconfigitem(
1218 b'format',
1219 b'usestore',
1220 default=True,
809 )
1221 )
810 # Right now, the only efficient implement of the nodemap logic is in Rust, so
1222 # Right now, the only efficient implement of the nodemap logic is in Rust, so
811 # the persistent nodemap feature needs to stay experimental as long as the Rust
1223 # the persistent nodemap feature needs to stay experimental as long as the Rust
@@ -820,43 +1232,77 b' coreconfigitem('
820 experimental=True,
1232 experimental=True,
821 )
1233 )
822 coreconfigitem(
1234 coreconfigitem(
823 b'format', b'exp-use-side-data', default=False, experimental=True,
1235 b'format',
824 )
1236 b'exp-use-side-data',
825 coreconfigitem(
1237 default=False,
826 b'format', b'exp-share-safe', default=False, experimental=True,
1238 experimental=True,
827 )
1239 )
828 coreconfigitem(
1240 coreconfigitem(
829 b'format', b'internal-phase', default=False, experimental=True,
1241 b'format',
830 )
1242 b'exp-share-safe',
831 coreconfigitem(
1243 default=False,
832 b'fsmonitor', b'warn_when_unused', default=True,
1244 experimental=True,
833 )
1245 )
834 coreconfigitem(
1246 coreconfigitem(
835 b'fsmonitor', b'warn_update_file_count', default=50000,
1247 b'format',
836 )
1248 b'internal-phase',
837 coreconfigitem(
1249 default=False,
838 b'fsmonitor', b'warn_update_file_count_rust', default=400000,
1250 experimental=True,
839 )
1251 )
840 coreconfigitem(
1252 coreconfigitem(
841 b'help', br'hidden-command\..*', default=False, generic=True,
1253 b'fsmonitor',
842 )
1254 b'warn_when_unused',
843 coreconfigitem(
1255 default=True,
844 b'help', br'hidden-topic\..*', default=False, generic=True,
1256 )
845 )
1257 coreconfigitem(
846 coreconfigitem(
1258 b'fsmonitor',
847 b'hooks', b'.*', default=dynamicdefault, generic=True,
1259 b'warn_update_file_count',
848 )
1260 default=50000,
849 coreconfigitem(
1261 )
850 b'hgweb-paths', b'.*', default=list, generic=True,
1262 coreconfigitem(
851 )
1263 b'fsmonitor',
852 coreconfigitem(
1264 b'warn_update_file_count_rust',
853 b'hostfingerprints', b'.*', default=list, generic=True,
1265 default=400000,
854 )
1266 )
855 coreconfigitem(
1267 coreconfigitem(
856 b'hostsecurity', b'ciphers', default=None,
1268 b'help',
857 )
1269 br'hidden-command\..*',
858 coreconfigitem(
1270 default=False,
859 b'hostsecurity', b'minimumprotocol', default=dynamicdefault,
1271 generic=True,
1272 )
1273 coreconfigitem(
1274 b'help',
1275 br'hidden-topic\..*',
1276 default=False,
1277 generic=True,
1278 )
1279 coreconfigitem(
1280 b'hooks',
1281 b'.*',
1282 default=dynamicdefault,
1283 generic=True,
1284 )
1285 coreconfigitem(
1286 b'hgweb-paths',
1287 b'.*',
1288 default=list,
1289 generic=True,
1290 )
1291 coreconfigitem(
1292 b'hostfingerprints',
1293 b'.*',
1294 default=list,
1295 generic=True,
1296 )
1297 coreconfigitem(
1298 b'hostsecurity',
1299 b'ciphers',
1300 default=None,
1301 )
1302 coreconfigitem(
1303 b'hostsecurity',
1304 b'minimumprotocol',
1305 default=dynamicdefault,
860 )
1306 )
861 coreconfigitem(
1307 coreconfigitem(
862 b'hostsecurity',
1308 b'hostsecurity',
@@ -865,73 +1311,122 b' coreconfigitem('
865 generic=True,
1311 generic=True,
866 )
1312 )
867 coreconfigitem(
1313 coreconfigitem(
868 b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True,
1314 b'hostsecurity',
869 )
1315 b'.*:ciphers$',
870 coreconfigitem(
1316 default=dynamicdefault,
871 b'hostsecurity', b'.*:fingerprints$', default=list, generic=True,
1317 generic=True,
872 )
1318 )
873 coreconfigitem(
1319 coreconfigitem(
874 b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True,
1320 b'hostsecurity',
1321 b'.*:fingerprints$',
1322 default=list,
1323 generic=True,
1324 )
1325 coreconfigitem(
1326 b'hostsecurity',
1327 b'.*:verifycertsfile$',
1328 default=None,
1329 generic=True,
875 )
1330 )
876
1331
877 coreconfigitem(
1332 coreconfigitem(
878 b'http_proxy', b'always', default=False,
1333 b'http_proxy',
879 )
1334 b'always',
880 coreconfigitem(
1335 default=False,
881 b'http_proxy', b'host', default=None,
1336 )
882 )
1337 coreconfigitem(
883 coreconfigitem(
1338 b'http_proxy',
884 b'http_proxy', b'no', default=list,
1339 b'host',
885 )
1340 default=None,
886 coreconfigitem(
1341 )
887 b'http_proxy', b'passwd', default=None,
1342 coreconfigitem(
888 )
1343 b'http_proxy',
889 coreconfigitem(
1344 b'no',
890 b'http_proxy', b'user', default=None,
1345 default=list,
1346 )
1347 coreconfigitem(
1348 b'http_proxy',
1349 b'passwd',
1350 default=None,
1351 )
1352 coreconfigitem(
1353 b'http_proxy',
1354 b'user',
1355 default=None,
891 )
1356 )
892
1357
893 coreconfigitem(
1358 coreconfigitem(
894 b'http', b'timeout', default=None,
1359 b'http',
1360 b'timeout',
1361 default=None,
895 )
1362 )
896
1363
897 coreconfigitem(
1364 coreconfigitem(
898 b'logtoprocess', b'commandexception', default=None,
1365 b'logtoprocess',
899 )
1366 b'commandexception',
900 coreconfigitem(
1367 default=None,
901 b'logtoprocess', b'commandfinish', default=None,
1368 )
902 )
1369 coreconfigitem(
903 coreconfigitem(
1370 b'logtoprocess',
904 b'logtoprocess', b'command', default=None,
1371 b'commandfinish',
905 )
1372 default=None,
906 coreconfigitem(
1373 )
907 b'logtoprocess', b'develwarn', default=None,
1374 coreconfigitem(
908 )
1375 b'logtoprocess',
909 coreconfigitem(
1376 b'command',
910 b'logtoprocess', b'uiblocked', default=None,
1377 default=None,
911 )
1378 )
912 coreconfigitem(
1379 coreconfigitem(
913 b'merge', b'checkunknown', default=b'abort',
1380 b'logtoprocess',
914 )
1381 b'develwarn',
915 coreconfigitem(
1382 default=None,
916 b'merge', b'checkignored', default=b'abort',
1383 )
917 )
1384 coreconfigitem(
918 coreconfigitem(
1385 b'logtoprocess',
919 b'experimental', b'merge.checkpathconflicts', default=False,
1386 b'uiblocked',
920 )
1387 default=None,
921 coreconfigitem(
1388 )
922 b'merge', b'followcopies', default=True,
1389 coreconfigitem(
923 )
1390 b'merge',
924 coreconfigitem(
1391 b'checkunknown',
925 b'merge', b'on-failure', default=b'continue',
1392 default=b'abort',
926 )
1393 )
927 coreconfigitem(
1394 coreconfigitem(
928 b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True,
1395 b'merge',
929 )
1396 b'checkignored',
930 coreconfigitem(
1397 default=b'abort',
931 b'merge', b'strict-capability-check', default=False,
1398 )
932 )
1399 coreconfigitem(
933 coreconfigitem(
1400 b'experimental',
934 b'merge-tools', b'.*', default=None, generic=True,
1401 b'merge.checkpathconflicts',
1402 default=False,
1403 )
1404 coreconfigitem(
1405 b'merge',
1406 b'followcopies',
1407 default=True,
1408 )
1409 coreconfigitem(
1410 b'merge',
1411 b'on-failure',
1412 default=b'continue',
1413 )
1414 coreconfigitem(
1415 b'merge',
1416 b'preferancestor',
1417 default=lambda: [b'*'],
1418 experimental=True,
1419 )
1420 coreconfigitem(
1421 b'merge',
1422 b'strict-capability-check',
1423 default=False,
1424 )
1425 coreconfigitem(
1426 b'merge-tools',
1427 b'.*',
1428 default=None,
1429 generic=True,
935 )
1430 )
936 coreconfigitem(
1431 coreconfigitem(
937 b'merge-tools',
1432 b'merge-tools',
@@ -941,10 +1436,18 b' coreconfigitem('
941 priority=-1,
1436 priority=-1,
942 )
1437 )
943 coreconfigitem(
1438 coreconfigitem(
944 b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
1439 b'merge-tools',
945 )
1440 br'.*\.binary$',
946 coreconfigitem(
1441 default=False,
947 b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
1442 generic=True,
1443 priority=-1,
1444 )
1445 coreconfigitem(
1446 b'merge-tools',
1447 br'.*\.check$',
1448 default=list,
1449 generic=True,
1450 priority=-1,
948 )
1451 )
949 coreconfigitem(
1452 coreconfigitem(
950 b'merge-tools',
1453 b'merge-tools',
@@ -961,10 +1464,18 b' coreconfigitem('
961 priority=-1,
1464 priority=-1,
962 )
1465 )
963 coreconfigitem(
1466 coreconfigitem(
964 b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
1467 b'merge-tools',
965 )
1468 br'.*\.fixeol$',
966 coreconfigitem(
1469 default=False,
967 b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
1470 generic=True,
1471 priority=-1,
1472 )
1473 coreconfigitem(
1474 b'merge-tools',
1475 br'.*\.gui$',
1476 default=False,
1477 generic=True,
1478 priority=-1,
968 )
1479 )
969 coreconfigitem(
1480 coreconfigitem(
970 b'merge-tools',
1481 b'merge-tools',
@@ -981,7 +1492,11 b' coreconfigitem('
981 priority=-1,
1492 priority=-1,
982 )
1493 )
983 coreconfigitem(
1494 coreconfigitem(
984 b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
1495 b'merge-tools',
1496 br'.*\.priority$',
1497 default=0,
1498 generic=True,
1499 priority=-1,
985 )
1500 )
986 coreconfigitem(
1501 coreconfigitem(
987 b'merge-tools',
1502 b'merge-tools',
@@ -991,100 +1506,168 b' coreconfigitem('
991 priority=-1,
1506 priority=-1,
992 )
1507 )
993 coreconfigitem(
1508 coreconfigitem(
994 b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
1509 b'merge-tools',
995 )
1510 br'.*\.symlink$',
996 coreconfigitem(
1511 default=False,
997 b'pager', b'attend-.*', default=dynamicdefault, generic=True,
1512 generic=True,
998 )
1513 priority=-1,
999 coreconfigitem(
1514 )
1000 b'pager', b'ignore', default=list,
1515 coreconfigitem(
1001 )
1516 b'pager',
1002 coreconfigitem(
1517 b'attend-.*',
1003 b'pager', b'pager', default=dynamicdefault,
1518 default=dynamicdefault,
1004 )
1519 generic=True,
1005 coreconfigitem(
1520 )
1006 b'patch', b'eol', default=b'strict',
1521 coreconfigitem(
1007 )
1522 b'pager',
1008 coreconfigitem(
1523 b'ignore',
1009 b'patch', b'fuzz', default=2,
1524 default=list,
1010 )
1525 )
1011 coreconfigitem(
1526 coreconfigitem(
1012 b'paths', b'default', default=None,
1527 b'pager',
1013 )
1528 b'pager',
1014 coreconfigitem(
1529 default=dynamicdefault,
1015 b'paths', b'default-push', default=None,
1530 )
1016 )
1531 coreconfigitem(
1017 coreconfigitem(
1532 b'patch',
1018 b'paths', b'.*', default=None, generic=True,
1533 b'eol',
1019 )
1534 default=b'strict',
1020 coreconfigitem(
1535 )
1021 b'phases', b'checksubrepos', default=b'follow',
1536 coreconfigitem(
1022 )
1537 b'patch',
1023 coreconfigitem(
1538 b'fuzz',
1024 b'phases', b'new-commit', default=b'draft',
1539 default=2,
1025 )
1540 )
1026 coreconfigitem(
1541 coreconfigitem(
1027 b'phases', b'publish', default=True,
1542 b'paths',
1028 )
1543 b'default',
1029 coreconfigitem(
1544 default=None,
1030 b'profiling', b'enabled', default=False,
1545 )
1031 )
1546 coreconfigitem(
1032 coreconfigitem(
1547 b'paths',
1033 b'profiling', b'format', default=b'text',
1548 b'default-push',
1034 )
1549 default=None,
1035 coreconfigitem(
1550 )
1036 b'profiling', b'freq', default=1000,
1551 coreconfigitem(
1037 )
1552 b'paths',
1038 coreconfigitem(
1553 b'.*',
1039 b'profiling', b'limit', default=30,
1554 default=None,
1040 )
1555 generic=True,
1041 coreconfigitem(
1556 )
1042 b'profiling', b'nested', default=0,
1557 coreconfigitem(
1043 )
1558 b'phases',
1044 coreconfigitem(
1559 b'checksubrepos',
1045 b'profiling', b'output', default=None,
1560 default=b'follow',
1046 )
1561 )
1047 coreconfigitem(
1562 coreconfigitem(
1048 b'profiling', b'showmax', default=0.999,
1563 b'phases',
1049 )
1564 b'new-commit',
1050 coreconfigitem(
1565 default=b'draft',
1051 b'profiling', b'showmin', default=dynamicdefault,
1566 )
1052 )
1567 coreconfigitem(
1053 coreconfigitem(
1568 b'phases',
1054 b'profiling', b'showtime', default=True,
1569 b'publish',
1055 )
1570 default=True,
1056 coreconfigitem(
1571 )
1057 b'profiling', b'sort', default=b'inlinetime',
1572 coreconfigitem(
1058 )
1573 b'profiling',
1059 coreconfigitem(
1574 b'enabled',
1060 b'profiling', b'statformat', default=b'hotpath',
1575 default=False,
1061 )
1576 )
1062 coreconfigitem(
1577 coreconfigitem(
1063 b'profiling', b'time-track', default=dynamicdefault,
1578 b'profiling',
1064 )
1579 b'format',
1065 coreconfigitem(
1580 default=b'text',
1066 b'profiling', b'type', default=b'stat',
1581 )
1067 )
1582 coreconfigitem(
1068 coreconfigitem(
1583 b'profiling',
1069 b'progress', b'assume-tty', default=False,
1584 b'freq',
1070 )
1585 default=1000,
1071 coreconfigitem(
1586 )
1072 b'progress', b'changedelay', default=1,
1587 coreconfigitem(
1073 )
1588 b'profiling',
1074 coreconfigitem(
1589 b'limit',
1075 b'progress', b'clear-complete', default=True,
1590 default=30,
1076 )
1591 )
1077 coreconfigitem(
1592 coreconfigitem(
1078 b'progress', b'debug', default=False,
1593 b'profiling',
1079 )
1594 b'nested',
1080 coreconfigitem(
1595 default=0,
1081 b'progress', b'delay', default=3,
1596 )
1082 )
1597 coreconfigitem(
1083 coreconfigitem(
1598 b'profiling',
1084 b'progress', b'disable', default=False,
1599 b'output',
1085 )
1600 default=None,
1086 coreconfigitem(
1601 )
1087 b'progress', b'estimateinterval', default=60.0,
1602 coreconfigitem(
1603 b'profiling',
1604 b'showmax',
1605 default=0.999,
1606 )
1607 coreconfigitem(
1608 b'profiling',
1609 b'showmin',
1610 default=dynamicdefault,
1611 )
1612 coreconfigitem(
1613 b'profiling',
1614 b'showtime',
1615 default=True,
1616 )
1617 coreconfigitem(
1618 b'profiling',
1619 b'sort',
1620 default=b'inlinetime',
1621 )
1622 coreconfigitem(
1623 b'profiling',
1624 b'statformat',
1625 default=b'hotpath',
1626 )
1627 coreconfigitem(
1628 b'profiling',
1629 b'time-track',
1630 default=dynamicdefault,
1631 )
1632 coreconfigitem(
1633 b'profiling',
1634 b'type',
1635 default=b'stat',
1636 )
1637 coreconfigitem(
1638 b'progress',
1639 b'assume-tty',
1640 default=False,
1641 )
1642 coreconfigitem(
1643 b'progress',
1644 b'changedelay',
1645 default=1,
1646 )
1647 coreconfigitem(
1648 b'progress',
1649 b'clear-complete',
1650 default=True,
1651 )
1652 coreconfigitem(
1653 b'progress',
1654 b'debug',
1655 default=False,
1656 )
1657 coreconfigitem(
1658 b'progress',
1659 b'delay',
1660 default=3,
1661 )
1662 coreconfigitem(
1663 b'progress',
1664 b'disable',
1665 default=False,
1666 )
1667 coreconfigitem(
1668 b'progress',
1669 b'estimateinterval',
1670 default=60.0,
1088 )
1671 )
1089 coreconfigitem(
1672 coreconfigitem(
1090 b'progress',
1673 b'progress',
@@ -1092,16 +1675,24 b' coreconfigitem('
1092 default=lambda: [b'topic', b'bar', b'number', b'estimate'],
1675 default=lambda: [b'topic', b'bar', b'number', b'estimate'],
1093 )
1676 )
1094 coreconfigitem(
1677 coreconfigitem(
1095 b'progress', b'refresh', default=0.1,
1678 b'progress',
1096 )
1679 b'refresh',
1097 coreconfigitem(
1680 default=0.1,
1098 b'progress', b'width', default=dynamicdefault,
1681 )
1099 )
1682 coreconfigitem(
1100 coreconfigitem(
1683 b'progress',
1101 b'pull', b'confirm', default=False,
1684 b'width',
1102 )
1685 default=dynamicdefault,
1103 coreconfigitem(
1686 )
1104 b'push', b'pushvars.server', default=False,
1687 coreconfigitem(
1688 b'pull',
1689 b'confirm',
1690 default=False,
1691 )
1692 coreconfigitem(
1693 b'push',
1694 b'pushvars.server',
1695 default=False,
1105 )
1696 )
1106 coreconfigitem(
1697 coreconfigitem(
1107 b'rewrite',
1698 b'rewrite',
@@ -1110,13 +1701,21 b' coreconfigitem('
1110 alias=[(b'ui', b'history-editing-backup')],
1701 alias=[(b'ui', b'history-editing-backup')],
1111 )
1702 )
1112 coreconfigitem(
1703 coreconfigitem(
1113 b'rewrite', b'update-timestamp', default=False,
1704 b'rewrite',
1114 )
1705 b'update-timestamp',
1115 coreconfigitem(
1706 default=False,
1116 b'rewrite', b'empty-successor', default=b'skip', experimental=True,
1707 )
1117 )
1708 coreconfigitem(
1118 coreconfigitem(
1709 b'rewrite',
1119 b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True,
1710 b'empty-successor',
1711 default=b'skip',
1712 experimental=True,
1713 )
1714 coreconfigitem(
1715 b'storage',
1716 b'new-repo-backend',
1717 default=b'revlogv1',
1718 experimental=True,
1120 )
1719 )
1121 coreconfigitem(
1720 coreconfigitem(
1122 b'storage',
1721 b'storage',
@@ -1133,37 +1732,59 b' coreconfigitem('
1133 b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True
1732 b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True
1134 )
1733 )
1135 coreconfigitem(
1734 coreconfigitem(
1136 b'storage', b'revlog.reuse-external-delta', default=True,
1735 b'storage',
1137 )
1736 b'revlog.reuse-external-delta',
1138 coreconfigitem(
1737 default=True,
1139 b'storage', b'revlog.reuse-external-delta-parent', default=None,
1738 )
1140 )
1739 coreconfigitem(
1141 coreconfigitem(
1740 b'storage',
1142 b'storage', b'revlog.zlib.level', default=None,
1741 b'revlog.reuse-external-delta-parent',
1143 )
1742 default=None,
1144 coreconfigitem(
1743 )
1145 b'storage', b'revlog.zstd.level', default=None,
1744 coreconfigitem(
1146 )
1745 b'storage',
1147 coreconfigitem(
1746 b'revlog.zlib.level',
1148 b'server', b'bookmarks-pushkey-compat', default=True,
1747 default=None,
1149 )
1748 )
1150 coreconfigitem(
1749 coreconfigitem(
1151 b'server', b'bundle1', default=True,
1750 b'storage',
1152 )
1751 b'revlog.zstd.level',
1153 coreconfigitem(
1752 default=None,
1154 b'server', b'bundle1gd', default=None,
1753 )
1155 )
1754 coreconfigitem(
1156 coreconfigitem(
1755 b'server',
1157 b'server', b'bundle1.pull', default=None,
1756 b'bookmarks-pushkey-compat',
1158 )
1757 default=True,
1159 coreconfigitem(
1758 )
1160 b'server', b'bundle1gd.pull', default=None,
1759 coreconfigitem(
1161 )
1760 b'server',
1162 coreconfigitem(
1761 b'bundle1',
1163 b'server', b'bundle1.push', default=None,
1762 default=True,
1164 )
1763 )
1165 coreconfigitem(
1764 coreconfigitem(
1166 b'server', b'bundle1gd.push', default=None,
1765 b'server',
1766 b'bundle1gd',
1767 default=None,
1768 )
1769 coreconfigitem(
1770 b'server',
1771 b'bundle1.pull',
1772 default=None,
1773 )
1774 coreconfigitem(
1775 b'server',
1776 b'bundle1gd.pull',
1777 default=None,
1778 )
1779 coreconfigitem(
1780 b'server',
1781 b'bundle1.push',
1782 default=None,
1783 )
1784 coreconfigitem(
1785 b'server',
1786 b'bundle1gd.push',
1787 default=None,
1167 )
1788 )
1168 coreconfigitem(
1789 coreconfigitem(
1169 b'server',
1790 b'server',
@@ -1172,73 +1793,120 b' coreconfigitem('
1172 alias=[(b'experimental', b'bundle2.stream')],
1793 alias=[(b'experimental', b'bundle2.stream')],
1173 )
1794 )
1174 coreconfigitem(
1795 coreconfigitem(
1175 b'server', b'compressionengines', default=list,
1796 b'server',
1176 )
1797 b'compressionengines',
1177 coreconfigitem(
1798 default=list,
1178 b'server', b'concurrent-push-mode', default=b'check-related',
1799 )
1179 )
1800 coreconfigitem(
1180 coreconfigitem(
1801 b'server',
1181 b'server', b'disablefullbundle', default=False,
1802 b'concurrent-push-mode',
1182 )
1803 default=b'check-related',
1183 coreconfigitem(
1804 )
1184 b'server', b'maxhttpheaderlen', default=1024,
1805 coreconfigitem(
1185 )
1806 b'server',
1186 coreconfigitem(
1807 b'disablefullbundle',
1187 b'server', b'pullbundle', default=False,
1808 default=False,
1188 )
1809 )
1189 coreconfigitem(
1810 coreconfigitem(
1190 b'server', b'preferuncompressed', default=False,
1811 b'server',
1191 )
1812 b'maxhttpheaderlen',
1192 coreconfigitem(
1813 default=1024,
1193 b'server', b'streamunbundle', default=False,
1814 )
1194 )
1815 coreconfigitem(
1195 coreconfigitem(
1816 b'server',
1196 b'server', b'uncompressed', default=True,
1817 b'pullbundle',
1197 )
1818 default=False,
1198 coreconfigitem(
1819 )
1199 b'server', b'uncompressedallowsecret', default=False,
1820 coreconfigitem(
1200 )
1821 b'server',
1201 coreconfigitem(
1822 b'preferuncompressed',
1202 b'server', b'view', default=b'served',
1823 default=False,
1203 )
1824 )
1204 coreconfigitem(
1825 coreconfigitem(
1205 b'server', b'validate', default=False,
1826 b'server',
1206 )
1827 b'streamunbundle',
1207 coreconfigitem(
1828 default=False,
1208 b'server', b'zliblevel', default=-1,
1829 )
1209 )
1830 coreconfigitem(
1210 coreconfigitem(
1831 b'server',
1211 b'server', b'zstdlevel', default=3,
1832 b'uncompressed',
1212 )
1833 default=True,
1213 coreconfigitem(
1834 )
1214 b'share', b'pool', default=None,
1835 coreconfigitem(
1215 )
1836 b'server',
1216 coreconfigitem(
1837 b'uncompressedallowsecret',
1217 b'share', b'poolnaming', default=b'identity',
1838 default=False,
1218 )
1839 )
1219 coreconfigitem(
1840 coreconfigitem(
1220 b'shelve', b'maxbackups', default=10,
1841 b'server',
1221 )
1842 b'view',
1222 coreconfigitem(
1843 default=b'served',
1223 b'smtp', b'host', default=None,
1844 )
1224 )
1845 coreconfigitem(
1225 coreconfigitem(
1846 b'server',
1226 b'smtp', b'local_hostname', default=None,
1847 b'validate',
1227 )
1848 default=False,
1228 coreconfigitem(
1849 )
1229 b'smtp', b'password', default=None,
1850 coreconfigitem(
1230 )
1851 b'server',
1231 coreconfigitem(
1852 b'zliblevel',
1232 b'smtp', b'port', default=dynamicdefault,
1853 default=-1,
1233 )
1854 )
1234 coreconfigitem(
1855 coreconfigitem(
1235 b'smtp', b'tls', default=b'none',
1856 b'server',
1236 )
1857 b'zstdlevel',
1237 coreconfigitem(
1858 default=3,
1238 b'smtp', b'username', default=None,
1859 )
1239 )
1860 coreconfigitem(
1240 coreconfigitem(
1861 b'share',
1241 b'sparse', b'missingwarning', default=True, experimental=True,
1862 b'pool',
1863 default=None,
1864 )
1865 coreconfigitem(
1866 b'share',
1867 b'poolnaming',
1868 default=b'identity',
1869 )
1870 coreconfigitem(
1871 b'shelve',
1872 b'maxbackups',
1873 default=10,
1874 )
1875 coreconfigitem(
1876 b'smtp',
1877 b'host',
1878 default=None,
1879 )
1880 coreconfigitem(
1881 b'smtp',
1882 b'local_hostname',
1883 default=None,
1884 )
1885 coreconfigitem(
1886 b'smtp',
1887 b'password',
1888 default=None,
1889 )
1890 coreconfigitem(
1891 b'smtp',
1892 b'port',
1893 default=dynamicdefault,
1894 )
1895 coreconfigitem(
1896 b'smtp',
1897 b'tls',
1898 default=b'none',
1899 )
1900 coreconfigitem(
1901 b'smtp',
1902 b'username',
1903 default=None,
1904 )
1905 coreconfigitem(
1906 b'sparse',
1907 b'missingwarning',
1908 default=True,
1909 experimental=True,
1242 )
1910 )
1243 coreconfigitem(
1911 coreconfigitem(
1244 b'subrepos',
1912 b'subrepos',
@@ -1246,367 +1914,612 b' coreconfigitem('
1246 default=dynamicdefault, # to make backporting simpler
1914 default=dynamicdefault, # to make backporting simpler
1247 )
1915 )
1248 coreconfigitem(
1916 coreconfigitem(
1249 b'subrepos', b'hg:allowed', default=dynamicdefault,
1917 b'subrepos',
1250 )
1918 b'hg:allowed',
1251 coreconfigitem(
1919 default=dynamicdefault,
1252 b'subrepos', b'git:allowed', default=dynamicdefault,
1920 )
1253 )
1921 coreconfigitem(
1254 coreconfigitem(
1922 b'subrepos',
1255 b'subrepos', b'svn:allowed', default=dynamicdefault,
1923 b'git:allowed',
1256 )
1924 default=dynamicdefault,
1257 coreconfigitem(
1925 )
1258 b'templates', b'.*', default=None, generic=True,
1926 coreconfigitem(
1259 )
1927 b'subrepos',
1260 coreconfigitem(
1928 b'svn:allowed',
1261 b'templateconfig', b'.*', default=dynamicdefault, generic=True,
1929 default=dynamicdefault,
1262 )
1930 )
1263 coreconfigitem(
1931 coreconfigitem(
1264 b'trusted', b'groups', default=list,
1932 b'templates',
1265 )
1933 b'.*',
1266 coreconfigitem(
1934 default=None,
1267 b'trusted', b'users', default=list,
1935 generic=True,
1268 )
1936 )
1269 coreconfigitem(
1937 coreconfigitem(
1270 b'ui', b'_usedassubrepo', default=False,
1938 b'templateconfig',
1271 )
1939 b'.*',
1272 coreconfigitem(
1940 default=dynamicdefault,
1273 b'ui', b'allowemptycommit', default=False,
1941 generic=True,
1274 )
1942 )
1275 coreconfigitem(
1943 coreconfigitem(
1276 b'ui', b'archivemeta', default=True,
1944 b'trusted',
1277 )
1945 b'groups',
1278 coreconfigitem(
1946 default=list,
1279 b'ui', b'askusername', default=False,
1947 )
1280 )
1948 coreconfigitem(
1281 coreconfigitem(
1949 b'trusted',
1282 b'ui', b'available-memory', default=None,
1950 b'users',
1951 default=list,
1952 )
1953 coreconfigitem(
1954 b'ui',
1955 b'_usedassubrepo',
1956 default=False,
1957 )
1958 coreconfigitem(
1959 b'ui',
1960 b'allowemptycommit',
1961 default=False,
1962 )
1963 coreconfigitem(
1964 b'ui',
1965 b'archivemeta',
1966 default=True,
1967 )
1968 coreconfigitem(
1969 b'ui',
1970 b'askusername',
1971 default=False,
1972 )
1973 coreconfigitem(
1974 b'ui',
1975 b'available-memory',
1976 default=None,
1283 )
1977 )
1284
1978
1285 coreconfigitem(
1979 coreconfigitem(
1286 b'ui', b'clonebundlefallback', default=False,
1980 b'ui',
1287 )
1981 b'clonebundlefallback',
1288 coreconfigitem(
1982 default=False,
1289 b'ui', b'clonebundleprefers', default=list,
1983 )
1290 )
1984 coreconfigitem(
1291 coreconfigitem(
1985 b'ui',
1292 b'ui', b'clonebundles', default=True,
1986 b'clonebundleprefers',
1293 )
1987 default=list,
1294 coreconfigitem(
1988 )
1295 b'ui', b'color', default=b'auto',
1989 coreconfigitem(
1296 )
1990 b'ui',
1297 coreconfigitem(
1991 b'clonebundles',
1298 b'ui', b'commitsubrepos', default=False,
1992 default=True,
1299 )
1993 )
1300 coreconfigitem(
1994 coreconfigitem(
1301 b'ui', b'debug', default=False,
1995 b'ui',
1302 )
1996 b'color',
1303 coreconfigitem(
1997 default=b'auto',
1304 b'ui', b'debugger', default=None,
1998 )
1305 )
1999 coreconfigitem(
1306 coreconfigitem(
2000 b'ui',
1307 b'ui', b'editor', default=dynamicdefault,
2001 b'commitsubrepos',
1308 )
2002 default=False,
1309 coreconfigitem(
2003 )
1310 b'ui', b'detailed-exit-code', default=False, experimental=True,
2004 coreconfigitem(
1311 )
2005 b'ui',
1312 coreconfigitem(
2006 b'debug',
1313 b'ui', b'fallbackencoding', default=None,
2007 default=False,
1314 )
2008 )
1315 coreconfigitem(
2009 coreconfigitem(
1316 b'ui', b'forcecwd', default=None,
2010 b'ui',
1317 )
2011 b'debugger',
1318 coreconfigitem(
2012 default=None,
1319 b'ui', b'forcemerge', default=None,
2013 )
1320 )
2014 coreconfigitem(
1321 coreconfigitem(
2015 b'ui',
1322 b'ui', b'formatdebug', default=False,
2016 b'editor',
1323 )
2017 default=dynamicdefault,
1324 coreconfigitem(
2018 )
1325 b'ui', b'formatjson', default=False,
2019 coreconfigitem(
1326 )
2020 b'ui',
1327 coreconfigitem(
2021 b'detailed-exit-code',
1328 b'ui', b'formatted', default=None,
2022 default=False,
1329 )
2023 experimental=True,
1330 coreconfigitem(
2024 )
1331 b'ui', b'interactive', default=None,
2025 coreconfigitem(
1332 )
2026 b'ui',
1333 coreconfigitem(
2027 b'fallbackencoding',
1334 b'ui', b'interface', default=None,
2028 default=None,
1335 )
2029 )
1336 coreconfigitem(
2030 coreconfigitem(
1337 b'ui', b'interface.chunkselector', default=None,
2031 b'ui',
1338 )
2032 b'forcecwd',
1339 coreconfigitem(
2033 default=None,
1340 b'ui', b'large-file-limit', default=10000000,
2034 )
1341 )
2035 coreconfigitem(
1342 coreconfigitem(
2036 b'ui',
1343 b'ui', b'logblockedtimes', default=False,
2037 b'forcemerge',
1344 )
2038 default=None,
1345 coreconfigitem(
2039 )
1346 b'ui', b'merge', default=None,
2040 coreconfigitem(
1347 )
2041 b'ui',
1348 coreconfigitem(
2042 b'formatdebug',
1349 b'ui', b'mergemarkers', default=b'basic',
2043 default=False,
1350 )
2044 )
1351 coreconfigitem(
2045 coreconfigitem(
1352 b'ui', b'message-output', default=b'stdio',
2046 b'ui',
1353 )
2047 b'formatjson',
1354 coreconfigitem(
2048 default=False,
1355 b'ui', b'nontty', default=False,
2049 )
1356 )
2050 coreconfigitem(
1357 coreconfigitem(
2051 b'ui',
1358 b'ui', b'origbackuppath', default=None,
2052 b'formatted',
1359 )
2053 default=None,
1360 coreconfigitem(
2054 )
1361 b'ui', b'paginate', default=True,
2055 coreconfigitem(
1362 )
2056 b'ui',
1363 coreconfigitem(
2057 b'interactive',
1364 b'ui', b'patch', default=None,
2058 default=None,
1365 )
2059 )
1366 coreconfigitem(
2060 coreconfigitem(
1367 b'ui', b'portablefilenames', default=b'warn',
2061 b'ui',
1368 )
2062 b'interface',
1369 coreconfigitem(
2063 default=None,
1370 b'ui', b'promptecho', default=False,
2064 )
1371 )
2065 coreconfigitem(
1372 coreconfigitem(
2066 b'ui',
1373 b'ui', b'quiet', default=False,
2067 b'interface.chunkselector',
1374 )
2068 default=None,
1375 coreconfigitem(
2069 )
1376 b'ui', b'quietbookmarkmove', default=False,
2070 coreconfigitem(
1377 )
2071 b'ui',
1378 coreconfigitem(
2072 b'large-file-limit',
1379 b'ui', b'relative-paths', default=b'legacy',
2073 default=10000000,
1380 )
2074 )
1381 coreconfigitem(
2075 coreconfigitem(
1382 b'ui', b'remotecmd', default=b'hg',
2076 b'ui',
1383 )
2077 b'logblockedtimes',
1384 coreconfigitem(
2078 default=False,
1385 b'ui', b'report_untrusted', default=True,
2079 )
1386 )
2080 coreconfigitem(
1387 coreconfigitem(
2081 b'ui',
1388 b'ui', b'rollback', default=True,
2082 b'merge',
1389 )
2083 default=None,
1390 coreconfigitem(
2084 )
1391 b'ui', b'signal-safe-lock', default=True,
2085 coreconfigitem(
1392 )
2086 b'ui',
1393 coreconfigitem(
2087 b'mergemarkers',
1394 b'ui', b'slash', default=False,
2088 default=b'basic',
1395 )
2089 )
1396 coreconfigitem(
2090 coreconfigitem(
1397 b'ui', b'ssh', default=b'ssh',
2091 b'ui',
1398 )
2092 b'message-output',
1399 coreconfigitem(
2093 default=b'stdio',
1400 b'ui', b'ssherrorhint', default=None,
2094 )
1401 )
2095 coreconfigitem(
1402 coreconfigitem(
2096 b'ui',
1403 b'ui', b'statuscopies', default=False,
2097 b'nontty',
1404 )
2098 default=False,
1405 coreconfigitem(
2099 )
1406 b'ui', b'strict', default=False,
2100 coreconfigitem(
1407 )
2101 b'ui',
1408 coreconfigitem(
2102 b'origbackuppath',
1409 b'ui', b'style', default=b'',
2103 default=None,
1410 )
2104 )
1411 coreconfigitem(
2105 coreconfigitem(
1412 b'ui', b'supportcontact', default=None,
2106 b'ui',
1413 )
2107 b'paginate',
1414 coreconfigitem(
2108 default=True,
1415 b'ui', b'textwidth', default=78,
2109 )
1416 )
2110 coreconfigitem(
1417 coreconfigitem(
2111 b'ui',
1418 b'ui', b'timeout', default=b'600',
2112 b'patch',
1419 )
2113 default=None,
1420 coreconfigitem(
2114 )
1421 b'ui', b'timeout.warn', default=0,
2115 coreconfigitem(
1422 )
2116 b'ui',
1423 coreconfigitem(
2117 b'portablefilenames',
1424 b'ui', b'timestamp-output', default=False,
2118 default=b'warn',
1425 )
2119 )
1426 coreconfigitem(
2120 coreconfigitem(
1427 b'ui', b'traceback', default=False,
2121 b'ui',
1428 )
2122 b'promptecho',
1429 coreconfigitem(
2123 default=False,
1430 b'ui', b'tweakdefaults', default=False,
2124 )
2125 coreconfigitem(
2126 b'ui',
2127 b'quiet',
2128 default=False,
2129 )
2130 coreconfigitem(
2131 b'ui',
2132 b'quietbookmarkmove',
2133 default=False,
2134 )
2135 coreconfigitem(
2136 b'ui',
2137 b'relative-paths',
2138 default=b'legacy',
2139 )
2140 coreconfigitem(
2141 b'ui',
2142 b'remotecmd',
2143 default=b'hg',
2144 )
2145 coreconfigitem(
2146 b'ui',
2147 b'report_untrusted',
2148 default=True,
2149 )
2150 coreconfigitem(
2151 b'ui',
2152 b'rollback',
2153 default=True,
2154 )
2155 coreconfigitem(
2156 b'ui',
2157 b'signal-safe-lock',
2158 default=True,
2159 )
2160 coreconfigitem(
2161 b'ui',
2162 b'slash',
2163 default=False,
2164 )
2165 coreconfigitem(
2166 b'ui',
2167 b'ssh',
2168 default=b'ssh',
2169 )
2170 coreconfigitem(
2171 b'ui',
2172 b'ssherrorhint',
2173 default=None,
2174 )
2175 coreconfigitem(
2176 b'ui',
2177 b'statuscopies',
2178 default=False,
2179 )
2180 coreconfigitem(
2181 b'ui',
2182 b'strict',
2183 default=False,
2184 )
2185 coreconfigitem(
2186 b'ui',
2187 b'style',
2188 default=b'',
2189 )
2190 coreconfigitem(
2191 b'ui',
2192 b'supportcontact',
2193 default=None,
2194 )
2195 coreconfigitem(
2196 b'ui',
2197 b'textwidth',
2198 default=78,
2199 )
2200 coreconfigitem(
2201 b'ui',
2202 b'timeout',
2203 default=b'600',
2204 )
2205 coreconfigitem(
2206 b'ui',
2207 b'timeout.warn',
2208 default=0,
2209 )
2210 coreconfigitem(
2211 b'ui',
2212 b'timestamp-output',
2213 default=False,
2214 )
2215 coreconfigitem(
2216 b'ui',
2217 b'traceback',
2218 default=False,
2219 )
2220 coreconfigitem(
2221 b'ui',
2222 b'tweakdefaults',
2223 default=False,
1431 )
2224 )
1432 coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
2225 coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
1433 coreconfigitem(
2226 coreconfigitem(
1434 b'ui', b'verbose', default=False,
2227 b'ui',
1435 )
2228 b'verbose',
1436 coreconfigitem(
2229 default=False,
1437 b'verify', b'skipflags', default=None,
2230 )
1438 )
2231 coreconfigitem(
1439 coreconfigitem(
2232 b'verify',
1440 b'web', b'allowbz2', default=False,
2233 b'skipflags',
1441 )
2234 default=None,
1442 coreconfigitem(
2235 )
1443 b'web', b'allowgz', default=False,
2236 coreconfigitem(
1444 )
2237 b'web',
1445 coreconfigitem(
2238 b'allowbz2',
1446 b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True,
2239 default=False,
1447 )
2240 )
1448 coreconfigitem(
2241 coreconfigitem(
1449 b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list,
2242 b'web',
1450 )
2243 b'allowgz',
1451 coreconfigitem(
2244 default=False,
1452 b'web', b'allowzip', default=False,
2245 )
1453 )
2246 coreconfigitem(
1454 coreconfigitem(
2247 b'web',
1455 b'web', b'archivesubrepos', default=False,
2248 b'allow-pull',
1456 )
2249 alias=[(b'web', b'allowpull')],
1457 coreconfigitem(
2250 default=True,
1458 b'web', b'cache', default=True,
2251 )
1459 )
2252 coreconfigitem(
1460 coreconfigitem(
2253 b'web',
1461 b'web', b'comparisoncontext', default=5,
2254 b'allow-push',
1462 )
2255 alias=[(b'web', b'allow_push')],
1463 coreconfigitem(
2256 default=list,
1464 b'web', b'contact', default=None,
2257 )
1465 )
2258 coreconfigitem(
1466 coreconfigitem(
2259 b'web',
1467 b'web', b'deny_push', default=list,
2260 b'allowzip',
1468 )
2261 default=False,
1469 coreconfigitem(
2262 )
1470 b'web', b'guessmime', default=False,
2263 coreconfigitem(
1471 )
2264 b'web',
1472 coreconfigitem(
2265 b'archivesubrepos',
1473 b'web', b'hidden', default=False,
2266 default=False,
1474 )
2267 )
1475 coreconfigitem(
2268 coreconfigitem(
1476 b'web', b'labels', default=list,
2269 b'web',
1477 )
2270 b'cache',
1478 coreconfigitem(
2271 default=True,
1479 b'web', b'logoimg', default=b'hglogo.png',
2272 )
1480 )
2273 coreconfigitem(
1481 coreconfigitem(
2274 b'web',
1482 b'web', b'logourl', default=b'https://mercurial-scm.org/',
2275 b'comparisoncontext',
1483 )
2276 default=5,
1484 coreconfigitem(
2277 )
1485 b'web', b'accesslog', default=b'-',
2278 coreconfigitem(
1486 )
2279 b'web',
1487 coreconfigitem(
2280 b'contact',
1488 b'web', b'address', default=b'',
2281 default=None,
1489 )
2282 )
1490 coreconfigitem(
2283 coreconfigitem(
1491 b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
2284 b'web',
1492 )
2285 b'deny_push',
1493 coreconfigitem(
2286 default=list,
1494 b'web', b'allow_read', default=list,
2287 )
1495 )
2288 coreconfigitem(
1496 coreconfigitem(
2289 b'web',
1497 b'web', b'baseurl', default=None,
2290 b'guessmime',
1498 )
2291 default=False,
1499 coreconfigitem(
2292 )
1500 b'web', b'cacerts', default=None,
2293 coreconfigitem(
1501 )
2294 b'web',
1502 coreconfigitem(
2295 b'hidden',
1503 b'web', b'certificate', default=None,
2296 default=False,
1504 )
2297 )
1505 coreconfigitem(
2298 coreconfigitem(
1506 b'web', b'collapse', default=False,
2299 b'web',
1507 )
2300 b'labels',
1508 coreconfigitem(
2301 default=list,
1509 b'web', b'csp', default=None,
2302 )
1510 )
2303 coreconfigitem(
1511 coreconfigitem(
2304 b'web',
1512 b'web', b'deny_read', default=list,
2305 b'logoimg',
1513 )
2306 default=b'hglogo.png',
1514 coreconfigitem(
2307 )
1515 b'web', b'descend', default=True,
2308 coreconfigitem(
1516 )
2309 b'web',
1517 coreconfigitem(
2310 b'logourl',
1518 b'web', b'description', default=b"",
2311 default=b'https://mercurial-scm.org/',
1519 )
2312 )
1520 coreconfigitem(
2313 coreconfigitem(
1521 b'web', b'encoding', default=lambda: encoding.encoding,
2314 b'web',
1522 )
2315 b'accesslog',
1523 coreconfigitem(
2316 default=b'-',
1524 b'web', b'errorlog', default=b'-',
2317 )
1525 )
2318 coreconfigitem(
1526 coreconfigitem(
2319 b'web',
1527 b'web', b'ipv6', default=False,
2320 b'address',
1528 )
2321 default=b'',
1529 coreconfigitem(
2322 )
1530 b'web', b'maxchanges', default=10,
2323 coreconfigitem(
1531 )
2324 b'web',
1532 coreconfigitem(
2325 b'allow-archive',
1533 b'web', b'maxfiles', default=10,
2326 alias=[(b'web', b'allow_archive')],
1534 )
2327 default=list,
1535 coreconfigitem(
2328 )
1536 b'web', b'maxshortchanges', default=60,
2329 coreconfigitem(
1537 )
2330 b'web',
1538 coreconfigitem(
2331 b'allow_read',
1539 b'web', b'motd', default=b'',
2332 default=list,
1540 )
2333 )
1541 coreconfigitem(
2334 coreconfigitem(
1542 b'web', b'name', default=dynamicdefault,
2335 b'web',
1543 )
2336 b'baseurl',
1544 coreconfigitem(
2337 default=None,
1545 b'web', b'port', default=8000,
2338 )
1546 )
2339 coreconfigitem(
1547 coreconfigitem(
2340 b'web',
1548 b'web', b'prefix', default=b'',
2341 b'cacerts',
1549 )
2342 default=None,
1550 coreconfigitem(
2343 )
1551 b'web', b'push_ssl', default=True,
2344 coreconfigitem(
1552 )
2345 b'web',
1553 coreconfigitem(
2346 b'certificate',
1554 b'web', b'refreshinterval', default=20,
2347 default=None,
1555 )
2348 )
1556 coreconfigitem(
2349 coreconfigitem(
1557 b'web', b'server-header', default=None,
2350 b'web',
1558 )
2351 b'collapse',
1559 coreconfigitem(
2352 default=False,
1560 b'web', b'static', default=None,
2353 )
1561 )
2354 coreconfigitem(
1562 coreconfigitem(
2355 b'web',
1563 b'web', b'staticurl', default=None,
2356 b'csp',
1564 )
2357 default=None,
1565 coreconfigitem(
2358 )
1566 b'web', b'stripes', default=1,
2359 coreconfigitem(
1567 )
2360 b'web',
1568 coreconfigitem(
2361 b'deny_read',
1569 b'web', b'style', default=b'paper',
2362 default=list,
1570 )
2363 )
1571 coreconfigitem(
2364 coreconfigitem(
1572 b'web', b'templates', default=None,
2365 b'web',
1573 )
2366 b'descend',
1574 coreconfigitem(
2367 default=True,
1575 b'web', b'view', default=b'served', experimental=True,
2368 )
1576 )
2369 coreconfigitem(
1577 coreconfigitem(
2370 b'web',
1578 b'worker', b'backgroundclose', default=dynamicdefault,
2371 b'description',
2372 default=b"",
2373 )
2374 coreconfigitem(
2375 b'web',
2376 b'encoding',
2377 default=lambda: encoding.encoding,
2378 )
2379 coreconfigitem(
2380 b'web',
2381 b'errorlog',
2382 default=b'-',
2383 )
2384 coreconfigitem(
2385 b'web',
2386 b'ipv6',
2387 default=False,
2388 )
2389 coreconfigitem(
2390 b'web',
2391 b'maxchanges',
2392 default=10,
2393 )
2394 coreconfigitem(
2395 b'web',
2396 b'maxfiles',
2397 default=10,
2398 )
2399 coreconfigitem(
2400 b'web',
2401 b'maxshortchanges',
2402 default=60,
2403 )
2404 coreconfigitem(
2405 b'web',
2406 b'motd',
2407 default=b'',
2408 )
2409 coreconfigitem(
2410 b'web',
2411 b'name',
2412 default=dynamicdefault,
2413 )
2414 coreconfigitem(
2415 b'web',
2416 b'port',
2417 default=8000,
2418 )
2419 coreconfigitem(
2420 b'web',
2421 b'prefix',
2422 default=b'',
2423 )
2424 coreconfigitem(
2425 b'web',
2426 b'push_ssl',
2427 default=True,
2428 )
2429 coreconfigitem(
2430 b'web',
2431 b'refreshinterval',
2432 default=20,
2433 )
2434 coreconfigitem(
2435 b'web',
2436 b'server-header',
2437 default=None,
2438 )
2439 coreconfigitem(
2440 b'web',
2441 b'static',
2442 default=None,
2443 )
2444 coreconfigitem(
2445 b'web',
2446 b'staticurl',
2447 default=None,
2448 )
2449 coreconfigitem(
2450 b'web',
2451 b'stripes',
2452 default=1,
2453 )
2454 coreconfigitem(
2455 b'web',
2456 b'style',
2457 default=b'paper',
2458 )
2459 coreconfigitem(
2460 b'web',
2461 b'templates',
2462 default=None,
2463 )
2464 coreconfigitem(
2465 b'web',
2466 b'view',
2467 default=b'served',
2468 experimental=True,
2469 )
2470 coreconfigitem(
2471 b'worker',
2472 b'backgroundclose',
2473 default=dynamicdefault,
1579 )
2474 )
1580 # Windows defaults to a limit of 512 open files. A buffer of 128
2475 # Windows defaults to a limit of 512 open files. A buffer of 128
1581 # should give us enough headway.
2476 # should give us enough headway.
1582 coreconfigitem(
2477 coreconfigitem(
1583 b'worker', b'backgroundclosemaxqueue', default=384,
2478 b'worker',
1584 )
2479 b'backgroundclosemaxqueue',
1585 coreconfigitem(
2480 default=384,
1586 b'worker', b'backgroundcloseminfilecount', default=2048,
2481 )
1587 )
2482 coreconfigitem(
1588 coreconfigitem(
2483 b'worker',
1589 b'worker', b'backgroundclosethreadcount', default=4,
2484 b'backgroundcloseminfilecount',
1590 )
2485 default=2048,
1591 coreconfigitem(
2486 )
1592 b'worker', b'enabled', default=True,
2487 coreconfigitem(
1593 )
2488 b'worker',
1594 coreconfigitem(
2489 b'backgroundclosethreadcount',
1595 b'worker', b'numcpus', default=None,
2490 default=4,
2491 )
2492 coreconfigitem(
2493 b'worker',
2494 b'enabled',
2495 default=True,
2496 )
2497 coreconfigitem(
2498 b'worker',
2499 b'numcpus',
2500 default=None,
1596 )
2501 )
1597
2502
1598 # Rebase related configuration moved to core because other extensions are doing
2503 # Rebase related configuration moved to core because other extensions are doing
1599 # strange things. For example, shelve imports the extension to reuse some bits
2504 # strange things. For example, shelve imports the extension to reuse some bits
1600 # without formally loading it.
2505 # without formally loading it.
1601 coreconfigitem(
2506 coreconfigitem(
1602 b'commands', b'rebase.requiredest', default=False,
2507 b'commands',
1603 )
2508 b'rebase.requiredest',
1604 coreconfigitem(
2509 default=False,
1605 b'experimental', b'rebaseskipobsolete', default=True,
2510 )
1606 )
2511 coreconfigitem(
1607 coreconfigitem(
2512 b'experimental',
1608 b'rebase', b'singletransaction', default=False,
2513 b'rebaseskipobsolete',
1609 )
2514 default=True,
1610 coreconfigitem(
2515 )
1611 b'rebase', b'experimental.inmemory', default=False,
2516 coreconfigitem(
1612 )
2517 b'rebase',
2518 b'singletransaction',
2519 default=False,
2520 )
2521 coreconfigitem(
2522 b'rebase',
2523 b'experimental.inmemory',
2524 default=False,
2525 )
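
All of the configitems.py hunks above are the same mechanical rewrite: black 20.8b1 introduced the "magic trailing comma", so any call whose argument list already ends in a comma is kept exploded, one argument per line, instead of being packed onto a single line. A minimal sketch of that behavior, assuming the black package (20.8b1 or newer) is installed; string normalization is disabled so the output keeps the single-quoted bytes literals seen in this diff:

    # Sketch only: feed one of the old-style calls through black's API.
    import black

    SRC = "coreconfigitem(b'worker', b'numcpus', default=None,)\n"

    # The comma before the closing parenthesis is "magic": black keeps the
    # call exploded with one argument per line, matching the right-hand
    # side of the hunks above.
    mode = black.FileMode(string_normalization=False)
    print(black.format_str(SRC, mode=mode))
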
@@ -316,9 +316,9 b' class basectx(object):'
316 return subrepo.nullsubrepo(self, path, pctx)
316 return subrepo.nullsubrepo(self, path, pctx)
317
317
318 def workingsub(self, path):
318 def workingsub(self, path):
319 '''return a subrepo for the stored revision, or wdir if this is a wdir
319 """return a subrepo for the stored revision, or wdir if this is a wdir
320 context.
320 context.
321 '''
321 """
322 return subrepo.subrepo(self, path, allowwdir=True)
322 return subrepo.subrepo(self, path, allowwdir=True)
323
323
324 def match(
324 def match(
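
The context.py hunks that follow show a second 20.8b1 change: docstring quotes are normalized from ''' to """ without touching the text. A before/after sketch with hypothetical function names and trimmed bodies:

    # Shape on the left-hand side of the hunk (earlier black left it alone):
    def workingsub_before(path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''

    # Shape on the right-hand side: quote style normalized, text unchanged.
    def workingsub_after(path):
        """return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        """
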
@@ -1054,8 +1054,7 b' class basefilectx(object):'
1054 return lkr
1054 return lkr
1055
1055
1056 def isintroducedafter(self, changelogrev):
1056 def isintroducedafter(self, changelogrev):
1057 """True if a filectx has been introduced after a given floor revision
1057 """True if a filectx has been introduced after a given floor revision"""
1058 """
1059 if self.linkrev() >= changelogrev:
1058 if self.linkrev() >= changelogrev:
1060 return True
1059 return True
1061 introrev = self._introrev(stoprev=changelogrev)
1060 introrev = self._introrev(stoprev=changelogrev)
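
This hunk shows a related docstring rule: when the docstring body is a single line, 20.8b1 pulls the closing quotes up onto that line rather than leaving them alone on the next one. Sketch (hypothetical names):

    def isintroducedafter_before(changelogrev):
        """True if a filectx has been introduced after a given floor revision
        """

    # 20.8b1 joins the closing quotes onto the lone text line:
    def isintroducedafter_after(changelogrev):
        """True if a filectx has been introduced after a given floor revision"""
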
@@ -1232,7 +1231,7 b' class basefilectx(object):'
1232
1231
1233 class filectx(basefilectx):
1232 class filectx(basefilectx):
1234 """A filecontext object makes access to data related to a particular
1233 """A filecontext object makes access to data related to a particular
1235 filerevision convenient."""
1234 filerevision convenient."""
1236
1235
1237 def __init__(
1236 def __init__(
1238 self,
1237 self,
@@ -1244,15 +1243,16 b' class filectx(basefilectx):'
1244 changectx=None,
1243 changectx=None,
1245 ):
1244 ):
1246 """changeid must be a revision number, if specified.
1245 """changeid must be a revision number, if specified.
1247 fileid can be a file revision or node."""
1246 fileid can be a file revision or node."""
1248 self._repo = repo
1247 self._repo = repo
1249 self._path = path
1248 self._path = path
1250
1249
1251 assert (
1250 assert (
1252 changeid is not None or fileid is not None or changectx is not None
1251 changeid is not None or fileid is not None or changectx is not None
1253 ), (
1252 ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
1254 b"bad args: changeid=%r, fileid=%r, changectx=%r"
1253 changeid,
1255 % (changeid, fileid, changectx,)
1254 fileid,
1255 changectx,
1256 )
1256 )
1257
1257
1258 if filelog is not None:
1258 if filelog is not None:
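
The assert rewrite above is the magic trailing comma applied to a %-format tuple: the comma after changectx keeps the tuple exploded, and the format string is hoisted up next to the assert's message comma. A runnable sketch of the post-black shape, with stand-in values chosen so the assertion passes:

    changeid, fileid, changectx = 1, None, None  # stand-ins; one must be non-None

    assert (
        changeid is not None or fileid is not None or changectx is not None
    ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
        changeid,
        fileid,
        changectx,
    )
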
@@ -1289,8 +1289,8 b' class filectx(basefilectx):'
1289 return self._repo.unfiltered()[self._changeid]
1289 return self._repo.unfiltered()[self._changeid]
1290
1290
1291 def filectx(self, fileid, changeid=None):
1291 def filectx(self, fileid, changeid=None):
1292 '''opens an arbitrary revision of the file without
1292 """opens an arbitrary revision of the file without
1293 opening a new filelog'''
1293 opening a new filelog"""
1294 return filectx(
1294 return filectx(
1295 self._repo,
1295 self._repo,
1296 self._path,
1296 self._path,
@@ -2101,7 +2101,7 b' class committablefilectx(basefilectx):'
2101
2101
2102 class workingfilectx(committablefilectx):
2102 class workingfilectx(committablefilectx):
2103 """A workingfilectx object makes access to data related to a particular
2103 """A workingfilectx object makes access to data related to a particular
2104 file in the working directory convenient."""
2104 file in the working directory convenient."""
2105
2105
2106 def __init__(self, repo, path, filelog=None, workingctx=None):
2106 def __init__(self, repo, path, filelog=None, workingctx=None):
2107 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
2107 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
@@ -2702,8 +2702,7 b' class workingcommitctx(workingctx):'
2702
2702
2703 @propertycache
2703 @propertycache
2704 def _changedset(self):
2704 def _changedset(self):
2705 """Return the set of files changed in this context
2705 """Return the set of files changed in this context"""
2706 """
2707 changed = set(self._status.modified)
2706 changed = set(self._status.modified)
2708 changed.update(self._status.added)
2707 changed.update(self._status.added)
2709 changed.update(self._status.removed)
2708 changed.update(self._status.removed)
@@ -2877,8 +2876,7 b' class memctx(committablectx):'
2877
2876
2878 @propertycache
2877 @propertycache
2879 def _status(self):
2878 def _status(self):
2880 """Calculate exact status from ``files`` specified at construction
2879 """Calculate exact status from ``files`` specified at construction"""
2881 """
2882 man1 = self.p1().manifest()
2880 man1 = self.p1().manifest()
2883 p2 = self._parents[1]
2881 p2 = self._parents[1]
2884 # "1 < len(self._parents)" can't be used for checking
2882 # "1 < len(self._parents)" can't be used for checking
@@ -702,7 +702,7 b' def mergecopies(repo, c1, c2, base):'
702
702
703
703
704 def _isfullcopytraceable(repo, c1, base):
704 def _isfullcopytraceable(repo, c1, base):
705 """ Checks that if base, source and destination are all no-public branches,
705 """Checks that if base, source and destination are all no-public branches,
706 if yes let's use the full copytrace algorithm for increased capabilities
706 if yes let's use the full copytrace algorithm for increased capabilities
707 since it will be fast enough.
707 since it will be fast enough.
708
708
@@ -770,14 +770,16 b' class branch_copies(object):'
770 self.movewithdir = {} if movewithdir is None else movewithdir
770 self.movewithdir = {} if movewithdir is None else movewithdir
771
771
772 def __repr__(self):
772 def __repr__(self):
773 return (
773 return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % (
774 '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>'
774 self.copy,
775 % (self.copy, self.renamedelete, self.dirmove, self.movewithdir,)
775 self.renamedelete,
776 self.dirmove,
777 self.movewithdir,
776 )
778 )
777
779
778
780
779 def _fullcopytracing(repo, c1, c2, base):
781 def _fullcopytracing(repo, c1, c2, base):
780 """ The full copytracing algorithm which finds all the new files that were
782 """The full copytracing algorithm which finds all the new files that were
781 added from merge base up to the top commit and for each file it checks if
783 added from merge base up to the top commit and for each file it checks if
782 this file was copied from another file.
784 this file was copied from another file.
783
785
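
Two behaviors meet in this hunk: black does not split string literals, so the long '<branch_copies ...>' format string is hoisted onto the return line even though it overflows the line length, while the trailing comma in the % tuple keeps the operands one per line; the hunk also drops the stray space after the opening quotes of the _fullcopytracing docstring. A self-contained sketch of the __repr__ shape (hypothetical class name, empty dicts as stand-ins):

    class BranchCopiesSketch(object):
        """Stand-in for branch_copies, showing the post-black __repr__."""

        def __init__(self):
            self.copy = {}
            self.renamedelete = {}
            self.dirmove = {}
            self.movewithdir = {}

        def __repr__(self):
            # The long string stays intact on one line; the trailing comma
            # keeps the % tuple exploded.
            return '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n movewithdir=%r\n>' % (
                self.copy,
                self.renamedelete,
                self.dirmove,
                self.movewithdir,
            )

    print(repr(BranchCopiesSketch()))
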
@@ -967,7 +969,7 b' def _dir_renames(repo, ctx, copy, fullco'
967
969
968
970
969 def _heuristicscopytracing(repo, c1, c2, base):
971 def _heuristicscopytracing(repo, c1, c2, base):
970 """ Fast copytracing using filename heuristics
972 """Fast copytracing using filename heuristics
971
973
972 Assumes that moves or renames are of following two types:
974 Assumes that moves or renames are of following two types:
973
975
@@ -1000,7 +1000,7 b' class curseschunkselector(object):'
1000
1000
1001 def toggleallbetween(self):
1001 def toggleallbetween(self):
1002 """toggle applied on or off for all items in range [lastapplied,
1002 """toggle applied on or off for all items in range [lastapplied,
1003 current]. """
1003 current]."""
1004 if (
1004 if (
1005 not self.lastapplieditem
1005 not self.lastapplieditem
1006 or self.currentselecteditem == self.lastapplieditem
1006 or self.currentselecteditem == self.lastapplieditem
@@ -682,7 +682,7 b' def _decoratelines(text, fctx):'
682
682
683
683
684 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
684 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
685 r'''
685 r"""
686 Given parent and child fctxes and annotate data for parents, for all lines
686 Given parent and child fctxes and annotate data for parents, for all lines
687 in either parent that match the child, annotate the child with the parent's
687 in either parent that match the child, annotate the child with the parent's
688 data.
688 data.
@@ -691,7 +691,7 b' def _annotatepair(parents, childfctx, ch'
691 annotate data as well such that child is never blamed for any lines.
691 annotate data as well such that child is never blamed for any lines.
692
692
693 See test-annotate.py for unit tests.
693 See test-annotate.py for unit tests.
694 '''
694 """
695 pblocks = [
695 pblocks = [
696 (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
696 (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
697 for parent in parents
697 for parent in parents
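
As the two dagop hunks above show, quote normalization covers raw docstrings too: the r prefix survives and only the quote characters change, so escape-sensitive text inside the docstring stays byte-for-byte identical. Sketch:

    def annotatepair_sketch():
        r"""A raw docstring: 20.8b1 keeps the r prefix and literal
        backslash sequences such as \d, changing only the quote characters."""

    print(annotatepair_sketch.__doc__)
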
@@ -425,7 +425,7 b' def dagtext('
425 usedots=False,
425 usedots=False,
426 maxlinewidth=70,
426 maxlinewidth=70,
427 ):
427 ):
428 '''generates lines of a textual representation for a dag event stream
428 """generates lines of a textual representation for a dag event stream
429
429
430 events should generate what parsedag() does, so:
430 events should generate what parsedag() does, so:
431
431
@@ -501,7 +501,7 b' def dagtext('
501 >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
501 >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
502 '+1 :f +1 :p2 *f */p2'
502 '+1 :f +1 :p2 *f */p2'
503
503
504 '''
504 """
505 return b"\n".join(
505 return b"\n".join(
506 dagtextlines(
506 dagtextlines(
507 dag,
507 dag,
@@ -1062,11 +1062,14 b' def debugdiscovery(ui, repo, remoteurl=b'
1062
1062
1063
1063
1064 @command(
1064 @command(
1065 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1065 b'debugdownload',
1066 [
1067 (b'o', b'output', b'', _(b'path')),
1068 ],
1069 optionalrepo=True,
1066 )
1070 )
1067 def debugdownload(ui, repo, url, output=None, **opts):
1071 def debugdownload(ui, repo, url, output=None, **opts):
1068 """download a resource using Mercurial logic and config
1072 """download a resource using Mercurial logic and config"""
1069 """
1070 fh = urlmod.open(ui, url, output)
1073 fh = urlmod.open(ui, url, output)
1071
1074
1072 dest = ui
1075 dest = ui
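
The debugdownload hunk combines the single-line docstring join with the magic trailing comma on a list literal: [(b'o', b'output', ...),] becomes a one-entry-per-line list inside the decorator call. A self-contained sketch, with a hypothetical no-op command decorator standing in for mercurial.registrar and plain bytes where the real code wraps strings in _() for translation:

    def command(name, opts, optionalrepo=False):
        # Hypothetical stand-in for Mercurial's @command registrar.
        def deco(fn):
            return fn

        return deco

    @command(
        b'debugdownload',
        [
            (b'o', b'output', b'', b'path'),  # trailing comma keeps the list open
        ],
        optionalrepo=True,
    )
    def debugdownload_sketch(ui, repo, url, output=None, **opts):
        """download a resource using Mercurial logic and config"""
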
@@ -1510,10 +1513,10 b' def debugindexstats(ui, repo):'
1510
1513
1511 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1514 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1512 def debuginstall(ui, **opts):
1515 def debuginstall(ui, **opts):
1513 '''test Mercurial installation
1516 """test Mercurial installation
1514
1517
1515 Returns 0 on success.
1518 Returns 0 on success.
1516 '''
1519 """
1517 opts = pycompat.byteskwargs(opts)
1520 opts = pycompat.byteskwargs(opts)
1518
1521
1519 problems = 0
1522 problems = 0
@@ -2173,8 +2176,7 b' def debugnamecomplete(ui, repo, *args):'
2173 ],
2176 ],
2174 )
2177 )
2175 def debugnodemap(ui, repo, **opts):
2178 def debugnodemap(ui, repo, **opts):
2176 """write and inspect on disk nodemap
2179 """write and inspect on disk nodemap"""
2177 """
2178 if opts['dump_new']:
2180 if opts['dump_new']:
2179 unfi = repo.unfiltered()
2181 unfi = repo.unfiltered()
2180 cl = unfi.changelog
2182 cl = unfi.changelog
@@ -2402,13 +2404,13 b' def debugp1copies(ui, repo, **opts):'
2402 _(b'FILESPEC...'),
2404 _(b'FILESPEC...'),
2403 )
2405 )
2404 def debugpathcomplete(ui, repo, *specs, **opts):
2406 def debugpathcomplete(ui, repo, *specs, **opts):
2405 '''complete part or all of a tracked path
2407 """complete part or all of a tracked path
2406
2408
2407 This command supports shells that offer path name completion. It
2409 This command supports shells that offer path name completion. It
2408 currently completes only files already known to the dirstate.
2410 currently completes only files already known to the dirstate.
2409
2411
2410 Completion extends only to the next path segment unless
2412 Completion extends only to the next path segment unless
2411 --full is specified, in which case entire paths are used.'''
2413 --full is specified, in which case entire paths are used."""
2412
2414
2413 def complete(path, acceptable):
2415 def complete(path, acceptable):
2414 dirstate = repo.dirstate
2416 dirstate = repo.dirstate
@@ -2587,13 +2589,13 b' def debugpickmergetool(ui, repo, *pats, '
2587
2589
2588 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2590 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2589 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2591 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2590 '''access the pushkey key/value protocol
2592 """access the pushkey key/value protocol
2591
2593
2592 With two args, list the keys in the given namespace.
2594 With two args, list the keys in the given namespace.
2593
2595
2594 With five args, set a key to new if it currently is set to old.
2596 With five args, set a key to new if it currently is set to old.
2595 Reports success or failure.
2597 Reports success or failure.
2596 '''
2598 """
2597
2599
2598 target = hg.peer(ui, {}, repopath)
2600 target = hg.peer(ui, {}, repopath)
2599 if keyinfo:
2601 if keyinfo:
@@ -3432,7 +3434,7 b' def debugsidedata(ui, repo, file_, rev=N'
3432
3434
3433 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3435 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3434 def debugssl(ui, repo, source=None, **opts):
3436 def debugssl(ui, repo, source=None, **opts):
3435 '''test a secure connection to a server
3437 """test a secure connection to a server
3436
3438
3437 This builds the certificate chain for the server on Windows, installing the
3439 This builds the certificate chain for the server on Windows, installing the
3438 missing intermediates and trusted root via Windows Update if necessary. It
3440 missing intermediates and trusted root via Windows Update if necessary. It
@@ -3443,7 +3445,7 b' def debugssl(ui, repo, source=None, **op'
3443
3445
3444 If the update succeeds, retry the original operation. Otherwise, the cause
3446 If the update succeeds, retry the original operation. Otherwise, the cause
3445 of the SSL error is likely another issue.
3447 of the SSL error is likely another issue.
3446 '''
3448 """
3447 if not pycompat.iswindows:
3449 if not pycompat.iswindows:
3448 raise error.Abort(
3450 raise error.Abort(
3449 _(b'certificate chain building is only possible on Windows')
3451 _(b'certificate chain building is only possible on Windows')
@@ -3785,7 +3787,9 b' def debugtemplate(ui, repo, tmpl, **opts'
3785
3787
3786 @command(
3788 @command(
3787 b'debuguigetpass',
3789 b'debuguigetpass',
3788 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3790 [
3791 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3792 ],
3789 _(b'[-p TEXT]'),
3793 _(b'[-p TEXT]'),
3790 norepo=True,
3794 norepo=True,
3791 )
3795 )
@@ -3801,7 +3805,9 b" def debuguigetpass(ui, prompt=b''):"
3801
3805
3802 @command(
3806 @command(
3803 b'debuguiprompt',
3807 b'debuguiprompt',
3804 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3808 [
3809 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3810 ],
3805 _(b'[-p TEXT]'),
3811 _(b'[-p TEXT]'),
3806 norepo=True,
3812 norepo=True,
3807 )
3813 )
@@ -4314,7 +4320,10 b' def debugwireproto(ui, repo, path=None, '
4314 {
4320 {
4315 'loggingfh': ui,
4321 'loggingfh': ui,
4316 'loggingname': b's',
4322 'loggingname': b's',
4317 'loggingopts': {'logdata': True, 'logdataapis': False,},
4323 'loggingopts': {
4324 'logdata': True,
4325 'logdataapis': False,
4326 },
4318 }
4327 }
4319 )
4328 )
4320
4329
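
The same rule covers dict literals, as the loggingopts rewrite above shows: the trailing comma after 'logdataapis': False keeps the nested dict exploded. A sketch with None standing in for the real ui object:

    opts = {
        'loggingfh': None,  # stand-in for the ui object in the real call
        'loggingname': b's',
        'loggingopts': {
            'logdata': True,
            'logdataapis': False,
        },
    }
    print(opts['loggingopts'])
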
@@ -43,14 +43,14 b' def difffeatureopts('
43 formatchanging=False,
43 formatchanging=False,
44 configprefix=b'',
44 configprefix=b'',
45 ):
45 ):
46 '''return diffopts with only opted-in features parsed
46 """return diffopts with only opted-in features parsed
47
47
48 Features:
48 Features:
49 - git: git-style diffs
49 - git: git-style diffs
50 - whitespace: whitespace options like ignoreblanklines and ignorews
50 - whitespace: whitespace options like ignoreblanklines and ignorews
51 - formatchanging: options that will likely break or cause correctness issues
51 - formatchanging: options that will likely break or cause correctness issues
52 with most diff parsers
52 with most diff parsers
53 '''
53 """
54
54
55 def get(key, name=None, getter=ui.configbool, forceplain=None):
55 def get(key, name=None, getter=ui.configbool, forceplain=None):
56 if opts:
56 if opts:
@@ -74,12 +74,12 b' def _getfsnow(vfs):'
74 @interfaceutil.implementer(intdirstate.idirstate)
74 @interfaceutil.implementer(intdirstate.idirstate)
75 class dirstate(object):
75 class dirstate(object):
76 def __init__(self, opener, ui, root, validate, sparsematchfn):
76 def __init__(self, opener, ui, root, validate, sparsematchfn):
77 '''Create a new dirstate object.
77 """Create a new dirstate object.
78
78
79 opener is an open()-like callable that can be used to open the
79 opener is an open()-like callable that can be used to open the
80 dirstate file; root is the root of the directory tracked by
80 dirstate file; root is the root of the directory tracked by
81 the dirstate.
81 the dirstate.
82 '''
82 """
83 self._opener = opener
83 self._opener = opener
84 self._validate = validate
84 self._validate = validate
85 self._root = root
85 self._root = root
@@ -112,12 +112,12 b' class dirstate(object):'
112
112
113 @contextlib.contextmanager
113 @contextlib.contextmanager
114 def parentchange(self):
114 def parentchange(self):
115 '''Context manager for handling dirstate parents.
115 """Context manager for handling dirstate parents.
116
116
117 If an exception occurs in the scope of the context manager,
117 If an exception occurs in the scope of the context manager,
118 the incoherent dirstate won't be written when wlock is
118 the incoherent dirstate won't be written when wlock is
119 released.
119 released.
120 '''
120 """
121 self._parentwriters += 1
121 self._parentwriters += 1
122 yield
122 yield
123 # Typically we want the "undo" step of a context manager in a
123 # Typically we want the "undo" step of a context manager in a
@@ -128,9 +128,9 b' class dirstate(object):'
128 self._parentwriters -= 1
128 self._parentwriters -= 1
129
129
130 def pendingparentchange(self):
130 def pendingparentchange(self):
131 '''Returns true if the dirstate is in the middle of a set of changes
131 """Returns true if the dirstate is in the middle of a set of changes
132 that modify the dirstate parent.
132 that modify the dirstate parent.
133 '''
133 """
134 return self._parentwriters > 0
134 return self._parentwriters > 0
135
135
136 @propertycache
136 @propertycache
@@ -247,12 +247,12 b' class dirstate(object):'
247 return encoding.getcwd()
247 return encoding.getcwd()
248
248
249 def getcwd(self):
249 def getcwd(self):
250 '''Return the path from which a canonical path is calculated.
250 """Return the path from which a canonical path is calculated.
251
251
252 This path should be used to resolve file patterns or to convert
252 This path should be used to resolve file patterns or to convert
253 canonical paths back to file paths for display. It shouldn't be
253 canonical paths back to file paths for display. It shouldn't be
254 used to get real file paths. Use vfs functions instead.
254 used to get real file paths. Use vfs functions instead.
255 '''
255 """
256 cwd = self._cwd
256 cwd = self._cwd
257 if cwd == self._root:
257 if cwd == self._root:
258 return b''
258 return b''
@@ -275,7 +275,7 b' class dirstate(object):'
275 return path
275 return path
276
276
277 def __getitem__(self, key):
277 def __getitem__(self, key):
278 '''Return the current state of key (a filename) in the dirstate.
278 """Return the current state of key (a filename) in the dirstate.
279
279
280 States are:
280 States are:
281 n normal
281 n normal
@@ -283,7 +283,7 b' class dirstate(object):'
283 r marked for removal
283 r marked for removal
284 a marked for addition
284 a marked for addition
285 ? not tracked
285 ? not tracked
286 '''
286 """
287 return self._map.get(key, (b"?",))[0]
287 return self._map.get(key, (b"?",))[0]
288
288
289 def __contains__(self, key):
289 def __contains__(self, key):
@@ -370,11 +370,11 b' class dirstate(object):'
370 raise
370 raise
371
371
372 def invalidate(self):
372 def invalidate(self):
373 '''Causes the next access to reread the dirstate.
373 """Causes the next access to reread the dirstate.
374
374
375 This is different from localrepo.invalidatedirstate() because it always
375 This is different from localrepo.invalidatedirstate() because it always
376 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
376 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
377 check whether the dirstate has changed before rereading it.'''
377 check whether the dirstate has changed before rereading it."""
378
378
379 for a in ("_map", "_branch", "_ignore"):
379 for a in ("_map", "_branch", "_ignore"):
380 if a in self.__dict__:
380 if a in self.__dict__:
@@ -426,7 +426,7 b' class dirstate(object):'
426 self._map.addfile(f, oldstate, state, mode, size, mtime)
426 self._map.addfile(f, oldstate, state, mode, size, mtime)
427
427
428 def normal(self, f, parentfiledata=None):
428 def normal(self, f, parentfiledata=None):
429 '''Mark a file normal and clean.
429 """Mark a file normal and clean.
430
430
431 parentfiledata: (mode, size, mtime) of the clean file
431 parentfiledata: (mode, size, mtime) of the clean file
432
432
@@ -434,7 +434,7 b' class dirstate(object):'
434 size), as or close as possible from the point where we
434 size), as or close as possible from the point where we
435 determined the file was clean, to limit the risk of the
435 determined the file was clean, to limit the risk of the
436 file having been changed by an external process between the
436 file having been changed by an external process between the
437 moment where the file was determined to be clean and now.'''
437 moment where the file was determined to be clean and now."""
438 if parentfiledata:
438 if parentfiledata:
439 (mode, size, mtime) = parentfiledata
439 (mode, size, mtime) = parentfiledata
440 else:
440 else:
@@ -581,7 +581,7 b' class dirstate(object):'
581 return folded
581 return folded
582
582
583 def normalize(self, path, isknown=False, ignoremissing=False):
583 def normalize(self, path, isknown=False, ignoremissing=False):
584 '''
584 """
585 normalize the case of a pathname when on a casefolding filesystem
585 normalize the case of a pathname when on a casefolding filesystem
586
586
587 isknown specifies whether the filename came from walking the
587 isknown specifies whether the filename came from walking the
@@ -596,7 +596,7 b' class dirstate(object):'
596 - version of name already stored in the dirstate
596 - version of name already stored in the dirstate
597 - version of name stored on disk
597 - version of name stored on disk
598 - version provided via command arguments
598 - version provided via command arguments
599 '''
599 """
600
600
601 if self._checkcase:
601 if self._checkcase:
602 return self._normalize(path, isknown, ignoremissing)
602 return self._normalize(path, isknown, ignoremissing)
@@ -643,11 +643,11 b' class dirstate(object):'
643 self._dirty = True
643 self._dirty = True
644
644
645 def identity(self):
645 def identity(self):
646 '''Return identity of dirstate itself to detect changing in storage
646 """Return identity of dirstate itself to detect changing in storage
647
647
648 If identity of previous dirstate is equal to this, writing
648 If identity of previous dirstate is equal to this, writing
649 changes based on the former dirstate out can keep consistency.
649 changes based on the former dirstate out can keep consistency.
650 '''
650 """
651 return self._map.identity
651 return self._map.identity
652
652
653 def write(self, tr):
653 def write(self, tr):
@@ -769,14 +769,14 b' class dirstate(object):'
769 return (None, -1, b"")
769 return (None, -1, b"")
770
770
771 def _walkexplicit(self, match, subrepos):
771 def _walkexplicit(self, match, subrepos):
772 '''Get stat data about the files explicitly specified by match.
772 """Get stat data about the files explicitly specified by match.
773
773
774 Return a triple (results, dirsfound, dirsnotfound).
774 Return a triple (results, dirsfound, dirsnotfound).
775 - results is a mapping from filename to stat result. It also contains
775 - results is a mapping from filename to stat result. It also contains
776 listings mapping subrepos and .hg to None.
776 listings mapping subrepos and .hg to None.
777 - dirsfound is a list of files found to be directories.
777 - dirsfound is a list of files found to be directories.
778 - dirsnotfound is a list of files that the dirstate thinks are
778 - dirsnotfound is a list of files that the dirstate thinks are
779 directories and that were not found.'''
779 directories and that were not found."""
780
780
781 def badtype(mode):
781 def badtype(mode):
782 kind = _(b'unknown')
782 kind = _(b'unknown')
@@ -904,7 +904,7 b' class dirstate(object):'
904 return results, dirsfound, dirsnotfound
904 return results, dirsfound, dirsnotfound
905
905
906 def walk(self, match, subrepos, unknown, ignored, full=True):
906 def walk(self, match, subrepos, unknown, ignored, full=True):
907 '''
907 """
908 Walk recursively through the directory tree, finding all files
908 Walk recursively through the directory tree, finding all files
909 matched by match.
909 matched by match.
910
910
@@ -913,7 +913,7 b' class dirstate(object):'
913 Return a dict mapping filename to stat-like object (either
913 Return a dict mapping filename to stat-like object (either
914 mercurial.osutil.stat instance or return value of os.stat()).
914 mercurial.osutil.stat instance or return value of os.stat()).
915
915
916 '''
916 """
917 # full is a flag that extensions that hook into walk can use -- this
917 # full is a flag that extensions that hook into walk can use -- this
918 # implementation doesn't use it at all. This satisfies the contract
918 # implementation doesn't use it at all. This satisfies the contract
919 # because we only guarantee a "maybe".
919 # because we only guarantee a "maybe".
@@ -1168,7 +1168,7 b' class dirstate(object):'
1168 return (lookup, status)
1168 return (lookup, status)
1169
1169
1170 def status(self, match, subrepos, ignored, clean, unknown):
1170 def status(self, match, subrepos, ignored, clean, unknown):
1171 '''Determine the status of the working copy relative to the
1171 """Determine the status of the working copy relative to the
1172 dirstate and return a pair of (unsure, status), where status is of type
1172 dirstate and return a pair of (unsure, status), where status is of type
1173 scmutil.status and:
1173 scmutil.status and:
1174
1174
@@ -1182,7 +1182,7 b' class dirstate(object):'
1182 status.clean:
1182 status.clean:
1183 files that have definitely not been modified since the
1183 files that have definitely not been modified since the
1184 dirstate was written
1184 dirstate was written
1185 '''
1185 """
1186 listignored, listclean, listunknown = ignored, clean, unknown
1186 listignored, listclean, listunknown = ignored, clean, unknown
1187 lookup, modified, added, unknown, ignored = [], [], [], [], []
1187 lookup, modified, added, unknown, ignored = [], [], [], [], []
1188 removed, deleted, clean = [], [], []
1188 removed, deleted, clean = [], [], []
@@ -1305,9 +1305,9 b' class dirstate(object):'
1305 return (lookup, status)
1305 return (lookup, status)
1306
1306
1307 def matches(self, match):
1307 def matches(self, match):
1308 '''
1308 """
1309 return files in the dirstate (in whatever state) filtered by match
1309 return files in the dirstate (in whatever state) filtered by match
1310 '''
1310 """
1311 dmap = self._map
1311 dmap = self._map
1312 if rustmod is not None:
1312 if rustmod is not None:
1313 dmap = self._map._rustmap
1313 dmap = self._map._rustmap
@@ -17,7 +17,7 b' from . import ('
17
17
18
18
19 class dirstateguard(util.transactional):
19 class dirstateguard(util.transactional):
20 '''Restore dirstate at unexpected failure.
20 """Restore dirstate at unexpected failure.
21
21
22 At the construction, this class does:
22 At the construction, this class does:
23
23
@@ -28,7 +28,7 b' class dirstateguard(util.transactional):'
28 is invoked before ``close()``.
28 is invoked before ``close()``.
29
29
30 This just removes the backup file at ``close()`` before ``release()``.
30 This just removes the backup file at ``close()`` before ``release()``.
31 '''
31 """
32
32
33 def __init__(self, repo, name):
33 def __init__(self, repo, name):
34 self._repo = repo
34 self._repo = repo
@@ -75,7 +75,7 b' def findcommonincoming(repo, remote, hea'
75
75
76
76
77 class outgoing(object):
77 class outgoing(object):
78 '''Represents the result of a findcommonoutgoing() call.
78 """Represents the result of a findcommonoutgoing() call.
79
79
80 Members:
80 Members:
81
81
@@ -94,7 +94,7 b' class outgoing(object):'
94 remotely.
94 remotely.
95
95
96 Some members are computed on demand from the heads, unless provided upfront
96 Some members are computed on demand from the heads, unless provided upfront
97 by discovery.'''
97 by discovery."""
98
98
99 def __init__(
99 def __init__(
100 self, repo, commonheads=None, ancestorsof=None, missingroots=None
100 self, repo, commonheads=None, ancestorsof=None, missingroots=None
@@ -157,7 +157,7 b' class outgoing(object):'
157 def findcommonoutgoing(
157 def findcommonoutgoing(
158 repo, other, onlyheads=None, force=False, commoninc=None, portable=False
158 repo, other, onlyheads=None, force=False, commoninc=None, portable=False
159 ):
159 ):
160 '''Return an outgoing instance to identify the nodes present in repo but
160 """Return an outgoing instance to identify the nodes present in repo but
161 not in other.
161 not in other.
162
162
163 If onlyheads is given, only nodes ancestral to nodes in onlyheads
163 If onlyheads is given, only nodes ancestral to nodes in onlyheads
@@ -168,7 +168,7 b' def findcommonoutgoing('
168 findcommonincoming(repo, other, force) to avoid recomputing it here.
168 findcommonincoming(repo, other, force) to avoid recomputing it here.
169
169
170 If portable is given, compute more conservative common and ancestorsof,
170 If portable is given, compute more conservative common and ancestorsof,
171 to make bundles created from the instance more portable.'''
171 to make bundles created from the instance more portable."""
172 # declare an empty outgoing object to be filled later
172 # declare an empty outgoing object to be filled later
173 og = outgoing(repo, None, None)
173 og = outgoing(repo, None, None)
174
174
@@ -332,7 +332,10 b' def _nowarnheads(pushop):'
332
332
333 with remote.commandexecutor() as e:
333 with remote.commandexecutor() as e:
334 remotebookmarks = e.callcommand(
334 remotebookmarks = e.callcommand(
335 b'listkeys', {b'namespace': b'bookmarks',}
335 b'listkeys',
336 {
337 b'namespace': b'bookmarks',
338 },
336 ).result()
339 ).result()
337
340
338 bookmarkedheads = set()
341 bookmarkedheads = set()
@@ -470,7 +473,10 b' def checkheads(pushop):'
470 if branch not in (b'default', None):
473 if branch not in (b'default', None):
471 errormsg = _(
474 errormsg = _(
472 b"push creates new remote head %s on branch '%s'"
475 b"push creates new remote head %s on branch '%s'"
473 ) % (short(dhs[0]), branch,)
476 ) % (
477 short(dhs[0]),
478 branch,
479 )
474 elif repo[dhs[0]].bookmarks():
480 elif repo[dhs[0]].bookmarks():
475 errormsg = _(
481 errormsg = _(
476 b"push creates new remote head %s "
482 b"push creates new remote head %s "
@@ -519,10 +519,10 b' def aliasargs(fn, givenargs):'
519
519
520
520
521 def aliasinterpolate(name, args, cmd):
521 def aliasinterpolate(name, args, cmd):
522 '''interpolate args into cmd for shell aliases
522 """interpolate args into cmd for shell aliases
523
523
524 This also handles $0, $@ and "$@".
524 This also handles $0, $@ and "$@".
525 '''
525 """
526 # util.interpolate can't deal with "$@" (with quotes) because it's only
526 # util.interpolate can't deal with "$@" (with quotes) because it's only
527 # built to match prefix + patterns.
527 # built to match prefix + patterns.
528 replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
528 replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
@@ -630,12 +630,18 b' class cmdalias(object):'
630 except error.UnknownCommand:
630 except error.UnknownCommand:
631 self.badalias = _(
631 self.badalias = _(
632 b"alias '%s' resolves to unknown command '%s'"
632 b"alias '%s' resolves to unknown command '%s'"
633 ) % (self.name, cmd,)
633 ) % (
634 self.name,
635 cmd,
636 )
634 self.unknowncmd = True
637 self.unknowncmd = True
635 except error.AmbiguousCommand:
638 except error.AmbiguousCommand:
636 self.badalias = _(
639 self.badalias = _(
637 b"alias '%s' resolves to ambiguous command '%s'"
640 b"alias '%s' resolves to ambiguous command '%s'"
638 ) % (self.name, cmd,)
641 ) % (
642 self.name,
643 cmd,
644 )
639
645
640 def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
646 def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
641 # confine strings to be passed to i18n.gettext()
647 # confine strings to be passed to i18n.gettext()
@@ -113,8 +113,8 b" fallbackencoding = b'ISO-8859-1'"
113
113
114
114
115 class localstr(bytes):
115 class localstr(bytes):
116 '''This class allows strings that are unmodified to be
116 """This class allows strings that are unmodified to be
117 round-tripped to the local encoding and back'''
117 round-tripped to the local encoding and back"""
118
118
119 def __new__(cls, u, l):
119 def __new__(cls, u, l):
120 s = bytes.__new__(cls, l)
120 s = bytes.__new__(cls, l)
@@ -329,8 +329,8 b' def ucolwidth(d):'
329
329
330 def getcols(s, start, c):
330 def getcols(s, start, c):
331 # type: (bytes, int, int) -> bytes
331 # type: (bytes, int, int) -> bytes
332 '''Use colwidth to find a c-column substring of s starting at byte
332 """Use colwidth to find a c-column substring of s starting at byte
333 index start'''
333 index start"""
334 for x in pycompat.xrange(start + c, len(s)):
334 for x in pycompat.xrange(start + c, len(s)):
335 t = s[start:x]
335 t = s[start:x]
336 if colwidth(t) == c:
336 if colwidth(t) == c:
@@ -487,7 +487,7 b' def upperfallback(s):'
487
487
488
488
489 class normcasespecs(object):
489 class normcasespecs(object):
490 '''what a platform's normcase does to ASCII strings
490 """what a platform's normcase does to ASCII strings
491
491
492 This is specified per platform, and should be consistent with what normcase
492 This is specified per platform, and should be consistent with what normcase
493 on that platform actually does.
493 on that platform actually does.
@@ -496,7 +496,7 b' class normcasespecs(object):'
496 upper: normcase uppercases ASCII strings
496 upper: normcase uppercases ASCII strings
497 other: the fallback function should always be called
497 other: the fallback function should always be called
498
498
499 This should be kept in sync with normcase_spec in util.h.'''
499 This should be kept in sync with normcase_spec in util.h."""
500
500
501 lower = -1
501 lower = -1
502 upper = 1
502 upper = 1
@@ -505,7 +505,7 b' class normcasespecs(object):'
505
505
506 def jsonescape(s, paranoid=False):
506 def jsonescape(s, paranoid=False):
507 # type: (Any, Any) -> Any
507 # type: (Any, Any) -> Any
508 '''returns a string suitable for JSON
508 """returns a string suitable for JSON
509
509
510 JSON is problematic for us because it doesn't support non-Unicode
510 JSON is problematic for us because it doesn't support non-Unicode
511 bytes. To deal with this, we take the following approach:
511 bytes. To deal with this, we take the following approach:
@@ -547,7 +547,7 b' def jsonescape(s, paranoid=False):'
547 'non-BMP: \\\\ud834\\\\udd1e'
547 'non-BMP: \\\\ud834\\\\udd1e'
548 >>> jsonescape(b'<foo@example.org>', paranoid=True)
548 >>> jsonescape(b'<foo@example.org>', paranoid=True)
549 '\\\\u003cfoo@example.org\\\\u003e'
549 '\\\\u003cfoo@example.org\\\\u003e'
550 '''
550 """
551
551
552 u8chars = toutf8b(s)
552 u8chars = toutf8b(s)
553 try:
553 try:
@@ -569,11 +569,11 b' else:'
569
569
570 def getutf8char(s, pos):
570 def getutf8char(s, pos):
571 # type: (bytes, int) -> bytes
571 # type: (bytes, int) -> bytes
572 '''get the next full utf-8 character in the given string, starting at pos
572 """get the next full utf-8 character in the given string, starting at pos
573
573
574 Raises a UnicodeError if the given location does not start a valid
574 Raises a UnicodeError if the given location does not start a valid
575 utf-8 character.
575 utf-8 character.
576 '''
576 """
577
577
578 # find how many bytes to attempt decoding from first nibble
578 # find how many bytes to attempt decoding from first nibble
579 l = _utf8len[ord(s[pos : pos + 1]) >> 4]
579 l = _utf8len[ord(s[pos : pos + 1]) >> 4]
@@ -588,7 +588,7 b' def getutf8char(s, pos):'
588
588
589 def toutf8b(s):
589 def toutf8b(s):
590 # type: (bytes) -> bytes
590 # type: (bytes) -> bytes
591 '''convert a local, possibly-binary string into UTF-8b
591 """convert a local, possibly-binary string into UTF-8b
592
592
593 This is intended as a generic method to preserve data when working
593 This is intended as a generic method to preserve data when working
594 with schemes like JSON and XML that have no provision for
594 with schemes like JSON and XML that have no provision for
@@ -616,7 +616,7 b' def toutf8b(s):'
616 arbitrary bytes into an internal Unicode format that can be
616 arbitrary bytes into an internal Unicode format that can be
617 re-encoded back into the original. Here we are exposing the
617 re-encoded back into the original. Here we are exposing the
618 internal surrogate encoding as a UTF-8 string.)
618 internal surrogate encoding as a UTF-8 string.)
619 '''
619 """
620
620
621 if isinstance(s, localstr):
621 if isinstance(s, localstr):
622 # assume that the original UTF-8 sequence would never contain
622 # assume that the original UTF-8 sequence would never contain
@@ -657,7 +657,7 b' def toutf8b(s):'
657
657
658 def fromutf8b(s):
658 def fromutf8b(s):
659 # type: (bytes) -> bytes
659 # type: (bytes) -> bytes
660 '''Given a UTF-8b string, return a local, possibly-binary string.
660 """Given a UTF-8b string, return a local, possibly-binary string.
661
661
662 return the original binary string. This
662 return the original binary string. This
663 is a round-trip process for strings like filenames, but metadata
663 is a round-trip process for strings like filenames, but metadata
@@ -677,7 +677,7 b' def fromutf8b(s):'
677 True
677 True
678 >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
678 >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
679 True
679 True
680 '''
680 """
681
681
682 if isasciistr(s):
682 if isasciistr(s):
683 return s
683 return s
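
The surrogate-escape mechanism the toutf8b docstring compares itself to can be demonstrated standalone (this is Python's built-in error handler, not toutf8b/fromutf8b themselves):

    # 0xe9 on its own is invalid UTF-8; surrogateescape maps it to U+DCE9
    # on decode and back to the raw byte on encode, a lossless round trip.
    raw = b'caf\xe9'
    text = raw.decode('utf-8', 'surrogateescape')
    assert text == 'caf\udce9'
    assert text.encode('utf-8', 'surrogateescape') == raw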
@@ -394,8 +394,7 b' class UnsupportedMergeRecords(Abort):'
394
394
395
395
396 class UnknownVersion(Abort):
396 class UnknownVersion(Abort):
397 """generic exception for aborting from an encounter with an unknown version
397 """generic exception for aborting from an encounter with an unknown version"""
398 """
399
398
400 def __init__(self, msg, hint=None, version=None):
399 def __init__(self, msg, hint=None, version=None):
401 self.version = version
400 self.version = version
@@ -378,14 +378,14 b' def push('
378 publish=False,
378 publish=False,
379 opargs=None,
379 opargs=None,
380 ):
380 ):
381 '''Push outgoing changesets (limited by revs) from a local
381 """Push outgoing changesets (limited by revs) from a local
382 repository to remote. Return an integer:
382 repository to remote. Return an integer:
383 - None means nothing to push
383 - None means nothing to push
384 - 0 means HTTP error
384 - 0 means HTTP error
385 - 1 means we pushed and remote head count is unchanged *or*
385 - 1 means we pushed and remote head count is unchanged *or*
386 we have outgoing changesets but refused to push
386 we have outgoing changesets but refused to push
387 - other values as described by addchangegroup()
387 - other values as described by addchangegroup()
388 '''
388 """
389 if opargs is None:
389 if opargs is None:
390 opargs = {}
390 opargs = {}
391 pushop = pushoperation(
391 pushop = pushoperation(
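
A hedged helper sketch (not part of Mercurial) translating the return values documented above into human-readable summaries:

    def describe_push_result(ret):
        if ret is None:
            return 'nothing to push'
        if ret == 0:
            return 'HTTP error'
        if ret == 1:
            return 'pushed; remote head count unchanged, or push refused'
        return 'addchangegroup() result: %r' % ret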
@@ -1510,8 +1510,8 b' def _fullpullbundle2(repo, pullop):'
1510
1510
1511
1511
1512 def add_confirm_callback(repo, pullop):
1512 def add_confirm_callback(repo, pullop):
1513 """ adds a finalize callback to transaction which can be used to show stats
1513 """adds a finalize callback to transaction which can be used to show stats
1514 to user and confirm the pull before committing transaction """
1514 to user and confirm the pull before committing transaction"""
1515
1515
1516 tr = pullop.trmanager.transaction()
1516 tr = pullop.trmanager.transaction()
1517 scmutil.registersummarycallback(
1517 scmutil.registersummarycallback(
@@ -1892,7 +1892,11 b' def _pullchangeset(pullop):'
1892 elif pullop.heads is None:
1892 elif pullop.heads is None:
1893 with pullop.remote.commandexecutor() as e:
1893 with pullop.remote.commandexecutor() as e:
1894 cg = e.callcommand(
1894 cg = e.callcommand(
1895 b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
1895 b'changegroup',
1896 {
1897 b'nodes': pullop.fetch,
1898 b'source': b'pull',
1899 },
1896 ).result()
1900 ).result()
1897
1901
1898 elif not pullop.remote.capable(b'changegroupsubset'):
1902 elif not pullop.remote.capable(b'changegroupsubset'):
@@ -89,7 +89,10 b' def pull(pullop):'
89 continue
89 continue
90
90
91 phases.advanceboundary(
91 phases.advanceboundary(
92 repo, tr, phasenumber, csetres[b'nodesbyphase'][phase],
92 repo,
93 tr,
94 phasenumber,
95 csetres[b'nodesbyphase'][phase],
93 )
96 )
94
97
95 # Write bookmark updates.
98 # Write bookmark updates.
@@ -189,7 +192,10 b' def _checkuserawstorefiledata(pullop):'
189 def _fetchrawstorefiles(repo, remote):
192 def _fetchrawstorefiles(repo, remote):
190 with remote.commandexecutor() as e:
193 with remote.commandexecutor() as e:
191 objs = e.callcommand(
194 objs = e.callcommand(
192 b'rawstorefiledata', {b'files': [b'changelog', b'manifestlog'],}
195 b'rawstorefiledata',
196 {
197 b'files': [b'changelog', b'manifestlog'],
198 },
193 ).result()
199 ).result()
194
200
195 # First object is a summary of files data that follows.
201 # First object is a summary of files data that follows.
@@ -746,7 +752,10 b' def _fetchfilesfromcsets('
746 with remote.commandexecutor() as e:
752 with remote.commandexecutor() as e:
747 args = {
753 args = {
748 b'revisions': [
754 b'revisions': [
749 {b'type': b'changesetexplicit', b'nodes': batch,}
755 {
756 b'type': b'changesetexplicit',
757 b'nodes': batch,
758 }
750 ],
759 ],
751 b'fields': fields,
760 b'fields': fields,
752 b'haveparents': haveparents,
761 b'haveparents': haveparents,
@@ -457,7 +457,7 b' def _loadextra(ui, newindex, extraloader'
457
457
458
458
459 def afterloaded(extension, callback):
459 def afterloaded(extension, callback):
460 '''Run the specified function after a named extension is loaded.
460 """Run the specified function after a named extension is loaded.
461
461
462 If the named extension is already loaded, the callback will be called
462 If the named extension is already loaded, the callback will be called
463 immediately.
463 immediately.
@@ -467,7 +467,7 b' def afterloaded(extension, callback):'
467
467
468 The callback receives the named argument ``loaded``, which is a boolean
468 The callback receives the named argument ``loaded``, which is a boolean
469 indicating whether the dependent extension actually loaded.
469 indicating whether the dependent extension actually loaded.
470 '''
470 """
471
471
472 if extension in _extensions:
472 if extension in _extensions:
473 # Report loaded as False if the extension is disabled
473 # Report loaded as False if the extension is disabled
@@ -500,12 +500,12 b' def populateui(ui):'
500
500
501
501
502 def bind(func, *args):
502 def bind(func, *args):
503 '''Partial function application
503 """Partial function application
504
504
505 Returns a new function that is the partial application of args and kwargs
505 Returns a new function that is the partial application of args and kwargs
506 to func. For example,
506 to func. For example,
507
507
508 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
508 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)"""
509 assert callable(func)
509 assert callable(func)
510
510
511 def closure(*a, **kw):
511 def closure(*a, **kw):
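
Completed as a runnable standalone sketch (the closure body shown here is the obvious completion, matching the docstring's own example):

    def bind(func, *args):
        assert callable(func)

        def closure(*a, **kw):
            # prepend the bound args to the call-time args
            return func(*(args + a), **kw)

        return closure

    def f(x, y, bar=None):
        return (x, y, bar)

    assert f(1, 2, bar=3) == bind(f, 1)(2, bar=3)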
@@ -618,7 +618,7 b' class wrappedfunction(object):'
618
618
619
619
620 def wrapfunction(container, funcname, wrapper):
620 def wrapfunction(container, funcname, wrapper):
621 '''Wrap the function named funcname in container
621 """Wrap the function named funcname in container
622
622
623 Replace the funcname member in the given container with the specified
623 Replace the funcname member in the given container with the specified
624 wrapper. The container is typically a module, class, or instance.
624 wrapper. The container is typically a module, class, or instance.
@@ -649,7 +649,7 b' def wrapfunction(container, funcname, wr'
649 work. Since you cannot control what other extensions are loaded by
649 work. Since you cannot control what other extensions are loaded by
650 your end users, you should play nicely with others by using the
650 your end users, you should play nicely with others by using the
651 subclass trick.
651 subclass trick.
652 '''
652 """
653 assert callable(wrapper)
653 assert callable(wrapper)
654
654
655 origfn = getattr(container, funcname)
655 origfn = getattr(container, funcname)
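
A toy standalone illustration of the convention described above, where the wrapper receives the original function as its first argument (names are illustrative, not extensions.wrapfunction itself):

    class Container(object):
        def greet(self, name):
            return 'hello %s' % name

    def uppercase_wrapper(orig, self, name):
        # call through to the original, then post-process its result
        return orig(self, name).upper()

    _orig = Container.greet
    Container.greet = lambda self, name: uppercase_wrapper(_orig, self, name)
    assert Container().greet('hg') == 'HELLO HG'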
@@ -668,7 +668,7 b' def wrapfunction(container, funcname, wr'
668
668
669
669
670 def unwrapfunction(container, funcname, wrapper=None):
670 def unwrapfunction(container, funcname, wrapper=None):
671 '''undo wrapfunction
671 """undo wrapfunction
672
672
673 If wrappers is None, undo the last wrap. Otherwise removes the wrapper
673 If wrappers is None, undo the last wrap. Otherwise removes the wrapper
674 from the chain of wrappers.
674 from the chain of wrappers.
@@ -676,7 +676,7 b' def unwrapfunction(container, funcname, '
676 Return the removed wrapper.
676 Return the removed wrapper.
677 Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
677 Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
678 wrapper is not None but is not found in the wrapper chain.
678 wrapper is not None but is not found in the wrapper chain.
679 '''
679 """
680 chain = getwrapperchain(container, funcname)
680 chain = getwrapperchain(container, funcname)
681 origfn = chain.pop()
681 origfn = chain.pop()
682 if wrapper is None:
682 if wrapper is None:
@@ -689,13 +689,13 b' def unwrapfunction(container, funcname, '
689
689
690
690
691 def getwrapperchain(container, funcname):
691 def getwrapperchain(container, funcname):
692 '''get a chain of wrappers of a function
692 """get a chain of wrappers of a function
693
693
694 Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
694 Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
695
695
696 The wrapper functions are the ones passed to wrapfunction, whose first
696 The wrapper functions are the ones passed to wrapfunction, whose first
697 argument is origfunc.
697 argument is origfunc.
698 '''
698 """
699 result = []
699 result = []
700 fn = getattr(container, funcname)
700 fn = getattr(container, funcname)
701 while fn:
701 while fn:
@@ -744,11 +744,11 b' def _disabledpaths():'
744
744
745
745
746 def _moduledoc(file):
746 def _moduledoc(file):
747 '''return the top-level python documentation for the given file
747 """return the top-level python documentation for the given file
748
748
749 Loosely inspired by pydoc.source_synopsis(), but rewritten to
749 Loosely inspired by pydoc.source_synopsis(), but rewritten to
750 handle triple quotes and to return the whole text instead of just
750 handle triple quotes and to return the whole text instead of just
751 the synopsis'''
751 the synopsis"""
752 result = []
752 result = []
753
753
754 line = file.readline()
754 line = file.readline()
@@ -883,8 +883,8 b' def _finddisabledcmd(ui, cmd, name, path'
883
883
884
884
885 def disabledcmd(ui, cmd, strict=False):
885 def disabledcmd(ui, cmd, strict=False):
886 '''find cmd from disabled extensions without importing.
886 """find cmd from disabled extensions without importing.
887 returns (cmdname, extname, doc)'''
887 returns (cmdname, extname, doc)"""
888
888
889 paths = _disabledpaths()
889 paths = _disabledpaths()
890 if not paths:
890 if not paths:
@@ -1232,8 +1232,7 b' def filemerge(repo, wctx, mynode, orig, '
1232
1232
1233
1233
1234 def loadinternalmerge(ui, extname, registrarobj):
1234 def loadinternalmerge(ui, extname, registrarobj):
1235 """Load internal merge tool from specified registrarobj
1235 """Load internal merge tool from specified registrarobj"""
1236 """
1237 for name, func in pycompat.iteritems(registrarobj._table):
1236 for name, func in pycompat.iteritems(registrarobj._table):
1238 fullname = b':' + name
1237 fullname = b':' + name
1239 internals[fullname] = func
1238 internals[fullname] = func
@@ -122,8 +122,7 b' predicate = registrar.filesetpredicate(s'
122
122
123 @predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
123 @predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
124 def modified(mctx, x):
124 def modified(mctx, x):
125 """File that is modified according to :hg:`status`.
125 """File that is modified according to :hg:`status`."""
126 """
127 # i18n: "modified" is a keyword
126 # i18n: "modified" is a keyword
128 getargs(x, 0, 0, _(b"modified takes no arguments"))
127 getargs(x, 0, 0, _(b"modified takes no arguments"))
129 s = set(mctx.status().modified)
128 s = set(mctx.status().modified)
@@ -132,8 +131,7 b' def modified(mctx, x):'
132
131
133 @predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
132 @predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
134 def added(mctx, x):
133 def added(mctx, x):
135 """File that is added according to :hg:`status`.
134 """File that is added according to :hg:`status`."""
136 """
137 # i18n: "added" is a keyword
135 # i18n: "added" is a keyword
138 getargs(x, 0, 0, _(b"added takes no arguments"))
136 getargs(x, 0, 0, _(b"added takes no arguments"))
139 s = set(mctx.status().added)
137 s = set(mctx.status().added)
@@ -142,8 +140,7 b' def added(mctx, x):'
142
140
143 @predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
141 @predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
144 def removed(mctx, x):
142 def removed(mctx, x):
145 """File that is removed according to :hg:`status`.
143 """File that is removed according to :hg:`status`."""
146 """
147 # i18n: "removed" is a keyword
144 # i18n: "removed" is a keyword
148 getargs(x, 0, 0, _(b"removed takes no arguments"))
145 getargs(x, 0, 0, _(b"removed takes no arguments"))
149 s = set(mctx.status().removed)
146 s = set(mctx.status().removed)
@@ -152,8 +149,7 b' def removed(mctx, x):'
152
149
153 @predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
150 @predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
154 def deleted(mctx, x):
151 def deleted(mctx, x):
155 """Alias for ``missing()``.
152 """Alias for ``missing()``."""
156 """
157 # i18n: "deleted" is a keyword
153 # i18n: "deleted" is a keyword
158 getargs(x, 0, 0, _(b"deleted takes no arguments"))
154 getargs(x, 0, 0, _(b"deleted takes no arguments"))
159 s = set(mctx.status().deleted)
155 s = set(mctx.status().deleted)
@@ -162,8 +158,7 b' def deleted(mctx, x):'
162
158
163 @predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
159 @predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
164 def missing(mctx, x):
160 def missing(mctx, x):
165 """File that is missing according to :hg:`status`.
161 """File that is missing according to :hg:`status`."""
166 """
167 # i18n: "missing" is a keyword
162 # i18n: "missing" is a keyword
168 getargs(x, 0, 0, _(b"missing takes no arguments"))
163 getargs(x, 0, 0, _(b"missing takes no arguments"))
169 s = set(mctx.status().deleted)
164 s = set(mctx.status().deleted)
@@ -190,8 +185,7 b' def ignored(mctx, x):'
190
185
191 @predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
186 @predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
192 def clean(mctx, x):
187 def clean(mctx, x):
193 """File that is clean according to :hg:`status`.
188 """File that is clean according to :hg:`status`."""
194 """
195 # i18n: "clean" is a keyword
189 # i18n: "clean" is a keyword
196 getargs(x, 0, 0, _(b"clean takes no arguments"))
190 getargs(x, 0, 0, _(b"clean takes no arguments"))
197 s = set(mctx.status().clean)
191 s = set(mctx.status().clean)
@@ -208,8 +202,7 b' def tracked(mctx, x):'
208
202
209 @predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
203 @predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
210 def binary(mctx, x):
204 def binary(mctx, x):
211 """File that appears to be binary (contains NUL bytes).
205 """File that appears to be binary (contains NUL bytes)."""
212 """
213 # i18n: "binary" is a keyword
206 # i18n: "binary" is a keyword
214 getargs(x, 0, 0, _(b"binary takes no arguments"))
207 getargs(x, 0, 0, _(b"binary takes no arguments"))
215 return mctx.fpredicate(
208 return mctx.fpredicate(
@@ -219,8 +212,7 b' def binary(mctx, x):'
219
212
220 @predicate(b'exec()')
213 @predicate(b'exec()')
221 def exec_(mctx, x):
214 def exec_(mctx, x):
222 """File that is marked as executable.
215 """File that is marked as executable."""
223 """
224 # i18n: "exec" is a keyword
216 # i18n: "exec" is a keyword
225 getargs(x, 0, 0, _(b"exec takes no arguments"))
217 getargs(x, 0, 0, _(b"exec takes no arguments"))
226 ctx = mctx.ctx
218 ctx = mctx.ctx
@@ -229,8 +221,7 b' def exec_(mctx, x):'
229
221
230 @predicate(b'symlink()')
222 @predicate(b'symlink()')
231 def symlink(mctx, x):
223 def symlink(mctx, x):
232 """File that is marked as a symlink.
224 """File that is marked as a symlink."""
233 """
234 # i18n: "symlink" is a keyword
225 # i18n: "symlink" is a keyword
235 getargs(x, 0, 0, _(b"symlink takes no arguments"))
226 getargs(x, 0, 0, _(b"symlink takes no arguments"))
236 ctx = mctx.ctx
227 ctx = mctx.ctx
@@ -239,8 +230,7 b' def symlink(mctx, x):'
239
230
240 @predicate(b'resolved()', weight=_WEIGHT_STATUS)
231 @predicate(b'resolved()', weight=_WEIGHT_STATUS)
241 def resolved(mctx, x):
232 def resolved(mctx, x):
242 """File that is marked resolved according to :hg:`resolve -l`.
233 """File that is marked resolved according to :hg:`resolve -l`."""
243 """
244 # i18n: "resolved" is a keyword
234 # i18n: "resolved" is a keyword
245 getargs(x, 0, 0, _(b"resolved takes no arguments"))
235 getargs(x, 0, 0, _(b"resolved takes no arguments"))
246 if mctx.ctx.rev() is not None:
236 if mctx.ctx.rev() is not None:
@@ -253,8 +243,7 b' def resolved(mctx, x):'
253
243
254 @predicate(b'unresolved()', weight=_WEIGHT_STATUS)
244 @predicate(b'unresolved()', weight=_WEIGHT_STATUS)
255 def unresolved(mctx, x):
245 def unresolved(mctx, x):
256 """File that is marked unresolved according to :hg:`resolve -l`.
246 """File that is marked unresolved according to :hg:`resolve -l`."""
257 """
258 # i18n: "unresolved" is a keyword
247 # i18n: "unresolved" is a keyword
259 getargs(x, 0, 0, _(b"unresolved takes no arguments"))
248 getargs(x, 0, 0, _(b"unresolved takes no arguments"))
260 if mctx.ctx.rev() is not None:
249 if mctx.ctx.rev() is not None:
@@ -267,8 +256,7 b' def unresolved(mctx, x):'
267
256
268 @predicate(b'hgignore()', weight=_WEIGHT_STATUS)
257 @predicate(b'hgignore()', weight=_WEIGHT_STATUS)
269 def hgignore(mctx, x):
258 def hgignore(mctx, x):
270 """File that matches the active .hgignore pattern.
259 """File that matches the active .hgignore pattern."""
271 """
272 # i18n: "hgignore" is a keyword
260 # i18n: "hgignore" is a keyword
273 getargs(x, 0, 0, _(b"hgignore takes no arguments"))
261 getargs(x, 0, 0, _(b"hgignore takes no arguments"))
274 return mctx.ctx.repo().dirstate._ignore
262 return mctx.ctx.repo().dirstate._ignore
@@ -288,8 +276,7 b' def portable(mctx, x):'
288
276
289 @predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
277 @predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
290 def grep(mctx, x):
278 def grep(mctx, x):
291 """File contains the given regular expression.
279 """File contains the given regular expression."""
292 """
293 try:
280 try:
294 # i18n: "grep" is a keyword
281 # i18n: "grep" is a keyword
295 r = re.compile(getstring(x, _(b"grep requires a pattern")))
282 r = re.compile(getstring(x, _(b"grep requires a pattern")))
@@ -414,8 +401,7 b' def eol(mctx, x):'
414
401
415 @predicate(b'copied()')
402 @predicate(b'copied()')
416 def copied(mctx, x):
403 def copied(mctx, x):
417 """File that is recorded as being copied.
404 """File that is recorded as being copied."""
418 """
419 # i18n: "copied" is a keyword
405 # i18n: "copied" is a keyword
420 getargs(x, 0, 0, _(b"copied takes no arguments"))
406 getargs(x, 0, 0, _(b"copied takes no arguments"))
421
407
@@ -476,8 +462,7 b' def status(mctx, x):'
476
462
477 @predicate(b'subrepo([pattern])')
463 @predicate(b'subrepo([pattern])')
478 def subrepo(mctx, x):
464 def subrepo(mctx, x):
479 """Subrepositories whose paths match the given pattern.
465 """Subrepositories whose paths match the given pattern."""
480 """
481 # i18n: "subrepo" is a keyword
466 # i18n: "subrepo" is a keyword
482 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
467 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
483 ctx = mctx.ctx
468 ctx = mctx.ctx
@@ -628,8 +613,7 b' def match(ctx, cwd, expr, badfn=None):'
628
613
629
614
630 def loadpredicate(ui, extname, registrarobj):
615 def loadpredicate(ui, extname, registrarobj):
631 """Load fileset predicates from specified registrarobj
616 """Load fileset predicates from specified registrarobj"""
632 """
633 for name, func in pycompat.iteritems(registrarobj._table):
617 for name, func in pycompat.iteritems(registrarobj._table):
634 symbols[name] = func
618 symbols[name] = func
635
619
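
A hedged sketch of what loadpredicate enables: an extension declares its own registrar table, and predicates registered there become usable in fileset expressions (the predicate below is illustrative; mctx.predicate is assumed to follow the same shape as the predicates above):

    from mercurial import registrar

    filesetpredicate = registrar.filesetpredicate()

    @filesetpredicate(b'alwaystrue()')
    def alwaystrue(mctx, x):
        """Matches every file (illustrative example predicate)."""
        return mctx.predicate(lambda f: True, predrepr=b'alwaystrue')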
@@ -698,10 +698,10 b' def help_('
698 fullname=None,
698 fullname=None,
699 **opts
699 **opts
700 ):
700 ):
701 '''
701 """
702 Generate the help for 'name' as unformatted restructured text. If
702 Generate the help for 'name' as unformatted restructured text. If
703 'name' is None, describe the commands available.
703 'name' is None, describe the commands available.
704 '''
704 """
705
705
706 opts = pycompat.byteskwargs(opts)
706 opts = pycompat.byteskwargs(opts)
707
707
@@ -243,7 +243,7 b' def peer(uiorrepo, opts, path, create=Fa'
243
243
244
244
245 def defaultdest(source):
245 def defaultdest(source):
246 '''return default destination of clone if none is given
246 """return default destination of clone if none is given
247
247
248 >>> defaultdest(b'foo')
248 >>> defaultdest(b'foo')
249 'foo'
249 'foo'
@@ -257,7 +257,7 b' def defaultdest(source):'
257 ''
257 ''
258 >>> defaultdest(b'http://example.org/foo/')
258 >>> defaultdest(b'http://example.org/foo/')
259 'foo'
259 'foo'
260 '''
260 """
261 path = util.url(source).path
261 path = util.url(source).path
262 if not path:
262 if not path:
263 return b''
263 return b''
@@ -333,7 +333,7 b' def share('
333
333
334
334
335 def _prependsourcehgrc(repo):
335 def _prependsourcehgrc(repo):
336 """ copies the source repo config and prepend it in current repo .hg/hgrc
336 """copies the source repo config and prepend it in current repo .hg/hgrc
337 on unshare. This is only done if the share was perfomed using share safe
337 on unshare. This is only done if the share was perfomed using share safe
338 method where we share config of source in shares"""
338 method where we share config of source in shares"""
339 srcvfs = vfsmod.vfs(repo.sharedpath)
339 srcvfs = vfsmod.vfs(repo.sharedpath)
@@ -443,10 +443,10 b' def _postshareupdate(repo, update, check'
443
443
444
444
445 def copystore(ui, srcrepo, destpath):
445 def copystore(ui, srcrepo, destpath):
446 '''copy files from store of srcrepo in destpath
446 """copy files from store of srcrepo in destpath
447
447
448 returns destlock
448 returns destlock
449 '''
449 """
450 destlock = None
450 destlock = None
451 try:
451 try:
452 hardlink = None
452 hardlink = None
@@ -517,7 +517,12 b' def clonewithshare('
517 for r in rev:
517 for r in rev:
518 with srcpeer.commandexecutor() as e:
518 with srcpeer.commandexecutor() as e:
519 remoterevs.append(
519 remoterevs.append(
520 e.callcommand(b'lookup', {b'key': r,}).result()
520 e.callcommand(
521 b'lookup',
522 {
523 b'key': r,
524 },
525 ).result()
521 )
526 )
522 revs = remoterevs
527 revs = remoterevs
523
528
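
The commandexecutor pattern in this hunk, as a hedged sketch (peer is assumed to be an existing peer object): callcommand enqueues a request and returns a future, and .result() blocks until the response arrives.

    with peer.commandexecutor() as e:
        node = e.callcommand(b'lookup', {b'key': b'tip'}).result()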
@@ -751,7 +756,10 b' def clone('
751 try:
756 try:
752 with srcpeer.commandexecutor() as e:
757 with srcpeer.commandexecutor() as e:
753 rootnode = e.callcommand(
758 rootnode = e.callcommand(
754 b'lookup', {b'key': b'0',}
759 b'lookup',
760 {
761 b'key': b'0',
762 },
755 ).result()
763 ).result()
756
764
757 if rootnode != node.nullid:
765 if rootnode != node.nullid:
@@ -900,7 +908,12 b' def clone('
900 for rev in revs:
908 for rev in revs:
901 with srcpeer.commandexecutor() as e:
909 with srcpeer.commandexecutor() as e:
902 remoterevs.append(
910 remoterevs.append(
903 e.callcommand(b'lookup', {b'key': rev,}).result()
911 e.callcommand(
912 b'lookup',
913 {
914 b'key': rev,
915 },
916 ).result()
904 )
917 )
905 revs = remoterevs
918 revs = remoterevs
906
919
@@ -974,7 +987,10 b' def clone('
974 if update is not True:
987 if update is not True:
975 with srcpeer.commandexecutor() as e:
988 with srcpeer.commandexecutor() as e:
976 checkout = e.callcommand(
989 checkout = e.callcommand(
977 b'lookup', {b'key': update,}
990 b'lookup',
991 {
992 b'key': update,
993 },
978 ).result()
994 ).result()
979
995
980 uprev = None
996 uprev = None
@@ -1176,7 +1192,10 b' def updatetotally(ui, repo, checkout, br'
1176
1192
1177
1193
1178 def merge(
1194 def merge(
1179 ctx, force=False, remind=True, labels=None,
1195 ctx,
1196 force=False,
1197 remind=True,
1198 labels=None,
1180 ):
1199 ):
1181 """Branch merge with node, resolving changes. Return true if any
1200 """Branch merge with node, resolving changes. Return true if any
1182 unresolved conflicts."""
1201 unresolved conflicts."""
@@ -27,7 +27,7 b' from . import ('
27
27
28
28
29 def hgweb(config, name=None, baseui=None):
29 def hgweb(config, name=None, baseui=None):
30 '''create an hgweb wsgi object
30 """create an hgweb wsgi object
31
31
32 config can be one of:
32 config can be one of:
33 - repo object (single repo view)
33 - repo object (single repo view)
@@ -35,7 +35,7 b' def hgweb(config, name=None, baseui=None'
35 - path to config file (multi-repo view)
35 - path to config file (multi-repo view)
36 - dict of virtual:real pairs (multi-repo view)
36 - dict of virtual:real pairs (multi-repo view)
37 - list of virtual:real tuples (multi-repo view)
37 - list of virtual:real tuples (multi-repo view)
38 '''
38 """
39
39
40 if isinstance(config, pycompat.unicode):
40 if isinstance(config, pycompat.unicode):
41 raise error.ProgrammingError(
41 raise error.ProgrammingError(
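
A hedged usage sketch of the config forms listed above (paths illustrative):

    from mercurial.hgweb import hgweb

    # single-repo view: path to a repository root
    application = hgweb(b'/srv/repos/foo')

    # multi-repo view: dict of virtual:real pairs
    application = hgweb({b'foo': b'/srv/repos/foo', b'bar': b'/srv/repos/bar'})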
@@ -51,9 +51,9 b' def ismember(ui, username, userlist):'
51
51
52
52
53 def checkauthz(hgweb, req, op):
53 def checkauthz(hgweb, req, op):
54 '''Check permission for operation based on request data (including
54 """Check permission for operation based on request data (including
55 authentication info). Return if op allowed, else raise an ErrorResponse
55 authentication info). Return if op allowed, else raise an ErrorResponse
56 exception.'''
56 exception."""
57
57
58 user = req.remoteuser
58 user = req.remoteuser
59
59
@@ -86,12 +86,12 b' def _stylemap(styles, path=None):'
86
86
87
87
88 def makebreadcrumb(url, prefix=b''):
88 def makebreadcrumb(url, prefix=b''):
89 '''Return a 'URL breadcrumb' list
89 """Return a 'URL breadcrumb' list
90
90
91 A 'URL breadcrumb' is a list of URL-name pairs,
91 A 'URL breadcrumb' is a list of URL-name pairs,
92 corresponding to each of the path items on a URL.
92 corresponding to each of the path items on a URL.
93 This can be used to create path navigation entries.
93 This can be used to create path navigation entries.
94 '''
94 """
95 if url.endswith(b'/'):
95 if url.endswith(b'/'):
96 url = url[:-1]
96 url = url[:-1]
97 if prefix:
97 if prefix:
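
A toy standalone version of the structure described above, one (url, name) entry per path segment (not hgweb's exact code):

    def breadcrumb(url):
        url = url.rstrip(b'/')
        parts = url.split(b'/')
        return [
            {b'url': b'/'.join(parts[: i + 1]), b'name': parts[i]}
            for i in range(1, len(parts))
        ]

    # breadcrumb(b'/repos/foo') ->
    #   [{b'url': b'/repos', b'name': b'repos'},
    #    {b'url': b'/repos/foo', b'name': b'foo'}]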
@@ -622,8 +622,8 b' class wsgiresponse(object):'
622
622
623
623
624 def wsgiapplication(app_maker):
624 def wsgiapplication(app_maker):
625 '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
625 """For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
626 can and should now be used as a WSGI application.'''
626 can and should now be used as a WSGI application."""
627 application = app_maker()
627 application = app_maker()
628
628
629 def run_wsgi(env, respond):
629 def run_wsgi(env, respond):
@@ -491,11 +491,11 b' def commonentry(repo, ctx):'
491
491
492
492
493 def changelistentry(web, ctx):
493 def changelistentry(web, ctx):
494 '''Obtain a dictionary to be used for entries in a changelist.
494 """Obtain a dictionary to be used for entries in a changelist.
495
495
496 This function is called when producing items for the "entries" list passed
496 This function is called when producing items for the "entries" list passed
497 to the "shortlog" and "changelog" templates.
497 to the "shortlog" and "changelog" templates.
498 '''
498 """
499 repo = web.repo
499 repo = web.repo
500 rev = ctx.rev()
500 rev = ctx.rev()
501 n = scmutil.binnode(ctx)
501 n = scmutil.binnode(ctx)
@@ -30,14 +30,14 b' from .utils import ('
30
30
31
31
32 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
32 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
33 '''call python hook. hook is callable object, looked up as
33 """call python hook. hook is callable object, looked up as
34 name in python module. if callable returns "true", hook
34 name in python module. if callable returns "true", hook
35 fails, else passes. if hook raises exception, treated as
35 fails, else passes. if hook raises exception, treated as
36 hook failure. exception propagates if throw is "true".
36 hook failure. exception propagates if throw is "true".
37
37
38 reason for "true" meaning "hook failed" is so that
38 reason for "true" meaning "hook failed" is so that
39 unmodified commands (e.g. mercurial.commands.update) can
39 unmodified commands (e.g. mercurial.commands.update) can
40 be run as hooks without wrappers to convert return values.'''
40 be run as hooks without wrappers to convert return values."""
41
41
42 if callable(funcname):
42 if callable(funcname):
43 obj = funcname
43 obj = funcname
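
A minimal sketch of the convention the docstring describes: a Python hook whose truthy return value marks failure (hook name and policy are illustrative):

    def forbid_multiple_heads(ui, repo, hooktype, **kwargs):
        if len(repo.heads()) > 1:
            ui.warn(b'refusing %s: repository has multiple heads\n' % hooktype)
            return True  # truthy -> hook failed, operation is blocked
        return False     # falsy -> hook passed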
@@ -766,7 +766,10 b' class httpv2executor(object):'
766 % _(b', ').join(sorted(permissions))
766 % _(b', ').join(sorted(permissions))
767 )
767 )
768
768
769 permission = {b'push': b'rw', b'pull': b'ro',}[permissions.pop()]
769 permission = {
770 b'push': b'rw',
771 b'pull': b'ro',
772 }[permissions.pop()]
770
773
771 handler, resp = sendv2request(
774 handler, resp = sendv2request(
772 self._ui,
775 self._ui,
@@ -942,7 +945,10 b' class httpv2peer(object):'
942 # Integer priority for the service. If we could choose from multiple
945 # Integer priority for the service. If we could choose from multiple
943 # services, we choose the one with the highest priority.
946 # services, we choose the one with the highest priority.
944 API_PEERS = {
947 API_PEERS = {
945 wireprototypes.HTTP_WIREPROTO_V2: {b'init': httpv2peer, b'priority': 50,},
948 wireprototypes.HTTP_WIREPROTO_V2: {
949 b'init': httpv2peer,
950 b'priority': 50,
951 },
946 }
952 }
947
953
948
954
@@ -9,12 +9,12 b' from . import util as interfaceutil'
9
9
10 class idirstate(interfaceutil.Interface):
10 class idirstate(interfaceutil.Interface):
11 def __init__(opener, ui, root, validate, sparsematchfn):
11 def __init__(opener, ui, root, validate, sparsematchfn):
12 '''Create a new dirstate object.
12 """Create a new dirstate object.
13
13
14 opener is an open()-like callable that can be used to open the
14 opener is an open()-like callable that can be used to open the
15 dirstate file; root is the root of the directory tracked by
15 dirstate file; root is the root of the directory tracked by
16 the dirstate.
16 the dirstate.
17 '''
17 """
18
18
19 # TODO: all these private methods and attributes should be made
19 # TODO: all these private methods and attributes should be made
20 # public or removed from the interface.
20 # public or removed from the interface.
@@ -31,17 +31,17 b' class idirstate(interfaceutil.Interface)'
31
31
32 @contextlib.contextmanager
32 @contextlib.contextmanager
33 def parentchange():
33 def parentchange():
34 '''Context manager for handling dirstate parents.
34 """Context manager for handling dirstate parents.
35
35
36 If an exception occurs in the scope of the context manager,
36 If an exception occurs in the scope of the context manager,
37 the incoherent dirstate won't be written when wlock is
37 the incoherent dirstate won't be written when wlock is
38 released.
38 released.
39 '''
39 """
40
40
41 def pendingparentchange():
41 def pendingparentchange():
42 '''Returns true if the dirstate is in the middle of a set of changes
42 """Returns true if the dirstate is in the middle of a set of changes
43 that modify the dirstate parent.
43 that modify the dirstate parent.
44 '''
44 """
45
45
46 def hasdir(d):
46 def hasdir(d):
47 pass
47 pass
@@ -50,18 +50,18 b' class idirstate(interfaceutil.Interface)'
50 pass
50 pass
51
51
52 def getcwd():
52 def getcwd():
53 '''Return the path from which a canonical path is calculated.
53 """Return the path from which a canonical path is calculated.
54
54
55 This path should be used to resolve file patterns or to convert
55 This path should be used to resolve file patterns or to convert
56 canonical paths back to file paths for display. It shouldn't be
56 canonical paths back to file paths for display. It shouldn't be
57 used to get real file paths. Use vfs functions instead.
57 used to get real file paths. Use vfs functions instead.
58 '''
58 """
59
59
60 def pathto(f, cwd=None):
60 def pathto(f, cwd=None):
61 pass
61 pass
62
62
63 def __getitem__(key):
63 def __getitem__(key):
64 '''Return the current state of key (a filename) in the dirstate.
64 """Return the current state of key (a filename) in the dirstate.
65
65
66 States are:
66 States are:
67 n normal
67 n normal
@@ -69,7 +69,7 b' class idirstate(interfaceutil.Interface)'
69 r marked for removal
69 r marked for removal
70 a marked for addition
70 a marked for addition
71 ? not tracked
71 ? not tracked
72 '''
72 """
73
73
74 def __contains__(key):
74 def __contains__(key):
75 """Check if bytestring `key` is known to the dirstate."""
75 """Check if bytestring `key` is known to the dirstate."""
@@ -111,11 +111,11 b' class idirstate(interfaceutil.Interface)'
111 pass
111 pass
112
112
113 def invalidate():
113 def invalidate():
114 '''Causes the next access to reread the dirstate.
114 """Causes the next access to reread the dirstate.
115
115
116 This is different from localrepo.invalidatedirstate() because it always
116 This is different from localrepo.invalidatedirstate() because it always
117 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
117 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
118 check whether the dirstate has changed before rereading it.'''
118 check whether the dirstate has changed before rereading it."""
119
119
120 def copy(source, dest):
120 def copy(source, dest):
121 """Mark dest as a copy of source. Unmark dest if source is None."""
121 """Mark dest as a copy of source. Unmark dest if source is None."""
@@ -127,7 +127,7 b' class idirstate(interfaceutil.Interface)'
127 pass
127 pass
128
128
129 def normal(f, parentfiledata=None):
129 def normal(f, parentfiledata=None):
130 '''Mark a file normal and clean.
130 """Mark a file normal and clean.
131
131
132 parentfiledata: (mode, size, mtime) of the clean file
132 parentfiledata: (mode, size, mtime) of the clean file
133
133
@@ -135,7 +135,7 b' class idirstate(interfaceutil.Interface)'
135 size), at or as close as possible to the point where we
135 size), at or as close as possible to the point where we
136 determined the file was clean, to limit the risk of the
136 determined the file was clean, to limit the risk of the
137 file having been changed by an external process between the
137 file having been changed by an external process between the
138 moment where the file was determined to be clean and now.'''
138 moment where the file was determined to be clean and now."""
139 pass
139 pass
140
140
141 def normallookup(f):
141 def normallookup(f):
@@ -157,7 +157,7 b' class idirstate(interfaceutil.Interface)'
157 '''Drop a file from the dirstate'''
157 '''Drop a file from the dirstate'''
158
158
159 def normalize(path, isknown=False, ignoremissing=False):
159 def normalize(path, isknown=False, ignoremissing=False):
160 '''
160 """
161 normalize the case of a pathname when on a casefolding filesystem
161 normalize the case of a pathname when on a casefolding filesystem
162
162
163 isknown specifies whether the filename came from walking the
163 isknown specifies whether the filename came from walking the
@@ -172,7 +172,7 b' class idirstate(interfaceutil.Interface)'
172 - version of name already stored in the dirstate
172 - version of name already stored in the dirstate
173 - version of name stored on disk
173 - version of name stored on disk
174 - version provided via command arguments
174 - version provided via command arguments
175 '''
175 """
176
176
177 def clear():
177 def clear():
178 pass
178 pass
@@ -181,11 +181,11 b' class idirstate(interfaceutil.Interface)'
181 pass
181 pass
182
182
183 def identity():
183 def identity():
184 '''Return the identity of the dirstate itself, to detect changes in storage
184 """Return the identity of the dirstate itself, to detect changes in storage
185
185
186 If identity of previous dirstate is equal to this, writing
186 If identity of previous dirstate is equal to this, writing
187 changes based on the former dirstate out can keep consistency.
187 changes based on the former dirstate out can keep consistency.
188 '''
188 """
189
189
190 def write(tr):
190 def write(tr):
191 pass
191 pass
@@ -201,7 +201,7 b' class idirstate(interfaceutil.Interface)'
201 """
201 """
202
202
203 def walk(match, subrepos, unknown, ignored, full=True):
203 def walk(match, subrepos, unknown, ignored, full=True):
204 '''
204 """
205 Walk recursively through the directory tree, finding all files
205 Walk recursively through the directory tree, finding all files
206 matched by match.
206 matched by match.
207
207
@@ -210,10 +210,10 b' class idirstate(interfaceutil.Interface)'
210 Return a dict mapping filename to stat-like object (either
210 Return a dict mapping filename to stat-like object (either
211 mercurial.osutil.stat instance or return value of os.stat()).
211 mercurial.osutil.stat instance or return value of os.stat()).
212
212
213 '''
213 """
214
214
215 def status(match, subrepos, ignored, clean, unknown):
215 def status(match, subrepos, ignored, clean, unknown):
216 '''Determine the status of the working copy relative to the
216 """Determine the status of the working copy relative to the
217 dirstate and return a pair of (unsure, status), where status is of type
217 dirstate and return a pair of (unsure, status), where status is of type
218 scmutil.status and:
218 scmutil.status and:
219
219
@@ -227,12 +227,12 b' class idirstate(interfaceutil.Interface)'
227 status.clean:
227 status.clean:
228 files that have definitely not been modified since the
228 files that have definitely not been modified since the
229 dirstate was written
229 dirstate was written
230 '''
230 """
231
231
232 def matches(match):
232 def matches(match):
233 '''
233 """
234 return files in the dirstate (in whatever state) filtered by match
234 return files in the dirstate (in whatever state) filtered by match
235 '''
235 """
236
236
237 def savebackup(tr, backupname):
237 def savebackup(tr, backupname):
238 '''Save current dirstate into backup file'''
238 '''Save current dirstate into backup file'''
@@ -617,7 +617,7 b' class ifiledata(interfaceutil.Interface)'
617 """
617 """
618
618
619 def revision(node, raw=False):
619 def revision(node, raw=False):
620 """"Obtain fulltext data for a node.
620 """ "Obtain fulltext data for a node.
621
621
622 By default, any storage transformations are applied before the data
622 By default, any storage transformations are applied before the data
623 is returned. If ``raw`` is True, non-raw storage transformations
623 is returned. If ``raw`` is True, non-raw storage transformations
@@ -628,8 +628,7 b' class ifiledata(interfaceutil.Interface)'
628 """
628 """
629
629
630 def rawdata(node):
630 def rawdata(node):
631 """Obtain raw data for a node.
631 """Obtain raw data for a node."""
632 """
633
632
634 def read(node):
633 def read(node):
635 """Resolve file fulltext data.
634 """Resolve file fulltext data.
@@ -112,7 +112,7 b' class ConnectionManager(object):'
112 """
112 """
113 The connection manager must be able to:
113 The connection manager must be able to:
114 * keep track of all existing connections
114 * keep track of all existing connections
115 """
115 """
116
116
117 def __init__(self):
117 def __init__(self):
118 self._lock = threading.Lock()
118 self._lock = threading.Lock()
@@ -675,8 +675,7 b' def safesend(self, str):'
675
675
676
676
677 def wrapgetresponse(cls):
677 def wrapgetresponse(cls):
678 """Wraps getresponse in cls with a broken-pipe sane version.
678 """Wraps getresponse in cls with a broken-pipe sane version."""
679 """
680
679
681 def safegetresponse(self):
680 def safegetresponse(self):
682 # In safesend() we might set the _broken_pipe_resp
681 # In safesend() we might set the _broken_pipe_resp
@@ -96,8 +96,7 b' urlreq = util.urlreq'
96
96
97
97
98 class _basefilecache(scmutil.filecache):
98 class _basefilecache(scmutil.filecache):
99 """All filecache usage on repo are done for logic that should be unfiltered
99 """All filecache usage on repo are done for logic that should be unfiltered"""
100 """
101
100
102 def __get__(self, repo, type=None):
101 def __get__(self, repo, type=None):
103 if repo is None:
102 if repo is None:
@@ -400,8 +399,8 b' class localpeer(repository.peer):'
400
399
401 @interfaceutil.implementer(repository.ipeerlegacycommands)
400 @interfaceutil.implementer(repository.ipeerlegacycommands)
402 class locallegacypeer(localpeer):
401 class locallegacypeer(localpeer):
403 '''peer extension which implements legacy methods too; used for tests with
402 """peer extension which implements legacy methods too; used for tests with
404 restricted capabilities'''
403 restricted capabilities"""
405
404
406 def __init__(self, repo):
405 def __init__(self, repo):
407 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
406 super(locallegacypeer, self).__init__(repo, caps=legacycaps)
@@ -440,7 +439,7 b' featuresetupfuncs = set()'
440
439
441
440
442 def _getsharedvfs(hgvfs, requirements):
441 def _getsharedvfs(hgvfs, requirements):
443 """ returns the vfs object pointing to root of shared source
442 """returns the vfs object pointing to root of shared source
444 repo for a shared repository
443 repo for a shared repository
445
444
446 hgvfs is vfs pointing at .hg/ of current repo (shared one)
445 hgvfs is vfs pointing at .hg/ of current repo (shared one)
@@ -465,7 +464,7 b' def _getsharedvfs(hgvfs, requirements):'
465
464
466
465
467 def _readrequires(vfs, allowmissing):
466 def _readrequires(vfs, allowmissing):
468 """ reads the require file present at root of this vfs
467 """reads the require file present at root of this vfs
469 and returns a set of requirements
468 and returns a set of requirements
470
469
471 If allowmissing is True, we suppress ENOENT if raised"""
470 If allowmissing is True, we suppress ENOENT if raised"""
@@ -1756,7 +1755,7 b' class localrepository(object):'
1756 return iter(self.changelog)
1755 return iter(self.changelog)
1757
1756
1758 def revs(self, expr, *args):
1757 def revs(self, expr, *args):
1759 '''Find revisions matching a revset.
1758 """Find revisions matching a revset.
1760
1759
1761 The revset is specified as a string ``expr`` that may contain
1760 The revset is specified as a string ``expr`` that may contain
1762 %-formatting to escape certain types. See ``revsetlang.formatspec``.
1761 %-formatting to escape certain types. See ``revsetlang.formatspec``.
@@ -1767,30 +1766,30 b' class localrepository(object):'
1767
1766
1768 Returns a smartset.abstractsmartset, which is a list-like interface
1767 Returns a smartset.abstractsmartset, which is a list-like interface
1769 that contains integer revisions.
1768 that contains integer revisions.
1770 '''
1769 """
1771 tree = revsetlang.spectree(expr, *args)
1770 tree = revsetlang.spectree(expr, *args)
1772 return revset.makematcher(tree)(self)
1771 return revset.makematcher(tree)(self)
1773
1772
1774 def set(self, expr, *args):
1773 def set(self, expr, *args):
1775 '''Find revisions matching a revset and emit changectx instances.
1774 """Find revisions matching a revset and emit changectx instances.
1776
1775
1777 This is a convenience wrapper around ``revs()`` that iterates the
1776 This is a convenience wrapper around ``revs()`` that iterates the
1778 result and is a generator of changectx instances.
1777 result and is a generator of changectx instances.
1779
1778
1780 Revset aliases from the configuration are not expanded. To expand
1779 Revset aliases from the configuration are not expanded. To expand
1781 user aliases, consider calling ``scmutil.revrange()``.
1780 user aliases, consider calling ``scmutil.revrange()``.
1782 '''
1781 """
1783 for r in self.revs(expr, *args):
1782 for r in self.revs(expr, *args):
1784 yield self[r]
1783 yield self[r]
1785
1784
1786 def anyrevs(self, specs, user=False, localalias=None):
1785 def anyrevs(self, specs, user=False, localalias=None):
1787 '''Find revisions matching one of the given revsets.
1786 """Find revisions matching one of the given revsets.
1788
1787
1789 Revset aliases from the configuration are not expanded by default. To
1788 Revset aliases from the configuration are not expanded by default. To
1790 expand user aliases, specify ``user=True``. To provide some local
1789 expand user aliases, specify ``user=True``. To provide some local
1791 definitions overriding user aliases, set ``localalias`` to
1790 definitions overriding user aliases, set ``localalias`` to
1792 ``{name: definitionstring}``.
1791 ``{name: definitionstring}``.
1793 '''
1792 """
1794 if specs == [b'null']:
1793 if specs == [b'null']:
1795 return revset.baseset([nullrev])
1794 return revset.baseset([nullrev])
1796 if specs == [b'.']:
1795 if specs == [b'.']:
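
A hedged usage sketch of the %-formatting described above (repo is assumed to be an open localrepository; the types follow revsetlang.formatspec, e.g. %d for an int, %s for a string, %ld for a list of ints):

    for ctx in repo.set(b'heads(%ld)', [0, 1, 2]):
        print(ctx.hex())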
@@ -1822,8 +1821,8 b' class localrepository(object):'
1822
1821
1823 @filteredpropertycache
1822 @filteredpropertycache
1824 def _tagscache(self):
1823 def _tagscache(self):
1825 '''Returns a tagscache object that contains various tags related
1824 """Returns a tagscache object that contains various tags related
1826 caches.'''
1825 caches."""
1827
1826
1828 # This simplifies its cache management by having one decorated
1827 # This simplifies its cache management by having one decorated
1829 # function (this one) and the rest simply fetch things from it.
1828 # function (this one) and the rest simply fetch things from it.
@@ -1861,12 +1860,12 b' class localrepository(object):'
1861 return t
1860 return t
1862
1861
1863 def _findtags(self):
1862 def _findtags(self):
1864 '''Do the hard work of finding tags. Return a pair of dicts
1863 """Do the hard work of finding tags. Return a pair of dicts
1865 (tags, tagtypes) where tags maps tag name to node, and tagtypes
1864 (tags, tagtypes) where tags maps tag name to node, and tagtypes
1866 maps tag name to a string like \'global\' or \'local\'.
1865 maps tag name to a string like \'global\' or \'local\'.
1867 Subclasses or extensions are free to add their own tags, but
1866 Subclasses or extensions are free to add their own tags, but
1868 should be aware that the returned dicts will be retained for the
1867 should be aware that the returned dicts will be retained for the
1869 duration of the localrepo object.'''
1868 duration of the localrepo object."""
1870
1869
1871 # XXX what tagtype should subclasses/extensions use? Currently
1870 # XXX what tagtype should subclasses/extensions use? Currently
1872 # mq and bookmarks add tags, but do not set the tagtype at all.
1871 # mq and bookmarks add tags, but do not set the tagtype at all.
@@ -1897,13 +1896,13 b' class localrepository(object):'
1897 return (tags, tagtypes)
1896 return (tags, tagtypes)
1898
1897
1899 def tagtype(self, tagname):
1898 def tagtype(self, tagname):
1900 '''
1899 """
1901 return the type of the given tag. result can be:
1900 return the type of the given tag. result can be:
1902
1901
1903 'local' : a local tag
1902 'local' : a local tag
1904 'global' : a global tag
1903 'global' : a global tag
1905 None : tag does not exist
1904 None : tag does not exist
1906 '''
1905 """
1907
1906
1908 return self._tagscache.tagtypes.get(tagname)
1907 return self._tagscache.tagtypes.get(tagname)
1909
1908
@@ -1933,8 +1932,8 b' class localrepository(object):'
1933 return self._bookmarks.names(node)
1932 return self._bookmarks.names(node)
1934
1933
1935 def branchmap(self):
1934 def branchmap(self):
1936 '''returns a dictionary {branch: [branchheads]} with branchheads
1935 """returns a dictionary {branch: [branchheads]} with branchheads
1937 ordered by increasing revision number'''
1936 ordered by increasing revision number"""
1938 return self._branchcaches[self]
1937 return self._branchcaches[self]
1939
1938
1940 @unfilteredmethod
1939 @unfilteredmethod
@@ -1944,13 +1943,13 b' class localrepository(object):'
1944 return self._revbranchcache
1943 return self._revbranchcache
1945
1944
1946 def branchtip(self, branch, ignoremissing=False):
1945 def branchtip(self, branch, ignoremissing=False):
1947 '''return the tip node for a given branch
1946 """return the tip node for a given branch
1948
1947
1949 If ignoremissing is True, then this method will not raise an error.
1948 If ignoremissing is True, then this method will not raise an error.
1950 This is helpful for callers that only expect None for a missing branch
1949 This is helpful for callers that only expect None for a missing branch
1951 (e.g. namespace).
1950 (e.g. namespace).
1952
1951
1953 '''
1952 """
1954 try:
1953 try:
1955 return self.branchmap().branchtip(branch)
1954 return self.branchmap().branchtip(branch)
1956 except KeyError:
1955 except KeyError:
@@ -2014,7 +2013,7 b' class localrepository(object):'
2014
2013
2015 def filectx(self, path, changeid=None, fileid=None, changectx=None):
2014 def filectx(self, path, changeid=None, fileid=None, changectx=None):
2016 """changeid must be a changeset revision, if specified.
2015 """changeid must be a changeset revision, if specified.
2017 fileid can be a file revision or node."""
2016 fileid can be a file revision or node."""
2018 return context.filectx(
2017 return context.filectx(
2019 self, path, changeid, fileid, changectx=changectx
2018 self, path, changeid, fileid, changectx=changectx
2020 )
2019 )
@@ -2311,8 +2310,7 b' class localrepository(object):'
2311 tr.addfinalize(b'flush-fncache', self.store.write)
2310 tr.addfinalize(b'flush-fncache', self.store.write)
2312
2311
2313 def txnclosehook(tr2):
2312 def txnclosehook(tr2):
2314 """To be run if transaction is successful, will schedule a hook run
2313 """To be run if transaction is successful, will schedule a hook run"""
2315 """
2316 # Don't reference tr2 in hook() so we don't hold a reference.
2314 # Don't reference tr2 in hook() so we don't hold a reference.
2317 # This reduces memory consumption when there are multiple
2315 # This reduces memory consumption when there are multiple
2318 # transactions per lock. This can likely go away if issue5045
2316 # transactions per lock. This can likely go away if issue5045
@@ -2362,8 +2360,7 b' class localrepository(object):'
2362 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2360 tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
2363
2361
2364 def txnaborthook(tr2):
2362 def txnaborthook(tr2):
2365 """To be run if transaction is aborted
2363 """To be run if transaction is aborted"""
2366 """
2367 reporef().hook(
2364 reporef().hook(
2368 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2365 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
2369 )
2366 )
@@ -2620,14 +2617,14 b' class localrepository(object):'
2620 self._quick_access_changeid_invalidate()
2617 self._quick_access_changeid_invalidate()
2621
2618
2622 def invalidatedirstate(self):
2619 def invalidatedirstate(self):
2623 '''Invalidates the dirstate, causing the next call to dirstate
2620 """Invalidates the dirstate, causing the next call to dirstate
2624 to check if it was modified since the last time it was read,
2621 to check if it was modified since the last time it was read,
2625 rereading it if it has.
2622 rereading it if it has.
2626
2623
2627 This is different to dirstate.invalidate() in that it doesn't always
2624 This is different to dirstate.invalidate() in that it doesn't always
2628 reread the dirstate. Use dirstate.invalidate() if you want to
2625 reread the dirstate. Use dirstate.invalidate() if you want to
2629 explicitly read the dirstate again (i.e. restoring it to a previous
2626 explicitly read the dirstate again (i.e. restoring it to a previous
2630 known good state).'''
2627 known good state)."""
2631 if hasunfilteredcache(self, 'dirstate'):
2628 if hasunfilteredcache(self, 'dirstate'):
2632 for k in self.dirstate._filecache:
2629 for k in self.dirstate._filecache:
2633 try:
2630 try:
@@ -2637,13 +2634,13 b' class localrepository(object):'
2637 delattr(self.unfiltered(), 'dirstate')
2634 delattr(self.unfiltered(), 'dirstate')
2638
2635
2639 def invalidate(self, clearfilecache=False):
2636 def invalidate(self, clearfilecache=False):
2640 '''Invalidates both store and non-store parts other than dirstate
2637 """Invalidates both store and non-store parts other than dirstate
2641
2638
2642 If a transaction is running, invalidation of store is omitted,
2639 If a transaction is running, invalidation of store is omitted,
2643 because discarding in-memory changes might cause inconsistency
2640 because discarding in-memory changes might cause inconsistency
2644 (e.g. incomplete fncache causes unintentional failure, but
2641 (e.g. incomplete fncache causes unintentional failure, but
2645 redundant one doesn't).
2642 redundant one doesn't).
2646 '''
2643 """
2647 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2644 unfiltered = self.unfiltered() # all file caches are stored unfiltered
2648 for k in list(self._filecache.keys()):
2645 for k in list(self._filecache.keys()):
2649 # dirstate is invalidated separately in invalidatedirstate()
2646 # dirstate is invalidated separately in invalidatedirstate()
@@ -2673,8 +2670,8 b' class localrepository(object):'
2673 self.store.invalidatecaches()
2670 self.store.invalidatecaches()
2674
2671
2675 def invalidateall(self):
2672 def invalidateall(self):
2676 '''Fully invalidates both store and non-store parts, causing the
2673 """Fully invalidates both store and non-store parts, causing the
2677 subsequent operation to reread any outside changes.'''
2674 subsequent operation to reread any outside changes."""
2678 # extension should hook this to invalidate its caches
2675 # extension should hook this to invalidate its caches
2679 self.invalidate()
2676 self.invalidate()
2680 self.invalidatedirstate()
2677 self.invalidatedirstate()
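
Per the comment above, extensions are expected to hook invalidateall() to drop their own caches. A sketch of how an extension might do that with extensions.wrapfunction; the cache attribute is made up for illustration:

    from mercurial import extensions, localrepo

    def _invalidateall(orig, repo):
        repo._mycache = None   # hypothetical extension-private cache
        return orig(repo)

    def uisetup(ui):
        extensions.wrapfunction(
            localrepo.localrepository, 'invalidateall', _invalidateall
        )
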
@@ -2689,7 +2686,13 b' class localrepository(object):'
2689 ce.refresh()
2686 ce.refresh()
2690
2687
2691 def _lock(
2688 def _lock(
2692 self, vfs, lockname, wait, releasefn, acquirefn, desc,
2689 self,
2690 vfs,
2691 lockname,
2692 wait,
2693 releasefn,
2694 acquirefn,
2695 desc,
2693 ):
2696 ):
2694 timeout = 0
2697 timeout = 0
2695 warntimeout = 0
2698 warntimeout = 0
@@ -2726,12 +2729,12 b' class localrepository(object):'
2726 callback(True)
2729 callback(True)
2727
2730
2728 def lock(self, wait=True):
2731 def lock(self, wait=True):
2729 '''Lock the repository store (.hg/store) and return a weak reference
2732 """Lock the repository store (.hg/store) and return a weak reference
2730 to the lock. Use this before modifying the store (e.g. committing or
2733 to the lock. Use this before modifying the store (e.g. committing or
2731 stripping). If you are opening a transaction, get a lock as well.
2734 stripping). If you are opening a transaction, get a lock as well.
2732
2735
2733 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2736 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2734 'wlock' first to avoid a dead-lock hazard.'''
2737 'wlock' first to avoid a dead-lock hazard."""
2735 l = self._currentlock(self._lockref)
2738 l = self._currentlock(self._lockref)
2736 if l is not None:
2739 if l is not None:
2737 l.lock()
2740 l.lock()
@@ -2749,13 +2752,13 b' class localrepository(object):'
2749 return l
2752 return l
2750
2753
2751 def wlock(self, wait=True):
2754 def wlock(self, wait=True):
2752 '''Lock the non-store parts of the repository (everything under
2755 """Lock the non-store parts of the repository (everything under
2753 .hg except .hg/store) and return a weak reference to the lock.
2756 .hg except .hg/store) and return a weak reference to the lock.
2754
2757
2755 Use this before modifying files in .hg.
2758 Use this before modifying files in .hg.
2756
2759
2757 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2760 If both 'lock' and 'wlock' must be acquired, ensure you always acquire
2758 'wlock' first to avoid a dead-lock hazard.'''
2761 'wlock' first to avoid a dead-lock hazard."""
2759 l = self._wlockref and self._wlockref()
2762 l = self._wlockref and self._wlockref()
2760 if l is not None and l.held:
2763 if l is not None and l.held:
2761 l.lock()
2764 l.lock()
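
Both docstrings repeat the same ordering rule. A caller that needs both locks should look roughly like this (a hedged sketch, not a quote from the codebase):

    with repo.wlock():        # non-store lock first...
        with repo.lock():     # ...then the store lock, avoiding the deadlock
            with repo.transaction(b'example') as tr:
                pass          # modify the store here
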
@@ -2963,7 +2966,7 b' class localrepository(object):'
2963
2966
2964 @unfilteredmethod
2967 @unfilteredmethod
2965 def destroying(self):
2968 def destroying(self):
2966 '''Inform the repository that nodes are about to be destroyed.
2969 """Inform the repository that nodes are about to be destroyed.
2967 Intended for use by strip and rollback, so there's a common
2970 Intended for use by strip and rollback, so there's a common
2968 place for anything that has to be done before destroying history.
2971 place for anything that has to be done before destroying history.
2969
2972
@@ -2972,7 +2975,7 b' class localrepository(object):'
2972 destroyed is imminent, the repo will be invalidated causing those
2975 destroyed is imminent, the repo will be invalidated causing those
2973 changes to stay in memory (waiting for the next unlock), or vanish
2976 changes to stay in memory (waiting for the next unlock), or vanish
2974 completely.
2977 completely.
2975 '''
2978 """
2976 # When using the same lock to commit and strip, the phasecache is left
2979 # When using the same lock to commit and strip, the phasecache is left
2977 # dirty after committing. Then when we strip, the repo is invalidated,
2980 # dirty after committing. Then when we strip, the repo is invalidated,
2978 # causing those changes to disappear.
2981 # causing those changes to disappear.
@@ -2981,10 +2984,10 b' class localrepository(object):'
2981
2984
2982 @unfilteredmethod
2985 @unfilteredmethod
2983 def destroyed(self):
2986 def destroyed(self):
2984 '''Inform the repository that nodes have been destroyed.
2987 """Inform the repository that nodes have been destroyed.
2985 Intended for use by strip and rollback, so there's a common
2988 Intended for use by strip and rollback, so there's a common
2986 place for anything that has to be done after destroying history.
2989 place for anything that has to be done after destroying history.
2987 '''
2990 """
2988 # When one tries to:
2991 # When one tries to:
2989 # 1) destroy nodes thus calling this method (e.g. strip)
2992 # 1) destroy nodes thus calling this method (e.g. strip)
2990 # 2) use phasecache somewhere (e.g. commit)
2993 # 2) use phasecache somewhere (e.g. commit)
@@ -3067,13 +3070,13 b' class localrepository(object):'
3067 return sorted(heads, key=self.changelog.rev, reverse=True)
3070 return sorted(heads, key=self.changelog.rev, reverse=True)
3068
3071
3069 def branchheads(self, branch=None, start=None, closed=False):
3072 def branchheads(self, branch=None, start=None, closed=False):
3070 '''return a (possibly filtered) list of heads for the given branch
3073 """return a (possibly filtered) list of heads for the given branch
3071
3074
3072 Heads are returned in topological order, from newest to oldest.
3075 Heads are returned in topological order, from newest to oldest.
3073 If branch is None, use the dirstate branch.
3076 If branch is None, use the dirstate branch.
3074 If start is not None, return only heads reachable from start.
3077 If start is not None, return only heads reachable from start.
3075 If closed is True, return heads that are marked as closed as well.
3078 If closed is True, return heads that are marked as closed as well.
3076 '''
3079 """
3077 if branch is None:
3080 if branch is None:
3078 branch = self[None].branch()
3081 branch = self[None].branch()
3079 branches = self.branchmap()
3082 branches = self.branchmap()
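
A hypothetical call matching the docstring's contract, returning heads of the 'default' branch, newest first, including closed ones:

    heads = repo.branchheads(b'default', closed=True)  # topological order
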
@@ -3352,10 +3355,10 b' def newreporequirements(ui, createopts):'
3352
3355
3353
3356
3354 def checkrequirementscompat(ui, requirements):
3357 def checkrequirementscompat(ui, requirements):
3355 """ Checks compatibility of repository requirements enabled and disabled.
3358 """Checks compatibility of repository requirements enabled and disabled.
3356
3359
3357 Returns a set of requirements which need to be dropped because dependent
3360 Returns a set of requirements which need to be dropped because dependent
3358 requirements are not enabled. Also warns users about it. """
3361 requirements are not enabled. Also warns users about it."""
3359
3362
3360 dropped = set()
3363 dropped = set()
3361
3364
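
A minimal sketch of the dependency check the docstring describes; the requirement names and the dependency table are invented for illustration:

    REQUIREMENT_DEPS = {
        b'exp-sharesafe': {b'store'},   # hypothetical: needs 'store' enabled
    }

    def check_compat(requirements):
        dropped = set()
        for req, deps in REQUIREMENT_DEPS.items():
            if req in requirements and not deps <= requirements:
                dropped.add(req)        # dependency missing: drop and warn
        return dropped
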
@@ -175,14 +175,14 b' def trylock(ui, vfs, lockname, timeout, '
175
175
176
176
177 class lock(object):
177 class lock(object):
178 '''An advisory lock held by one process to control access to a set
178 """An advisory lock held by one process to control access to a set
179 of files. Non-cooperating processes or incorrectly written scripts
179 of files. Non-cooperating processes or incorrectly written scripts
180 can ignore Mercurial's locking scheme and stomp all over the
180 can ignore Mercurial's locking scheme and stomp all over the
181 repository, so don't do that.
181 repository, so don't do that.
182
182
183 Typically used via localrepository.lock() to lock the repository
183 Typically used via localrepository.lock() to lock the repository
184 store (.hg/store/) or localrepository.wlock() to lock everything
184 store (.hg/store/) or localrepository.wlock() to lock everything
185 else under .hg/.'''
185 else under .hg/."""
186
186
187 # lock is symlink on platforms that support it, file on others.
187 # lock is symlink on platforms that support it, file on others.
188
188
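
The comment above notes that the lock is a symlink where the platform supports it. A toy illustration of that advisory scheme (heavily simplified; Mercurial's lock.py additionally handles retries, timeouts, and stale locks):

    import os

    def trylock_demo(lockpath, holder_info):
        try:
            # creating the symlink is atomic; its target names the holder
            os.symlink(holder_info, lockpath)
            return True
        except FileExistsError:
            return False   # somebody else holds the lock

    def unlock_demo(lockpath):
        os.unlink(lockpath)
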
@@ -417,8 +417,7 b' class changesetprinter(object):'
417 )
417 )
418
418
419 def _exthook(self, ctx):
419 def _exthook(self, ctx):
420 '''empty method used by extensions as a hook point
420 """empty method used by extensions as a hook point"""
421 '''
422
421
423 def _showpatch(self, ctx, graphwidth=0):
422 def _showpatch(self, ctx, graphwidth=0):
424 if self._includestat:
423 if self._includestat:
@@ -509,13 +508,13 b' class changesetformatter(changesetprinte'
509
508
510
509
511 class changesettemplater(changesetprinter):
510 class changesettemplater(changesetprinter):
512 '''format changeset information.
511 """format changeset information.
513
512
514 Note: there are a variety of convenience functions to build a
513 Note: there are a variety of convenience functions to build a
515 changesettemplater for common cases. See functions such as:
514 changesettemplater for common cases. See functions such as:
516 maketemplater, changesetdisplayer, buildcommittemplate, or other
515 maketemplater, changesetdisplayer, buildcommittemplate, or other
517 functions that use changeset_templater.
516 functions that use changeset_templater.
518 '''
517 """
519
518
520 # Arguments before "buffered" used to be positional. Consider not
519 # Arguments before "buffered" used to be positional. Consider not
521 # adding/removing arguments before "buffered" to not break callers.
520 # adding/removing arguments before "buffered" to not break callers.
@@ -141,7 +141,10 b' def pullremotenames(localrepo, remoterep'
141
141
142 with remoterepo.commandexecutor() as e:
142 with remoterepo.commandexecutor() as e:
143 bookmarks = e.callcommand(
143 bookmarks = e.callcommand(
144 b'listkeys', {b'namespace': b'bookmarks',}
144 b'listkeys',
145 {
146 b'namespace': b'bookmarks',
147 },
145 ).result()
148 ).result()
146
149
147 # on a push, we don't want to keep obsolete heads since
150 # on a push, we don't want to keep obsolete heads since
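
The callcommand() result above is a future; the executor lets callers batch several wire-protocol calls before collecting results. A hedged usage sketch:

    with remoterepo.commandexecutor() as e:
        heads_fut = e.callcommand(b'heads', {})
        marks_fut = e.callcommand(b'listkeys', {b'namespace': b'bookmarks'})
    # results may be fetched in one round trip, depending on the peer
    heads = heads_fut.result()
    bookmarks = marks_fut.result()
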
@@ -44,10 +44,10 b' if pycompat.TYPE_CHECKING:'
44
44
45
45
46 class STARTTLS(smtplib.SMTP):
46 class STARTTLS(smtplib.SMTP):
47 '''Derived class to verify the peer certificate for STARTTLS.
47 """Derived class to verify the peer certificate for STARTTLS.
48
48
49 This class allows passing any keyword arguments to SSL socket creation.
49 This class allows passing any keyword arguments to SSL socket creation.
50 '''
50 """
51
51
52 def __init__(self, ui, host=None, **kwargs):
52 def __init__(self, ui, host=None, **kwargs):
53 smtplib.SMTP.__init__(self, **kwargs)
53 smtplib.SMTP.__init__(self, **kwargs)
@@ -76,10 +76,10 b' class STARTTLS(smtplib.SMTP):'
76
76
77
77
78 class SMTPS(smtplib.SMTP):
78 class SMTPS(smtplib.SMTP):
79 '''Derived class to verify the peer certificate for SMTPS.
79 """Derived class to verify the peer certificate for SMTPS.
80
80
81 This class allows passing any keyword arguments to SSL socket creation.
81 This class allows passing any keyword arguments to SSL socket creation.
82 '''
82 """
83
83
84 def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
84 def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
85 self.keyfile = keyfile
85 self.keyfile = keyfile
@@ -221,8 +221,8 b' def _mbox(mbox, sender, recipients, msg)'
221
221
222
222
223 def connect(ui, mbox=None):
223 def connect(ui, mbox=None):
224 '''make a mail connection. return a function to send mail.
224 """make a mail connection. return a function to send mail.
225 call as sendmail(sender, list-of-recipients, msg).'''
225 call as sendmail(sender, list-of-recipients, msg)."""
226 if mbox:
226 if mbox:
227 open(mbox, b'wb').close()
227 open(mbox, b'wb').close()
228 return lambda s, r, m: _mbox(mbox, s, r, m)
228 return lambda s, r, m: _mbox(mbox, s, r, m)
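
So connect() returns the same kind of callable regardless of transport (mbox file or SMTP). A hypothetical caller:

    sendmail = connect(ui)              # or connect(ui, mbox=b'outgoing.mbox')
    sendmail(b'me@example.com', [b'you@example.com'], msg_bytes)
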
@@ -267,11 +267,11 b' def codec2iana(cs):'
267
267
268 def mimetextpatch(s, subtype='plain', display=False):
268 def mimetextpatch(s, subtype='plain', display=False):
269 # type: (bytes, str, bool) -> email.message.Message
269 # type: (bytes, str, bool) -> email.message.Message
270 '''Return MIME message suitable for a patch.
270 """Return MIME message suitable for a patch.
271 Charset will be detected by first trying to decode as us-ascii, then utf-8,
271 Charset will be detected by first trying to decode as us-ascii, then utf-8,
272 and finally the global encodings. If all those fail, fall back to
272 and finally the global encodings. If all those fail, fall back to
273 ISO-8859-1, an encoding with that allows all byte sequences.
273 ISO-8859-1, an encoding with that allows all byte sequences.
274 Transfer encodings will be used if necessary.'''
274 Transfer encodings will be used if necessary."""
275
275
276 cs = [
276 cs = [
277 'us-ascii',
277 'us-ascii',
@@ -293,9 +293,9 b" def mimetextpatch(s, subtype='plain', di"
293
293
294 def mimetextqp(body, subtype, charset):
294 def mimetextqp(body, subtype, charset):
295 # type: (bytes, str, str) -> email.message.Message
295 # type: (bytes, str, str) -> email.message.Message
296 '''Return MIME message.
296 """Return MIME message.
297 Quoted-printable transfer encoding will be used if necessary.
297 Quoted-printable transfer encoding will be used if necessary.
298 '''
298 """
299 cs = email.charset.Charset(charset)
299 cs = email.charset.Charset(charset)
300 msg = email.message.Message()
300 msg = email.message.Message()
301 msg.set_type('text/' + subtype)
301 msg.set_type('text/' + subtype)
@@ -337,11 +337,11 b' def _charsets(ui):'
337
337
338 def _encode(ui, s, charsets):
338 def _encode(ui, s, charsets):
339 # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
339 # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
340 '''Returns (converted) string, charset tuple.
340 """Returns (converted) string, charset tuple.
341 Finds the best charset by cycling through sendcharsets in descending
341 Finds the best charset by cycling through sendcharsets in descending
342 order. Tries both encoding and fallbackencoding for input. Only as a
342 order. Tries both encoding and fallbackencoding for input. Only as a
343 last resort, send as is in fake ascii.
343 last resort, send as is in fake ascii.
344 Caveat: Do not use for mail parts containing patches!'''
344 Caveat: Do not use for mail parts containing patches!"""
345 sendcharsets = charsets or _charsets(ui)
345 sendcharsets = charsets or _charsets(ui)
346 if not isinstance(s, bytes):
346 if not isinstance(s, bytes):
347 # We have unicode data, which we need to try and encode to
347 # We have unicode data, which we need to try and encode to
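
A simplified sketch of the cycling strategy the docstring describes (the real _encode also consults encoding.encoding and fallbackencoding for the input side):

    def encode_best(u, sendcharsets):
        # u: text; try each outgoing charset in order of preference
        for cs in sendcharsets:
            try:
                return u.encode(cs), cs
            except (UnicodeEncodeError, LookupError):
                continue
        return u.encode('ascii', 'replace'), 'us-ascii'  # last resort
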
@@ -427,9 +427,9 b' def addressencode(ui, address, charsets='
427
427
428 def addrlistencode(ui, addrs, charsets=None, display=False):
428 def addrlistencode(ui, addrs, charsets=None, display=False):
429 # type: (Any, List[bytes], List[str], bool) -> List[str]
429 # type: (Any, List[bytes], List[str], bool) -> List[str]
430 '''Turns a list of addresses into a list of RFC-2047 compliant headers.
430 """Turns a list of addresses into a list of RFC-2047 compliant headers.
431 A single element of the input list may contain multiple addresses; the output
431 A single element of the input list may contain multiple addresses; the output
432 always has one address per item.'''
432 always has one address per item."""
433 straddrs = []
433 straddrs = []
434 for a in addrs:
434 for a in addrs:
435 assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
435 assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
@@ -447,8 +447,8 b' def addrlistencode(ui, addrs, charsets=N'
447
447
448 def mimeencode(ui, s, charsets=None, display=False):
448 def mimeencode(ui, s, charsets=None, display=False):
449 # type: (Any, bytes, List[str], bool) -> email.message.Message
449 # type: (Any, bytes, List[str], bool) -> email.message.Message
450 '''creates mime text object, encodes it if needed, and sets
450 """creates mime text object, encodes it if needed, and sets
451 charset and transfer-encoding accordingly.'''
451 charset and transfer-encoding accordingly."""
452 cs = 'us-ascii'
452 cs = 'us-ascii'
453 if not display:
453 if not display:
454 s, cs = _encode(ui, s, charsets)
454 s, cs = _encode(ui, s, charsets)
@@ -528,8 +528,8 b' class manifestdict(object):'
528 return dir in self._dirs
528 return dir in self._dirs
529
529
530 def _filesfastpath(self, match):
530 def _filesfastpath(self, match):
531 '''Checks whether we can correctly and quickly iterate over matcher
531 """Checks whether we can correctly and quickly iterate over matcher
532 files instead of over manifest files.'''
532 files instead of over manifest files."""
533 files = match.files()
533 files = match.files()
534 return len(files) < 100 and (
534 return len(files) < 100 and (
535 match.isexact()
535 match.isexact()
@@ -537,13 +537,13 b' class manifestdict(object):'
537 )
537 )
538
538
539 def walk(self, match):
539 def walk(self, match):
540 '''Generates matching file names.
540 """Generates matching file names.
541
541
542 Equivalent to manifest.matches(match).iterkeys(), but without creating
542 Equivalent to manifest.matches(match).iterkeys(), but without creating
543 an entirely new manifest.
543 an entirely new manifest.
544
544
545 It also reports nonexistent files by marking them bad with match.bad().
545 It also reports nonexistent files by marking them bad with match.bad().
546 '''
546 """
547 if match.always():
547 if match.always():
548 for f in iter(self):
548 for f in iter(self):
549 yield f
549 yield f
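
A hedged usage sketch of walk(): iterate the files of the working parent's manifest under one directory, collecting misses through the matcher's bad() callback:

    from mercurial import match as matchmod

    missing = []
    m = repo[b'.'].manifest()
    matcher = matchmod.match(
        repo.root, b'', [b'path:src'],
        badfn=lambda f, msg: missing.append(f),
    )
    files = list(m.walk(matcher))
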
@@ -591,7 +591,7 b' class manifestdict(object):'
591 return m
591 return m
592
592
593 def diff(self, m2, match=None, clean=False):
593 def diff(self, m2, match=None, clean=False):
594 '''Finds changes between the current manifest and m2.
594 """Finds changes between the current manifest and m2.
595
595
596 Args:
596 Args:
597 m2: the manifest to which this manifest should be compared.
597 m2: the manifest to which this manifest should be compared.
@@ -604,7 +604,7 b' class manifestdict(object):'
604 in the current/other manifest. Where the file does not exist,
604 in the current/other manifest. Where the file does not exist,
605 the nodeid will be None and the flags will be the empty
605 the nodeid will be None and the flags will be the empty
606 string.
606 string.
607 '''
607 """
608 if match:
608 if match:
609 m1 = self._matches(match)
609 m1 = self._matches(match)
610 m2 = m2._matches(match)
610 m2 = m2._matches(match)
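
Consuming the documented return shape, file -> ((n1, fl1), (n2, fl2)) with None marking absence, might look like this small hypothetical classifier:

    for f, ((n1, fl1), (n2, fl2)) in m1.diff(m2).items():
        if n1 is None:
            status = 'added in m2'
        elif n2 is None:
            status = 'removed in m2'
        else:
            status = 'modified'
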
@@ -703,14 +703,14 b' class manifestdict(object):'
703
703
704
704
705 def _msearch(m, s, lo=0, hi=None):
705 def _msearch(m, s, lo=0, hi=None):
706 '''return a tuple (start, end) that says where to find s within m.
706 """return a tuple (start, end) that says where to find s within m.
707
707
708 If the string is found, m[start:end] is the line containing
708 If the string is found, m[start:end] is the line containing
709 that string. If start == end, the string was not found and
709 that string. If start == end, the string was not found and
710 they indicate the proper sorted insertion point.
710 they indicate the proper sorted insertion point.
711
711
712 m should be a buffer, a memoryview or a byte string.
712 m should be a buffer, a memoryview or a byte string.
713 s is a byte string'''
713 s is a byte string"""
714
714
715 def advance(i, c):
715 def advance(i, c):
716 while i < lenm and m[i : i + 1] != c:
716 while i < lenm and m[i : i + 1] != c:
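
A linear-scan toy that honors the documented (start, end) contract (_msearch itself bisects over the sorted manifest text; this is only an illustration):

    def find_line(m, s):
        # m: sorted b'name\0hex\n' lines ending in a newline
        # s: b'name\0' prefix to look for
        start = 0
        while start < len(m):
            end = m.index(b'\n', start) + 1
            line = m[start:end]
            if line.startswith(s):
                return start, end      # found: m[start:end] is the line
            if line > s:
                return start, start    # not found: sorted insertion point
            start = end
        return len(m), len(m)
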
@@ -909,14 +909,14 b' class treemanifest(object):'
909 )
909 )
910
910
911 def dir(self):
911 def dir(self):
912 '''The directory that this tree manifest represents, including a
912 """The directory that this tree manifest represents, including a
913 trailing '/'. Empty string for the repo root directory.'''
913 trailing '/'. Empty string for the repo root directory."""
914 return self._dir
914 return self._dir
915
915
916 def node(self):
916 def node(self):
917 '''The node of this instance. nullid for unsaved instances. Should
917 """The node of this instance. nullid for unsaved instances. Should
918 be updated when the instance is read from or written to a revlog.
918 be updated when the instance is read from or written to a revlog.
919 '''
919 """
920 assert not self._dirty
920 assert not self._dirty
921 return self._node
921 return self._node
922
922
@@ -1157,10 +1157,10 b' class treemanifest(object):'
1157 return dirslash in self._dirs or dirslash in self._lazydirs
1157 return dirslash in self._dirs or dirslash in self._lazydirs
1158
1158
1159 def walk(self, match):
1159 def walk(self, match):
1160 '''Generates matching file names.
1160 """Generates matching file names.
1161
1161
1162 It also reports nonexistent files by marking them bad with match.bad().
1162 It also reports nonexistent files by marking them bad with match.bad().
1163 '''
1163 """
1164 if match.always():
1164 if match.always():
1165 for f in iter(self):
1165 for f in iter(self):
1166 yield f
1166 yield f
@@ -1202,8 +1202,7 b' class treemanifest(object):'
1202 yield f
1202 yield f
1203
1203
1204 def _matches(self, match):
1204 def _matches(self, match):
1205 '''recursively generate a new manifest filtered by the match argument.
1205 """recursively generate a new manifest filtered by the match argument."""
1206 '''
1207 if match.always():
1206 if match.always():
1208 return self.copy()
1207 return self.copy()
1209 return self._matches_inner(match)
1208 return self._matches_inner(match)
@@ -1253,7 +1252,7 b' class treemanifest(object):'
1253 raise FastdeltaUnavailable()
1252 raise FastdeltaUnavailable()
1254
1253
1255 def diff(self, m2, match=None, clean=False):
1254 def diff(self, m2, match=None, clean=False):
1256 '''Finds changes between the current manifest and m2.
1255 """Finds changes between the current manifest and m2.
1257
1256
1258 Args:
1257 Args:
1259 m2: the manifest to which this manifest should be compared.
1258 m2: the manifest to which this manifest should be compared.
@@ -1266,7 +1265,7 b' class treemanifest(object):'
1266 in the current/other manifest. Where the file does not exist,
1265 in the current/other manifest. Where the file does not exist,
1267 the nodeid will be None and the flags will be the empty
1266 the nodeid will be None and the flags will be the empty
1268 string.
1267 string.
1269 '''
1268 """
1270 if match and not match.always():
1269 if match and not match.always():
1271 m1 = self._matches(match)
1270 m1 = self._matches(match)
1272 m2 = m2._matches(match)
1271 m2 = m2._matches(match)
@@ -1546,9 +1545,9 b' class FastdeltaUnavailable(Exception):'
1546
1545
1547 @interfaceutil.implementer(repository.imanifeststorage)
1546 @interfaceutil.implementer(repository.imanifeststorage)
1548 class manifestrevlog(object):
1547 class manifestrevlog(object):
1549 '''A revlog that stores manifest texts. This is responsible for caching the
1548 """A revlog that stores manifest texts. This is responsible for caching the
1550 full-text manifest contents.
1549 full-text manifest contents.
1551 '''
1550 """
1552
1551
1553 def __init__(
1552 def __init__(
1554 self,
1553 self,
@@ -2077,12 +2076,12 b' class manifestctx(object):'
2077 return self._data
2076 return self._data
2078
2077
2079 def readfast(self, shallow=False):
2078 def readfast(self, shallow=False):
2080 '''Calls either readdelta or read, based on which would be less work.
2079 """Calls either readdelta or read, based on which would be less work.
2081 readdelta is called if the delta is against the p1, and therefore can be
2080 readdelta is called if the delta is against the p1, and therefore can be
2082 read quickly.
2081 read quickly.
2083
2082
2084 If `shallow` is True, nothing changes since this is a flat manifest.
2083 If `shallow` is True, nothing changes since this is a flat manifest.
2085 '''
2084 """
2086 store = self._storage()
2085 store = self._storage()
2087 r = store.rev(self._node)
2086 r = store.rev(self._node)
2088 deltaparent = store.deltaparent(r)
2087 deltaparent = store.deltaparent(r)
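
A sketch of the decision rule readfast() describes, using nullrev = -1; this mirrors the idea, not the verbatim method body:

    nullrev = -1

    def readfast_choice(store, node):
        r = store.rev(node)
        deltaparent = store.deltaparent(r)
        if deltaparent != nullrev and deltaparent in store.parentrevs(r):
            return 'readdelta'   # delta already against a parent: cheap
        return 'read'            # unrelated delta base: read the full text
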
@@ -2091,12 +2090,12 b' class manifestctx(object):'
2091 return self.read()
2090 return self.read()
2092
2091
2093 def readdelta(self, shallow=False):
2092 def readdelta(self, shallow=False):
2094 '''Returns a manifest containing just the entries that are present
2093 """Returns a manifest containing just the entries that are present
2095 in this manifest, but not in its p1 manifest. This is efficient to read
2094 in this manifest, but not in its p1 manifest. This is efficient to read
2096 if the revlog delta is already p1.
2095 if the revlog delta is already p1.
2097
2096
2098 Changing the value of `shallow` has no effect on flat manifests.
2097 Changing the value of `shallow` has no effect on flat manifests.
2099 '''
2098 """
2100 store = self._storage()
2099 store = self._storage()
2101 r = store.rev(self._node)
2100 r = store.rev(self._node)
2102 d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
2101 d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
@@ -2208,7 +2207,7 b' class treemanifestctx(object):'
2208 return self._storage().parents(self._node)
2207 return self._storage().parents(self._node)
2209
2208
2210 def readdelta(self, shallow=False):
2209 def readdelta(self, shallow=False):
2211 '''Returns a manifest containing just the entries that are present
2210 """Returns a manifest containing just the entries that are present
2212 in this manifest, but not in its p1 manifest. This is efficient to read
2211 in this manifest, but not in its p1 manifest. This is efficient to read
2213 if the revlog delta is already p1.
2212 if the revlog delta is already p1.
2214
2213
@@ -2217,7 +2216,7 b' class treemanifestctx(object):'
2217 subdirectory entry will be reported as it appears in the manifest, i.e.
2216 subdirectory entry will be reported as it appears in the manifest, i.e.
2218 the subdirectory will be reported among files and distinguished only by
2217 the subdirectory will be reported among files and distinguished only by
2219 its 't' flag.
2218 its 't' flag.
2220 '''
2219 """
2221 store = self._storage()
2220 store = self._storage()
2222 if shallow:
2221 if shallow:
2223 r = store.rev(self._node)
2222 r = store.rev(self._node)
@@ -2237,13 +2236,13 b' class treemanifestctx(object):'
2237 return md
2236 return md
2238
2237
2239 def readfast(self, shallow=False):
2238 def readfast(self, shallow=False):
2240 '''Calls either readdelta or read, based on which would be less work.
2239 """Calls either readdelta or read, based on which would be less work.
2241 readdelta is called if the delta is against the p1, and therefore can be
2240 readdelta is called if the delta is against the p1, and therefore can be
2242 read quickly.
2241 read quickly.
2243
2242
2244 If `shallow` is True, it only returns the entries from this manifest,
2243 If `shallow` is True, it only returns the entries from this manifest,
2245 and not any submanifests.
2244 and not any submanifests.
2246 '''
2245 """
2247 store = self._storage()
2246 store = self._storage()
2248 r = store.rev(self._node)
2247 r = store.rev(self._node)
2249 deltaparent = store.deltaparent(r)
2248 deltaparent = store.deltaparent(r)
@@ -47,8 +47,8 b' propertycache = util.propertycache'
47
47
48
48
49 def _rematcher(regex):
49 def _rematcher(regex):
50 '''compile the regexp with the best available regexp engine and return a
50 """compile the regexp with the best available regexp engine and return a
51 matcher function'''
51 matcher function"""
52 m = util.re.compile(regex)
52 m = util.re.compile(regex)
53 try:
53 try:
54 # slightly faster, provided by facebook's re2 bindings
54 # slightly faster, provided by facebook's re2 bindings
@@ -82,8 +82,8 b' def _expandsets(cwd, kindpats, ctx=None,'
82
82
83
83
84 def _expandsubinclude(kindpats, root):
84 def _expandsubinclude(kindpats, root):
85 '''Returns the list of subinclude matcher args and the kindpats without the
85 """Returns the list of subinclude matcher args and the kindpats without the
86 subincludes in it.'''
86 subincludes in it."""
87 relmatchers = []
87 relmatchers = []
88 other = []
88 other = []
89
89
@@ -107,7 +107,7 b' def _expandsubinclude(kindpats, root):'
107
107
108
108
109 def _kindpatsalwaysmatch(kindpats):
109 def _kindpatsalwaysmatch(kindpats):
110 """"Checks whether the kindspats match everything, as e.g.
110 """ "Checks whether the kindspats match everything, as e.g.
111 'relpath:.' does.
111 'relpath:.' does.
112 """
112 """
113 for kind, pat, source in kindpats:
113 for kind, pat, source in kindpats:
@@ -117,11 +117,21 b' def _kindpatsalwaysmatch(kindpats):'
117
117
118
118
119 def _buildkindpatsmatcher(
119 def _buildkindpatsmatcher(
120 matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
120 matchercls,
121 root,
122 cwd,
123 kindpats,
124 ctx=None,
125 listsubrepos=False,
126 badfn=None,
121 ):
127 ):
122 matchers = []
128 matchers = []
123 fms, kindpats = _expandsets(
129 fms, kindpats = _expandsets(
124 cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
130 cwd,
131 kindpats,
132 ctx=ctx,
133 listsubrepos=listsubrepos,
134 badfn=badfn,
125 )
135 )
126 if kindpats:
136 if kindpats:
127 m = matchercls(root, kindpats, badfn=badfn)
137 m = matchercls(root, kindpats, badfn=badfn)
@@ -321,8 +331,8 b' def badmatch(match, badfn):'
321
331
322
332
323 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
333 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
324 '''Convert 'kind:pat' from the patterns list to tuples with kind and
334 """Convert 'kind:pat' from the patterns list to tuples with kind and
325 normalized and rooted patterns and with listfiles expanded.'''
335 normalized and rooted patterns and with listfiles expanded."""
326 kindpats = []
336 kindpats = []
327 for kind, pat in [_patsplit(p, default) for p in patterns]:
337 for kind, pat in [_patsplit(p, default) for p in patterns]:
328 if kind in cwdrelativepatternkinds:
338 if kind in cwdrelativepatternkinds:
@@ -383,8 +393,8 b' class basematcher(object):'
383 # Callbacks related to how the matcher is used by dirstate.walk.
393 # Callbacks related to how the matcher is used by dirstate.walk.
384 # Subscribers to these events must monkeypatch the matcher object.
394 # Subscribers to these events must monkeypatch the matcher object.
385 def bad(self, f, msg):
395 def bad(self, f, msg):
386 '''Callback from dirstate.walk for each explicit file that can't be
396 """Callback from dirstate.walk for each explicit file that can't be
387 found/accessed, with an error message.'''
397 found/accessed, with an error message."""
388
398
389 # If an traversedir is set, it will be called when a directory discovered
399 # If an traversedir is set, it will be called when a directory discovered
390 # by recursive traversal is visited.
400 # by recursive traversal is visited.
@@ -395,11 +405,11 b' class basematcher(object):'
395 return []
405 return []
396
406
397 def files(self):
407 def files(self):
398 '''Explicitly listed files or patterns or roots:
408 """Explicitly listed files or patterns or roots:
399 if no patterns or .always(): empty list,
409 if no patterns or .always(): empty list,
400 if exact: list exact files,
410 if exact: list exact files,
401 if not .anypats(): list all files and dirs,
411 if not .anypats(): list all files and dirs,
402 else: optimal roots'''
412 else: optimal roots"""
403 return self._files
413 return self._files
404
414
405 @propertycache
415 @propertycache
@@ -414,18 +424,18 b' class basematcher(object):'
414 return False
424 return False
415
425
416 def visitdir(self, dir):
426 def visitdir(self, dir):
417 '''Decides whether a directory should be visited based on whether it
427 """Decides whether a directory should be visited based on whether it
418 has potential matches in it or one of its subdirectories. This is
428 has potential matches in it or one of its subdirectories. This is
419 based on the match's primary, included, and excluded patterns.
429 based on the match's primary, included, and excluded patterns.
420
430
421 Returns the string 'all' if the given directory and all subdirectories
431 Returns the string 'all' if the given directory and all subdirectories
422 should be visited. Otherwise returns True or False indicating whether
432 should be visited. Otherwise returns True or False indicating whether
423 the given directory should be visited.
433 the given directory should be visited.
424 '''
434 """
425 return True
435 return True
426
436
427 def visitchildrenset(self, dir):
437 def visitchildrenset(self, dir):
428 '''Decides whether a directory should be visited based on whether it
438 """Decides whether a directory should be visited based on whether it
429 has potential matches in it or one of its subdirectories, and
439 has potential matches in it or one of its subdirectories, and
430 potentially lists which subdirectories of that directory should be
440 potentially lists which subdirectories of that directory should be
431 visited. This is based on the match's primary, included, and excluded
441 visited. This is based on the match's primary, included, and excluded
@@ -464,27 +474,27 b' class basematcher(object):'
464 indicating that there are no files in this dir to investigate (or
474 indicating that there are no files in this dir to investigate (or
465 equivalently that if there are files to investigate in 'dir' that it
475 equivalently that if there are files to investigate in 'dir' that it
466 will always return 'this').
476 will always return 'this').
467 '''
477 """
468 return b'this'
478 return b'this'
469
479
470 def always(self):
480 def always(self):
471 '''Matcher will match everything and .files() will be empty --
481 """Matcher will match everything and .files() will be empty --
472 optimization might be possible.'''
482 optimization might be possible."""
473 return False
483 return False
474
484
475 def isexact(self):
485 def isexact(self):
476 '''Matcher will match exactly the list of files in .files() --
486 """Matcher will match exactly the list of files in .files() --
477 optimization might be possible.'''
487 optimization might be possible."""
478 return False
488 return False
479
489
480 def prefix(self):
490 def prefix(self):
481 '''Matcher will match the paths in .files() recursively --
491 """Matcher will match the paths in .files() recursively --
482 optimization might be possible.'''
492 optimization might be possible."""
483 return False
493 return False
484
494
485 def anypats(self):
495 def anypats(self):
486 '''None of .always(), .isexact(), and .prefix() is true --
496 """None of .always(), .isexact(), and .prefix() is true --
487 optimizations will be difficult.'''
497 optimizations will be difficult."""
488 return not self.always() and not self.isexact() and not self.prefix()
498 return not self.always() and not self.isexact() and not self.prefix()
489
499
490
500
@@ -734,7 +744,7 b' class includematcher(basematcher):'
734
744
735
745
736 class exactmatcher(basematcher):
746 class exactmatcher(basematcher):
737 r'''Matches the input files exactly. They are interpreted as paths, not
747 r"""Matches the input files exactly. They are interpreted as paths, not
738 patterns (so no kind-prefixes).
748 patterns (so no kind-prefixes).
739
749
740 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
750 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
@@ -752,7 +762,7 b' class exactmatcher(basematcher):'
752 False
762 False
753 >>> m(br're:.*\.c$')
763 >>> m(br're:.*\.c$')
754 True
764 True
755 '''
765 """
756
766
757 def __init__(self, files, badfn=None):
767 def __init__(self, files, badfn=None):
758 super(exactmatcher, self).__init__(badfn)
768 super(exactmatcher, self).__init__(badfn)
@@ -799,11 +809,11 b' class exactmatcher(basematcher):'
799
809
800
810
801 class differencematcher(basematcher):
811 class differencematcher(basematcher):
802 '''Composes two matchers by matching if the first matches and the second
812 """Composes two matchers by matching if the first matches and the second
803 does not.
813 does not.
804
814
805 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
815 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
806 '''
816 """
807
817
808 def __init__(self, m1, m2):
818 def __init__(self, m1, m2):
809 super(differencematcher, self).__init__()
819 super(differencematcher, self).__init__()
@@ -868,10 +878,10 b' class differencematcher(basematcher):'
868
878
869
879
870 def intersectmatchers(m1, m2):
880 def intersectmatchers(m1, m2):
871 '''Composes two matchers by matching if both of them match.
881 """Composes two matchers by matching if both of them match.
872
882
873 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
883 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
874 '''
884 """
875 if m1 is None or m2 is None:
885 if m1 is None or m2 is None:
876 return m1 or m2
886 return m1 or m2
877 if m1.always():
887 if m1.always():
@@ -1166,7 +1176,7 b' class unionmatcher(basematcher):'
1166
1176
1167
1177
1168 def patkind(pattern, default=None):
1178 def patkind(pattern, default=None):
1169 r'''If pattern is 'kind:pat' with a known kind, return kind.
1179 r"""If pattern is 'kind:pat' with a known kind, return kind.
1170
1180
1171 >>> patkind(br're:.*\.c$')
1181 >>> patkind(br're:.*\.c$')
1172 're'
1182 're'
@@ -1177,7 +1187,7 b' def patkind(pattern, default=None):'
1177 >>> patkind(b'main.py')
1187 >>> patkind(b'main.py')
1178 >>> patkind(b'main.py', default=b're')
1188 >>> patkind(b'main.py', default=b're')
1179 're'
1189 're'
1180 '''
1190 """
1181 return _patsplit(pattern, default)[0]
1191 return _patsplit(pattern, default)[0]
1182
1192
1183
1193
@@ -1192,7 +1202,7 b' def _patsplit(pattern, default):'
1192
1202
1193
1203
1194 def _globre(pat):
1204 def _globre(pat):
1195 r'''Convert an extended glob string to a regexp string.
1205 r"""Convert an extended glob string to a regexp string.
1196
1206
1197 >>> from . import pycompat
1207 >>> from . import pycompat
1198 >>> def bprint(s):
1208 >>> def bprint(s):
@@ -1213,7 +1223,7 b' def _globre(pat):'
1213 (?:a|b)
1223 (?:a|b)
1214 >>> bprint(_globre(br'.\*\?'))
1224 >>> bprint(_globre(br'.\*\?'))
1215 \.\*\?
1225 \.\*\?
1216 '''
1226 """
1217 i, n = 0, len(pat)
1227 i, n = 0, len(pat)
1218 res = b''
1228 res = b''
1219 group = 0
1229 group = 0
@@ -1276,9 +1286,9 b' def _globre(pat):'
1276
1286
1277
1287
1278 def _regex(kind, pat, globsuffix):
1288 def _regex(kind, pat, globsuffix):
1279 '''Convert a (normalized) pattern of any kind into a
1289 """Convert a (normalized) pattern of any kind into a
1280 regular expression.
1290 regular expression.
1281 globsuffix is appended to the regexp of globs.'''
1291 globsuffix is appended to the regexp of globs."""
1282 if not pat and kind in (b'glob', b'relpath'):
1292 if not pat and kind in (b'glob', b'relpath'):
1283 return b''
1293 return b''
1284 if kind == b're':
1294 if kind == b're':
@@ -1312,8 +1322,8 b' def _regex(kind, pat, globsuffix):'
1312
1322
1313
1323
1314 def _buildmatch(kindpats, globsuffix, root):
1324 def _buildmatch(kindpats, globsuffix, root):
1315 '''Return regexp string and a matcher function for kindpats.
1325 """Return regexp string and a matcher function for kindpats.
1316 globsuffix is appended to the regexp of globs.'''
1326 globsuffix is appended to the regexp of globs."""
1317 matchfuncs = []
1327 matchfuncs = []
1318
1328
1319 subincludes, kindpats = _expandsubinclude(kindpats, root)
1329 subincludes, kindpats = _expandsubinclude(kindpats, root)
@@ -1422,13 +1432,13 b' def _buildregexmatch(kindpats, globsuffi'
1422
1432
1423
1433
1424 def _patternrootsanddirs(kindpats):
1434 def _patternrootsanddirs(kindpats):
1425 '''Returns roots and directories corresponding to each pattern.
1435 """Returns roots and directories corresponding to each pattern.
1426
1436
1427 This calculates the roots and directories exactly matching the patterns and
1437 This calculates the roots and directories exactly matching the patterns and
1428 returns a tuple of (roots, dirs) for each. It does not return other
1438 returns a tuple of (roots, dirs) for each. It does not return other
1429 directories which may also need to be considered, like the parent
1439 directories which may also need to be considered, like the parent
1430 directories.
1440 directories.
1431 '''
1441 """
1432 r = []
1442 r = []
1433 d = []
1443 d = []
1434 for kind, pat, source in kindpats:
1444 for kind, pat, source in kindpats:
@@ -1459,7 +1469,7 b' def _roots(kindpats):'
1459
1469
1460
1470
1461 def _rootsdirsandparents(kindpats):
1471 def _rootsdirsandparents(kindpats):
1462 '''Returns roots and exact directories from patterns.
1472 """Returns roots and exact directories from patterns.
1463
1473
1464 `roots` are directories to match recursively, `dirs` should
1474 `roots` are directories to match recursively, `dirs` should
1465 be matched non-recursively, and `parents` are the implicitly required
1475 be matched non-recursively, and `parents` are the implicitly required
@@ -1486,7 +1496,7 b' def _rootsdirsandparents(kindpats):'
1486 ... (b'relre', b'rr', b'')])
1496 ... (b'relre', b'rr', b'')])
1487 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1497 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1488 (['', '', ''], []) ['']
1498 (['', '', ''], []) ['']
1489 '''
1499 """
1490 r, d = _patternrootsanddirs(kindpats)
1500 r, d = _patternrootsanddirs(kindpats)
1491
1501
1492 p = set()
1502 p = set()
@@ -1503,13 +1513,13 b' def _rootsdirsandparents(kindpats):'
1503
1513
1504
1514
1505 def _explicitfiles(kindpats):
1515 def _explicitfiles(kindpats):
1506 '''Returns the potential explicit filenames from the patterns.
1516 """Returns the potential explicit filenames from the patterns.
1507
1517
1508 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1518 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1509 ['foo/bar']
1519 ['foo/bar']
1510 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1520 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1511 []
1521 []
1512 '''
1522 """
1513 # Keep only the pattern kinds where one can specify filenames (vs only
1523 # Keep only the pattern kinds where one can specify filenames (vs only
1514 # directory names).
1524 # directory names).
1515 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
1525 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
@@ -1528,7 +1538,7 b' def _prefix(kindpats):'
1528
1538
1529
1539
1530 def readpatternfile(filepath, warn, sourceinfo=False):
1540 def readpatternfile(filepath, warn, sourceinfo=False):
1531 '''parse a pattern file, returning a list of
1541 """parse a pattern file, returning a list of
1532 patterns. These patterns should be given to compile()
1542 patterns. These patterns should be given to compile()
1533 to be validated and converted into a match function.
1543 to be validated and converted into a match function.
1534
1544
@@ -1549,7 +1559,7 b' def readpatternfile(filepath, warn, sour'
1549 if sourceinfo is set, returns a list of tuples:
1559 if sourceinfo is set, returns a list of tuples:
1550 (pattern, lineno, originalline).
1560 (pattern, lineno, originalline).
1551 This is useful to debug ignore patterns.
1561 This is useful to debug ignore patterns.
1552 '''
1562 """
1553
1563
1554 syntaxes = {
1564 syntaxes = {
1555 b're': b'relre:',
1565 b're': b'relre:',
@@ -39,7 +39,7 b' splitnewlines = bdiff.splitnewlines'
39
39
40 # TODO: this looks like it could be an attrs, which might help pytype
40 # TODO: this looks like it could be an attrs, which might help pytype
41 class diffopts(object):
41 class diffopts(object):
42 '''context is the number of context lines
42 """context is the number of context lines
43 text treats all files as text
43 text treats all files as text
44 showfunc enables diff -p output
44 showfunc enables diff -p output
45 git enables the git extended patch format
45 git enables the git extended patch format
@@ -50,7 +50,7 b' class diffopts(object):'
50 ignorewsamount ignores changes in the amount of whitespace
50 ignorewsamount ignores changes in the amount of whitespace
51 ignoreblanklines ignores changes whose lines are all blank
51 ignoreblanklines ignores changes whose lines are all blank
52 upgrade generates git diffs to avoid data loss
52 upgrade generates git diffs to avoid data loss
53 '''
53 """
54
54
55 _HAS_DYNAMIC_ATTRIBUTES = True
55 _HAS_DYNAMIC_ATTRIBUTES = True
56
56
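
The knobs listed in the docstring map to keyword arguments on the constructor; a hedged example:

    opts = diffopts(context=5, showfunc=True, ignorews=True)
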
@@ -217,7 +217,10 b' def _checkunknownfiles(repo, wctx, mctx,'
217 if config == b'warn':
217 if config == b'warn':
218 warnconflicts.add(f)
218 warnconflicts.add(f)
219 mresult.addfile(
219 mresult.addfile(
220 f, mergestatemod.ACTION_GET, (fl2, True), b'remote created',
220 f,
221 mergestatemod.ACTION_GET,
222 (fl2, True),
223 b'remote created',
221 )
224 )
222
225
223 for f in sorted(abortconflicts):
226 for f in sorted(abortconflicts):
@@ -281,7 +284,10 b' def _forgetremoved(wctx, mctx, branchmer'
281 for f in wctx.removed():
284 for f in wctx.removed():
282 if f not in mctx:
285 if f not in mctx:
283 mresult.addfile(
286 mresult.addfile(
284 f, mergestatemod.ACTION_FORGET, None, b"forget removed",
287 f,
288 mergestatemod.ACTION_FORGET,
289 None,
290 b"forget removed",
285 )
291 )
286
292
287
293
@@ -544,10 +550,10 b' def _filternarrowactions(narrowmatch, br'
544
550
545
551
546 class mergeresult(object):
552 class mergeresult(object):
547 '''An object representing result of merging manifests.
553 """An object representing result of merging manifests.
548
554
549 It has information about what actions need to be performed on the dirstate,
555 It has information about what actions need to be performed on the dirstate,
550 the mapping of divergent renames, and other such cases.'''
556 the mapping of divergent renames, and other such cases."""
551
557
552 def __init__(self):
558 def __init__(self):
553 """
559 """
@@ -572,7 +578,7 b' class mergeresult(object):'
572 self._renamedelete = renamedelete
578 self._renamedelete = renamedelete
573
579
574 def addfile(self, filename, action, data, message):
580 def addfile(self, filename, action, data, message):
575 """ adds a new file to the mergeresult object
581 """adds a new file to the mergeresult object
576
582
577 filename: file which we are adding
583 filename: file which we are adding
578 action: one of mergestatemod.ACTION_*
584 action: one of mergestatemod.ACTION_*
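
addfile() keeps two views in sync: a per-file mapping and a per-action mapping. A minimal sketch of that bookkeeping (illustrative, not the class's exact code):

    import collections

    filemapping = {}                               # file -> (action, data, msg)
    actionmapping = collections.defaultdict(dict)  # action -> {file: (data, msg)}

    def addfile(filename, action, data, message):
        if filename in filemapping:                # re-adding moves the file
            old_action, _, _ = filemapping[filename]
            del actionmapping[old_action][filename]
        filemapping[filename] = (action, data, message)
        actionmapping[action][filename] = (data, message)
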
@@ -589,15 +595,15 b' class mergeresult(object):'
589 self._actionmapping[action][filename] = (data, message)
595 self._actionmapping[action][filename] = (data, message)
590
596
591 def getfile(self, filename, default_return=None):
597 def getfile(self, filename, default_return=None):
592 """ returns (action, args, msg) about this file
598 """returns (action, args, msg) about this file
593
599
594 returns default_return if the file is not present """
600 returns default_return if the file is not present"""
595 if filename in self._filemapping:
601 if filename in self._filemapping:
596 return self._filemapping[filename]
602 return self._filemapping[filename]
597 return default_return
603 return default_return
598
604
599 def files(self, actions=None):
605 def files(self, actions=None):
600 """ returns files on which provided action needs to perfromed
606 """returns files on which provided action needs to perfromed
601
607
602 If actions is None, all files are returned
608 If actions is None, all files are returned
603 """
609 """
@@ -613,14 +619,14 b' class mergeresult(object):'
613 yield f
619 yield f
614
620
615 def removefile(self, filename):
621 def removefile(self, filename):
616 """ removes a file from the mergeresult object as the file might
622 """removes a file from the mergeresult object as the file might
617 not need merging anymore """
623 not need merging anymore"""
618 action, data, message = self._filemapping[filename]
624 action, data, message = self._filemapping[filename]
619 del self._filemapping[filename]
625 del self._filemapping[filename]
620 del self._actionmapping[action][filename]
626 del self._actionmapping[action][filename]
621
627
622 def getactions(self, actions, sort=False):
628 def getactions(self, actions, sort=False):
623 """ get list of files which are marked with these actions
629 """get list of files which are marked with these actions
624 if sort is true, files for each action are sorted and then added
630 if sort is true, files for each action are sorted and then added
625
631
626 Returns a list of tuples of the form (filename, data, message)
632 Returns a list of tuples of the form (filename, data, message)
@@ -637,10 +643,10 b' class mergeresult(object):'
637 yield f, args, msg
643 yield f, args, msg
638
644
639 def len(self, actions=None):
645 def len(self, actions=None):
640 """ returns number of files which needs actions
646 """returns number of files which needs actions
641
647
642 if actions is passed, total of number of files in that action
648 if actions is passed, total of number of files in that action
643 only is returned """
649 only is returned"""
644
650
645 if actions is None:
651 if actions is None:
646 return len(self._filemapping)
652 return len(self._filemapping)
@@ -656,8 +662,8 b' class mergeresult(object):'
656 yield key, val
662 yield key, val
657
663
658 def addcommitinfo(self, filename, key, value):
664 def addcommitinfo(self, filename, key, value):
659 """ adds key-value information about filename which will be required
665 """adds key-value information about filename which will be required
660 while committing this merge """
666 while committing this merge"""
661 self._commitinfo[filename][key] = value
667 self._commitinfo[filename][key] = value
662
668
663 @property
669 @property
@@ -674,8 +680,8 b' class mergeresult(object):'
674
680
675 @property
681 @property
676 def actionsdict(self):
682 def actionsdict(self):
677 """ returns a dictionary of actions to be perfomed with action as key
683 """returns a dictionary of actions to be perfomed with action as key
678 and a list of files and related arguments as values """
684 and a list of files and related arguments as values"""
679 res = collections.defaultdict(list)
685 res = collections.defaultdict(list)
680 for a, d in pycompat.iteritems(self._actionmapping):
686 for a, d in pycompat.iteritems(self._actionmapping):
681 for f, (args, msg) in pycompat.iteritems(d):
687 for f, (args, msg) in pycompat.iteritems(d):
@@ -689,8 +695,8 b' class mergeresult(object):'
689 self._actionmapping[act][f] = data, msg
695 self._actionmapping[act][f] = data, msg
690
696
691 def hasconflicts(self):
697 def hasconflicts(self):
692 """ tells whether this merge resulted in some actions which can
698 """tells whether this merge resulted in some actions which can
693 result in conflicts or not """
699 result in conflicts or not"""
694 for a in self._actionmapping.keys():
700 for a in self._actionmapping.keys():
695 if (
701 if (
696 a
702 a
@@ -839,7 +845,10 b' def manifestmerge('
839 nol = b'l' not in fl1 + fl2 + fla
845 nol = b'l' not in fl1 + fl2 + fla
840 if n2 == a and fl2 == fla:
846 if n2 == a and fl2 == fla:
841 mresult.addfile(
847 mresult.addfile(
842 f, mergestatemod.ACTION_KEEP, (), b'remote unchanged',
848 f,
849 mergestatemod.ACTION_KEEP,
850 (),
851 b'remote unchanged',
843 )
852 )
844 elif n1 == a and fl1 == fla: # local unchanged - use remote
853 elif n1 == a and fl1 == fla: # local unchanged - use remote
845 if n1 == n2: # optimization: keep local content
854 if n1 == n2: # optimization: keep local content
@@ -936,11 +945,17 b' def manifestmerge('
936 # This file was locally added. We should forget it instead of
945 # This file was locally added. We should forget it instead of
937 # deleting it.
946 # deleting it.
938 mresult.addfile(
947 mresult.addfile(
939 f, mergestatemod.ACTION_FORGET, None, b'remote deleted',
948 f,
949 mergestatemod.ACTION_FORGET,
950 None,
951 b'remote deleted',
940 )
952 )
941 else:
953 else:
942 mresult.addfile(
954 mresult.addfile(
943 f, mergestatemod.ACTION_REMOVE, None, b'other deleted',
955 f,
956 mergestatemod.ACTION_REMOVE,
957 None,
958 b'other deleted',
944 )
959 )
945 if branchmerge:
960 if branchmerge:
946 # the file must be absent after merging,
961 # the file must be absent after merging,
@@ -1086,7 +1101,7 b' def manifestmerge('
1086
1101
1087 def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
1102 def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
1088 """Resolves false conflicts where the nodeid changed but the content
1103 """Resolves false conflicts where the nodeid changed but the content
1089 remained the same."""
1104 remained the same."""
1090 # We force a copy of actions.items() because we're going to mutate
1105 # We force a copy of actions.items() because we're going to mutate
1091 # actions as we resolve trivial conflicts.
1106 # actions as we resolve trivial conflicts.
1092 for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
1107 for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
@@ -1423,7 +1438,13 b' def _prefetchfiles(repo, ctx, mresult):'
1423 prefetch = scmutil.prefetchfiles
1438 prefetch = scmutil.prefetchfiles
1424 matchfiles = scmutil.matchfiles
1439 matchfiles = scmutil.matchfiles
1425 prefetch(
1440 prefetch(
1426 repo, [(ctx.rev(), matchfiles(repo, files),)],
1441 repo,
1442 [
1443 (
1444 ctx.rev(),
1445 matchfiles(repo, files),
1446 )
1447 ],
1427 )
1448 )
1428
1449
1429
1450
@@ -1444,7 +1465,13 b' class updateresult(object):'
1444
1465
1445
1466
1446 def applyupdates(
1467 def applyupdates(
1447 repo, mresult, wctx, mctx, overwrite, wantfiledata, labels=None,
1468 repo,
1469 mresult,
1470 wctx,
1471 mctx,
1472 overwrite,
1473 wantfiledata,
1474 labels=None,
1448 ):
1475 ):
1449 """apply the merge action list to the working directory
1476 """apply the merge action list to the working directory
1450
1477
@@ -1734,7 +1761,8 b' def _advertisefsmonitor(repo, num_gets, '
1734 if dirstate.rustmod is not None:
1761 if dirstate.rustmod is not None:
1735 # When using rust status, fsmonitor becomes necessary at higher sizes
1762 # When using rust status, fsmonitor becomes necessary at higher sizes
1736 fsmonitorthreshold = repo.ui.configint(
1763 fsmonitorthreshold = repo.ui.configint(
1737 b'fsmonitor', b'warn_update_file_count_rust',
1764 b'fsmonitor',
1765 b'warn_update_file_count_rust',
1738 )
1766 )
1739
1767
1740 try:
1768 try:
@@ -2001,7 +2029,10 b' def _update('
2001 0,
2029 0,
2002 ):
2030 ):
2003 mresult.addfile(
2031 mresult.addfile(
2004 f, mergestatemod.ACTION_REMOVE, None, b'prompt delete',
2032 f,
2033 mergestatemod.ACTION_REMOVE,
2034 None,
2035 b'prompt delete',
2005 )
2036 )
2006 elif f in p1:
2037 elif f in p1:
2007 mresult.addfile(
2038 mresult.addfile(
@@ -2012,7 +2043,10 b' def _update('
2012 )
2043 )
2013 else:
2044 else:
2014 mresult.addfile(
2045 mresult.addfile(
2015 f, mergestatemod.ACTION_ADD, None, b'prompt keep',
2046 f,
2047 mergestatemod.ACTION_ADD,
2048 None,
2049 b'prompt keep',
2016 )
2050 )
2017 elif m == mergestatemod.ACTION_DELETED_CHANGED:
2051 elif m == mergestatemod.ACTION_DELETED_CHANGED:
2018 f1, f2, fa, move, anc = args
2052 f1, f2, fa, move, anc = args
@@ -2089,7 +2123,13 b' def _update('
2089
2123
2090 wantfiledata = updatedirstate and not branchmerge
2124 wantfiledata = updatedirstate and not branchmerge
2091 stats, getfiledata = applyupdates(
2125 stats, getfiledata = applyupdates(
2092 repo, mresult, wc, p2, overwrite, wantfiledata, labels=labels,
2126 repo,
2127 mresult,
2128 wc,
2129 p2,
2130 overwrite,
2131 wantfiledata,
2132 labels=labels,
2093 )
2133 )
2094
2134
2095 if updatedirstate:
2135 if updatedirstate:
@@ -132,7 +132,7 b' NO_OP_ACTIONS = ('
132
132
133
133
134 class _mergestate_base(object):
134 class _mergestate_base(object):
135 '''track 3-way merge state of individual files
135 """track 3-way merge state of individual files
136
136
137 The merge state is stored on disk when needed. Two files are used: one with
137 The merge state is stored on disk when needed. Two files are used: one with
138 an old format (version 1), and one with a new format (version 2). Version 2
138 an old format (version 1), and one with a new format (version 2). Version 2
@@ -164,7 +164,7 b' class _mergestate_base(object):'
164
164
165 The resolve command transitions between 'u' and 'r' for conflicts and
165 The resolve command transitions between 'u' and 'r' for conflicts and
166 'pu' and 'pr' for path conflicts.
166 'pu' and 'pr' for path conflicts.
167 '''
167 """
168
168
169 def __init__(self, repo):
169 def __init__(self, repo):
170 """Initialize the merge state.
170 """Initialize the merge state.
@@ -275,8 +275,8 b' class _mergestate_base(object):'
275 self._dirty = True
275 self._dirty = True
276
276
277 def addcommitinfo(self, path, data):
277 def addcommitinfo(self, path, data):
278 """ stores information which is required at commit
278 """stores information which is required at commit
279 into _stateextras """
279 into _stateextras"""
280 self._stateextras[path].update(data)
280 self._stateextras[path].update(data)
281 self._dirty = True
281 self._dirty = True
282
282
@@ -254,8 +254,7 b' def compute_all_files_changes(ctx):'
254
254
255
255
256 def _process_root(ctx):
256 def _process_root(ctx):
257 """compute the appropriate changed files for a changeset with no parents
257 """compute the appropriate changed files for a changeset with no parents"""
258 """
259 # Simple, there was nothing before it, so everything is added.
258 # Simple, there was nothing before it, so everything is added.
260 md = ChangingFiles()
259 md = ChangingFiles()
261 manifest = ctx.manifest()
260 manifest = ctx.manifest()
@@ -265,8 +264,7 b' def _process_root(ctx):'
265
264
266
265
267 def _process_linear(parent_ctx, children_ctx, parent=1):
266 def _process_linear(parent_ctx, children_ctx, parent=1):
268 """compute the appropriate changed files for a changeset with a single parent
267 """compute the appropriate changed files for a changeset with a single parent"""
269 """
270 md = ChangingFiles()
268 md = ChangingFiles()
271 parent_manifest = parent_ctx.manifest()
269 parent_manifest = parent_ctx.manifest()
272 children_manifest = children_ctx.manifest()
270 children_manifest = children_ctx.manifest()
@@ -515,8 +513,7 b' def _missing_from_all_ancestors(mas, fil'
515
513
516
514
517 def computechangesetfilesadded(ctx):
515 def computechangesetfilesadded(ctx):
518 """return the list of files added in a changeset
516 """return the list of files added in a changeset"""
519 """
520 added = []
517 added = []
521 for f in ctx.files():
518 for f in ctx.files():
522 if not any(f in p for p in ctx.parents()):
519 if not any(f in p for p in ctx.parents()):
@@ -580,8 +577,7 b' def get_removal_filter(ctx, x=None):'
580
577
581
578
582 def computechangesetfilesremoved(ctx):
579 def computechangesetfilesremoved(ctx):
583 """return the list of files removed in a changeset
580 """return the list of files removed in a changeset"""
584 """
585 removed = []
581 removed = []
586 for f in ctx.files():
582 for f in ctx.files():
587 if f not in ctx:
583 if f not in ctx:
@@ -593,8 +589,7 b' def computechangesetfilesremoved(ctx):'
593
589
594
590
595 def computechangesetfilesmerged(ctx):
591 def computechangesetfilesmerged(ctx):
596 """return the list of files merged in a changeset
592 """return the list of files merged in a changeset"""
597 """
598 merged = []
593 merged = []
599 if len(ctx.parents()) < 2:
594 if len(ctx.parents()) < 2:
600 return merged
595 return merged
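The docstring hunks in this file and in the files that follow all come from 20.8b1's new docstring pass: ''' delimiters are normalized to """, a stray space after the opening quotes is dropped, and a closing delimiter that sat alone on the next line is pulled up whenever the docstring then fits on one line. A before/after sketch with a hypothetical function (not taken from this diff):

    def filesadded(ctx):
        ''' return the list of files added in a changeset
        '''

    # black 20.8b1 rewrites the above to:

    def filesadded(ctx):
        """return the list of files added in a changeset"""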
@@ -52,7 +52,7 b' def subsubsubsubsection(s):'
52
52
53
53
54 def replace(text, substs):
54 def replace(text, substs):
55 '''
55 """
56 Apply a list of (find, replace) pairs to a text.
56 Apply a list of (find, replace) pairs to a text.
57
57
58 >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
58 >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
@@ -63,7 +63,7 b' def replace(text, substs):'
63 >>> encoding.encoding = b'shiftjis'
63 >>> encoding.encoding = b'shiftjis'
64 >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
64 >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
65 '\\x81\\\\'
65 '\\x81\\\\'
66 '''
66 """
67
67
68 # some character encodings (cp932 for Japanese, at least) use
68 # some character encodings (cp932 for Japanese, at least) use
69 # ASCII characters other than control/alphabet/digit as a part of
69 # ASCII characters other than control/alphabet/digit as a part of
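The doctest lines in replace() above come through unchanged because only the docstring delimiters were rewritten; everything between them, including expected outputs such as '\x81\\', is string content that black leaves alone. A self-contained illustration of the same point (hypothetical example, not from this diff):

    import doctest

    def sketch():
        """
        >>> 1 + 1
        2
        """

    # The delimiters are not part of the docstring value, so this run
    # behaves identically whether sketch() uses ''' or """ quotes.
    doctest.run_docstring_examples(sketch, {}, verbose=False)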
@@ -322,10 +322,10 b' def prunecontainers(blocks, keep):'
322
322
323
323
324 def findtables(blocks):
324 def findtables(blocks):
325 '''Find simple tables
325 """Find simple tables
326
326
327 Only simple one-line table elements are supported
327 Only simple one-line table elements are supported
328 '''
328 """
329
329
330 for block in blocks:
330 for block in blocks:
331 # Searching for a block that looks like this:
331 # Searching for a block that looks like this:
@@ -432,7 +432,11 b' def addmargins(blocks):'
432 while i < len(blocks):
432 while i < len(blocks):
433 if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
433 if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
434 b'type'
434 b'type'
435 ] in (b'bullet', b'option', b'field',):
435 ] in (
436 b'bullet',
437 b'option',
438 b'field',
439 ):
436 i += 1
440 i += 1
437 elif not blocks[i - 1][b'lines']:
441 elif not blocks[i - 1][b'lines']:
438 # no lines in previous block, do not separate
442 # no lines in previous block, do not separate
@@ -226,7 +226,7 b' def clearwcbackup(repo, backupname):'
226
226
227
227
228 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
228 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
229 r""" Restricts the patterns according to repo settings,
229 r"""Restricts the patterns according to repo settings,
230 resulting in a logical AND operation
230 resulting in a logical AND operation
231
231
232 :param req_includes: requested includes
232 :param req_includes: requested includes
@@ -998,8 +998,7 b' def _computephasedivergentset(repo):'
998
998
999 @cachefor(b'contentdivergent')
999 @cachefor(b'contentdivergent')
1000 def _computecontentdivergentset(repo):
1000 def _computecontentdivergentset(repo):
1001 """the set of rev that compete to be the final successors of some revision.
1001 """the set of rev that compete to be the final successors of some revision."""
1002 """
1003 divergent = set()
1002 divergent = set()
1004 obsstore = repo.obsstore
1003 obsstore = repo.obsstore
1005 newermap = {}
1004 newermap = {}
@@ -381,7 +381,7 b' METABLACKLIST = ['
381
381
382
382
383 def metanotblacklisted(metaitem):
383 def metanotblacklisted(metaitem):
384 """ Check that the key of a meta item (extrakey, extravalue) does not
384 """Check that the key of a meta item (extrakey, extravalue) does not
385 match any of the blacklist patterns
385 match any of the blacklist patterns
386 """
386 """
387 metakey = metaitem[0]
387 metakey = metaitem[0]
@@ -439,7 +439,7 b' def _cmpdiff(leftctx, rightctx):'
439
439
440
440
441 def geteffectflag(source, successors):
441 def geteffectflag(source, successors):
442 """ From an obs-marker relation, compute what changed between the
442 """From an obs-marker relation, compute what changed between the
443 predecessor and the successor.
443 predecessor and the successor.
444 """
444 """
445 effects = 0
445 effects = 0
@@ -816,7 +816,7 b' def successorsandmarkers(repo, ctx):'
816
816
817
817
818 def _getobsfate(successorssets):
818 def _getobsfate(successorssets):
819 """ Compute a changeset obsolescence fate based on its successorssets.
819 """Compute a changeset obsolescence fate based on its successorssets.
820 Successors can be the tipmost ones or the immediate ones. This function's
820 Successors can be the tipmost ones or the immediate ones. This function's
821 return values are not meant to be shown directly to users; they are meant to
821 return values are not meant to be shown directly to users; they are meant to
822 be used by internal functions only.
822 be used by internal functions only.
@@ -843,7 +843,7 b' def _getobsfate(successorssets):'
843
843
844
844
845 def obsfateverb(successorset, markers):
845 def obsfateverb(successorset, markers):
846 """ Return the verb summarizing the successorset and potentially using
846 """Return the verb summarizing the successorset and potentially using
847 information from the markers
847 information from the markers
848 """
848 """
849 if not successorset:
849 if not successorset:
@@ -856,14 +856,12 b' def obsfateverb(successorset, markers):'
856
856
857
857
858 def markersdates(markers):
858 def markersdates(markers):
859 """returns the list of dates for a list of markers
859 """returns the list of dates for a list of markers"""
860 """
861 return [m[4] for m in markers]
860 return [m[4] for m in markers]
862
861
863
862
864 def markersusers(markers):
863 def markersusers(markers):
865 """ Returns a sorted list of markers users without duplicates
864 """Returns a sorted list of markers users without duplicates"""
866 """
867 markersmeta = [dict(m[3]) for m in markers]
865 markersmeta = [dict(m[3]) for m in markers]
868 users = {
866 users = {
869 encoding.tolocal(meta[b'user'])
867 encoding.tolocal(meta[b'user'])
@@ -875,8 +873,7 b' def markersusers(markers):'
875
873
876
874
877 def markersoperations(markers):
875 def markersoperations(markers):
878 """ Returns a sorted list of markers operations without duplicates
876 """Returns a sorted list of markers operations without duplicates"""
879 """
880 markersmeta = [dict(m[3]) for m in markers]
877 markersmeta = [dict(m[3]) for m in markers]
881 operations = {
878 operations = {
882 meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
879 meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
@@ -886,7 +883,7 b' def markersoperations(markers):'
886
883
887
884
888 def obsfateprinter(ui, repo, successors, markers, formatctx):
885 def obsfateprinter(ui, repo, successors, markers, formatctx):
889 """ Build a obsfate string for a single successorset using all obsfate
886 """Build a obsfate string for a single successorset using all obsfate
890 related function defined in obsutil
887 related function defined in obsutil
891 """
888 """
892 quiet = ui.quiet
889 quiet = ui.quiet
@@ -950,8 +947,7 b' filteredmsgtable = {'
950
947
951
948
952 def _getfilteredreason(repo, changeid, ctx):
949 def _getfilteredreason(repo, changeid, ctx):
953 """return a human-friendly string on why a obsolete changeset is hidden
950 """return a human-friendly string on why a obsolete changeset is hidden"""
954 """
955 successors = successorssets(repo, ctx.node())
951 successors = successorssets(repo, ctx.node())
956 fate = _getobsfate(successors)
952 fate = _getobsfate(successors)
957
953
@@ -406,8 +406,7 b' def matchtree(pattern, tree, placeholder'
406
406
407
407
408 def parseerrordetail(inst):
408 def parseerrordetail(inst):
409 """Compose error message from specified ParseError object
409 """Compose error message from specified ParseError object"""
410 """
411 if inst.location is not None:
410 if inst.location is not None:
412 return _(b'at %d: %s') % (inst.location, inst.message)
411 return _(b'at %d: %s') % (inst.location, inst.message)
413 else:
412 else:
@@ -200,7 +200,7 b' patchheadermap = ['
200
200
201 @contextlib.contextmanager
201 @contextlib.contextmanager
202 def extract(ui, fileobj):
202 def extract(ui, fileobj):
203 '''extract patch from data read from fileobj.
203 """extract patch from data read from fileobj.
204
204
205 patch can be a normal patch or contained in an email message.
205 patch can be a normal patch or contained in an email message.
206
206
@@ -214,7 +214,7 b' def extract(ui, fileobj):'
214 - p1,
214 - p1,
215 - p2.
215 - p2.
216 Any item can be missing from the dictionary. If filename is missing,
216 Any item can be missing from the dictionary. If filename is missing,
217 fileobj did not contain a patch. Caller must unlink filename when done.'''
217 fileobj did not contain a patch. Caller must unlink filename when done."""
218
218
219 fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
219 fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
220 tmpfp = os.fdopen(fd, 'wb')
220 tmpfp = os.fdopen(fd, 'wb')
@@ -905,8 +905,7 b' class patchfile(object):'
905
905
906
906
907 class header(object):
907 class header(object):
908 """patch header
908 """patch header"""
909 """
910
909
911 diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
910 diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
912 diff_re = re.compile(b'diff -r .* (.*)$')
911 diff_re = re.compile(b'diff -r .* (.*)$')
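One-line collapses like the header docstring above do change the raw docstring value (a trailing newline plus indentation disappears) but not the cleaned text that help() and documentation tools show. A quick check of that claim, written for this edit and not part of the commit:

    import ast

    OLD = 'class header(object):\n    """patch header\n    """\n'
    NEW = 'class header(object):\n    """patch header"""\n'

    old_doc = ast.get_docstring(ast.parse(OLD).body[0])
    new_doc = ast.get_docstring(ast.parse(NEW).body[0])
    # get_docstring cleans indentation and trailing whitespace by
    # default, so both spellings document the class identically.
    assert old_doc == new_doc == "patch header"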
@@ -1854,7 +1853,7 b' def parsepatch(originalchunks, maxcontex'
1854
1853
1855
1854
1856 def pathtransform(path, strip, prefix):
1855 def pathtransform(path, strip, prefix):
1857 '''turn a path from a patch into a path suitable for the repository
1856 """turn a path from a patch into a path suitable for the repository
1858
1857
1859 prefix, if not empty, is expected to be normalized with a / at the end.
1858 prefix, if not empty, is expected to be normalized with a / at the end.
1860
1859
@@ -1873,7 +1872,7 b' def pathtransform(path, strip, prefix):'
1873 >>> pathtransform(b'a/b/c', 3, b'')
1872 >>> pathtransform(b'a/b/c', 3, b'')
1874 Traceback (most recent call last):
1873 Traceback (most recent call last):
1875 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1874 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1876 '''
1875 """
1877 pathlen = len(path)
1876 pathlen = len(path)
1878 i = 0
1877 i = 0
1879 if strip == 0:
1878 if strip == 0:
@@ -2503,7 +2502,7 b' def diff('
2503 copysourcematch=None,
2502 copysourcematch=None,
2504 hunksfilterfn=None,
2503 hunksfilterfn=None,
2505 ):
2504 ):
2506 '''yields diff of changes to files between two nodes, or node and
2505 """yields diff of changes to files between two nodes, or node and
2507 working directory.
2506 working directory.
2508
2507
2509 if node1 is None, use first dirstate parent instead.
2508 if node1 is None, use first dirstate parent instead.
@@ -2531,7 +2530,7 b' def diff('
2531
2530
2532 hunksfilterfn, if not None, should be a function taking a filectx and
2531 hunksfilterfn, if not None, should be a function taking a filectx and
2533 hunks generator that may yield filtered hunks.
2532 hunks generator that may yield filtered hunks.
2534 '''
2533 """
2535 if not node1 and not node2:
2534 if not node1 and not node2:
2536 node1 = repo.dirstate.p1()
2535 node1 = repo.dirstate.p1()
2537
2536
@@ -2886,10 +2885,10 b' def diffui(*args, **kw):'
2886
2885
2887
2886
2888 def _filepairs(modified, added, removed, copy, opts):
2887 def _filepairs(modified, added, removed, copy, opts):
2889 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2888 """generates tuples (f1, f2, copyop), where f1 is the name of the file
2890 before and f2 is the name after. For added files, f1 will be None,
2889 before and f2 is the name after. For added files, f1 will be None,
2891 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2890 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2892 or 'rename' (the latter two only if opts.git is set).'''
2891 or 'rename' (the latter two only if opts.git is set)."""
2893 gone = set()
2892 gone = set()
2894
2893
2895 copyto = {v: k for k, v in copy.items()}
2894 copyto = {v: k for k, v in copy.items()}
@@ -2948,13 +2947,13 b' def trydiff('
2948 losedatafn,
2947 losedatafn,
2949 pathfn,
2948 pathfn,
2950 ):
2949 ):
2951 '''given input data, generate a diff and yield it in blocks
2950 """given input data, generate a diff and yield it in blocks
2952
2951
2953 If generating a diff would lose data like flags or binary data and
2952 If generating a diff would lose data like flags or binary data and
2954 losedatafn is not None, it will be called.
2953 losedatafn is not None, it will be called.
2955
2954
2956 pathfn is applied to every path in the diff output.
2955 pathfn is applied to every path in the diff output.
2957 '''
2956 """
2958
2957
2959 if opts.noprefix:
2958 if opts.noprefix:
2960 aprefix = bprefix = b''
2959 aprefix = bprefix = b''
@@ -3079,7 +3078,7 b' def trydiff('
3079
3078
3080
3079
3081 def diffcontent(data1, data2, header, binary, opts):
3080 def diffcontent(data1, data2, header, binary, opts):
3082 """ diffs two versions of a file.
3081 """diffs two versions of a file.
3083
3082
3084 data1 and data2 are tuples containing:
3083 data1 and data2 are tuples containing:
3085
3084
@@ -3241,9 +3240,9 b' def diffstat(lines, width=80):'
3241
3240
3242
3241
3243 def diffstatui(*args, **kw):
3242 def diffstatui(*args, **kw):
3244 '''like diffstat(), but yields 2-tuples of (output, label) for
3243 """like diffstat(), but yields 2-tuples of (output, label) for
3245 ui.write()
3244 ui.write()
3246 '''
3245 """
3247
3246
3248 for line in diffstat(*args, **kw).splitlines():
3247 for line in diffstat(*args, **kw).splitlines():
3249 if line and line[-1] in b'+-':
3248 if line and line[-1] in b'+-':
@@ -24,7 +24,7 b' def _lowerclean(s):'
24
24
25
25
26 class pathauditor(object):
26 class pathauditor(object):
27 '''ensure that a filesystem path contains no banned components.
27 """ensure that a filesystem path contains no banned components.
28 the following properties of a path are checked:
28 the following properties of a path are checked:
29
29
30 - ends with a directory separator
30 - ends with a directory separator
@@ -44,7 +44,7 b' class pathauditor(object):'
44 If 'cached' is set to True, audited paths and sub-directories are cached.
44 If 'cached' is set to True, audited paths and sub-directories are cached.
45 Be careful to not keep the cache of unmanaged directories for long because
45 Be careful to not keep the cache of unmanaged directories for long because
46 audited paths may be replaced with symlinks.
46 audited paths may be replaced with symlinks.
47 '''
47 """
48
48
49 def __init__(self, root, callback=None, realfs=True, cached=False):
49 def __init__(self, root, callback=None, realfs=True, cached=False):
50 self.audited = set()
50 self.audited = set()
@@ -59,8 +59,8 b' class pathauditor(object):'
59 self.normcase = lambda x: x
59 self.normcase = lambda x: x
60
60
61 def __call__(self, path, mode=None):
61 def __call__(self, path, mode=None):
62 '''Check the relative path.
62 """Check the relative path.
63 path may contain a pattern (e.g. foodir/**.txt)'''
63 path may contain a pattern (e.g. foodir/**.txt)"""
64
64
65 path = util.localpath(path)
65 path = util.localpath(path)
66 normpath = self.normcase(path)
66 normpath = self.normcase(path)
@@ -164,7 +164,7 b' class pathauditor(object):'
164
164
165
165
166 def canonpath(root, cwd, myname, auditor=None):
166 def canonpath(root, cwd, myname, auditor=None):
167 '''return the canonical path of myname, given cwd and root
167 """return the canonical path of myname, given cwd and root
168
168
169 >>> def check(root, cwd, myname):
169 >>> def check(root, cwd, myname):
170 ... a = pathauditor(root, realfs=False)
170 ... a = pathauditor(root, realfs=False)
@@ -204,7 +204,7 b' def canonpath(root, cwd, myname, auditor'
204 'filename'
204 'filename'
205 >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
205 >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
206 'subdir/filename'
206 'subdir/filename'
207 '''
207 """
208 if util.endswithsep(root):
208 if util.endswithsep(root):
209 rootsep = root
209 rootsep = root
210 else:
210 else:
@@ -266,7 +266,7 b' def canonpath(root, cwd, myname, auditor'
266
266
267
267
268 def normasprefix(path):
268 def normasprefix(path):
269 '''normalize the specified path as path prefix
269 """normalize the specified path as path prefix
270
270
271 Returned value can be used safely for "p.startswith(prefix)",
271 Returned value can be used safely for "p.startswith(prefix)",
272 "p[len(prefix):]", and so on.
272 "p[len(prefix):]", and so on.
@@ -280,7 +280,7 b' def normasprefix(path):'
280 '/foo/bar/'
280 '/foo/bar/'
281 >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
281 >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
282 '/'
282 '/'
283 '''
283 """
284 d, p = os.path.splitdrive(path)
284 d, p = os.path.splitdrive(path)
285 if len(p) != len(pycompat.ossep):
285 if len(p) != len(pycompat.ossep):
286 return path + pycompat.ossep
286 return path + pycompat.ossep
@@ -300,9 +300,9 b' class dirs(object):'
300 '''a multiset of directory names from a set of file paths'''
300 '''a multiset of directory names from a set of file paths'''
301
301
302 def __init__(self, map, skip=None):
302 def __init__(self, map, skip=None):
303 '''
303 """
304 a dict map indicates a dirstate while a list indicates a manifest
304 a dict map indicates a dirstate while a list indicates a manifest
305 '''
305 """
306 self._dirs = {}
306 self._dirs = {}
307 addpath = self.addpath
307 addpath = self.addpath
308 if isinstance(map, dict) and skip is not None:
308 if isinstance(map, dict) and skip is not None:
@@ -76,7 +76,7 b' else:'
76
76
77
77
78 def split(p):
78 def split(p):
79 '''Same as posixpath.split, but faster
79 """Same as posixpath.split, but faster
80
80
81 >>> import posixpath
81 >>> import posixpath
82 >>> for f in [b'/absolute/path/to/file',
82 >>> for f in [b'/absolute/path/to/file',
@@ -88,7 +88,7 b' def split(p):'
88 ... b'///multiple_leading_separators_at_root',
88 ... b'///multiple_leading_separators_at_root',
89 ... b'']:
89 ... b'']:
90 ... assert split(f) == posixpath.split(f), f
90 ... assert split(f) == posixpath.split(f), f
91 '''
91 """
92 ht = p.rsplit(b'/', 1)
92 ht = p.rsplit(b'/', 1)
93 if len(ht) == 1:
93 if len(ht) == 1:
94 return b'', p
94 return b'', p
@@ -183,9 +183,9 b' def setflags(f, l, x):'
183
183
184
184
185 def copymode(src, dst, mode=None, enforcewritable=False):
185 def copymode(src, dst, mode=None, enforcewritable=False):
186 '''Copy the file mode from the file at path src to dst.
186 """Copy the file mode from the file at path src to dst.
187 If src doesn't exist, we're using mode instead. If mode is None, we're
187 If src doesn't exist, we're using mode instead. If mode is None, we're
188 using umask.'''
188 using umask."""
189 try:
189 try:
190 st_mode = os.lstat(src).st_mode & 0o777
190 st_mode = os.lstat(src).st_mode & 0o777
191 except OSError as inst:
191 except OSError as inst:
@@ -359,24 +359,24 b' def checklink(path):'
359
359
360
360
361 def checkosfilename(path):
361 def checkosfilename(path):
362 '''Check that the base-relative path is a valid filename on this platform.
362 """Check that the base-relative path is a valid filename on this platform.
363 Returns None if the path is ok, or a UI string describing the problem.'''
363 Returns None if the path is ok, or a UI string describing the problem."""
364 return None # on posix platforms, every path is ok
364 return None # on posix platforms, every path is ok
365
365
366
366
367 def getfsmountpoint(dirpath):
367 def getfsmountpoint(dirpath):
368 '''Get the filesystem mount point from a directory (best-effort)
368 """Get the filesystem mount point from a directory (best-effort)
369
369
370 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
370 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
371 '''
371 """
372 return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
372 return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
373
373
374
374
375 def getfstype(dirpath):
375 def getfstype(dirpath):
376 '''Get the filesystem type name from a directory (best-effort)
376 """Get the filesystem type name from a directory (best-effort)
377
377
378 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
378 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
379 '''
379 """
380 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
380 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
381
381
382
382
@@ -419,7 +419,7 b' normcasefallback = normcase'
419 if pycompat.isdarwin:
419 if pycompat.isdarwin:
420
420
421 def normcase(path):
421 def normcase(path):
422 '''
422 """
423 Normalize a filename for OS X-compatible comparison:
423 Normalize a filename for OS X-compatible comparison:
424 - escape-encode invalid characters
424 - escape-encode invalid characters
425 - decompose to NFD
425 - decompose to NFD
@@ -434,7 +434,7 b' if pycompat.isdarwin:'
434 'e\\xcc\\x81'
434 'e\\xcc\\x81'
435 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
435 >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
436 '%b8%ca%c3\\xca\\xbe%c8.jpg'
436 '%b8%ca%c3\\xca\\xbe%c8.jpg'
437 '''
437 """
438
438
439 try:
439 try:
440 return encoding.asciilower(path) # exception for non-ASCII
440 return encoding.asciilower(path) # exception for non-ASCII
@@ -475,7 +475,12 b" if pycompat.sysplatform == b'cygwin':"
475
475
476 # default mount points
476 # default mount points
477 cygwinmountpoints = sorted(
477 cygwinmountpoints = sorted(
478 [b"/usr/bin", b"/usr/lib", b"/cygdrive",], reverse=True
478 [
479 b"/usr/bin",
480 b"/usr/lib",
481 b"/cygdrive",
482 ],
483 reverse=True,
479 )
484 )
480
485
481 # use upper-ing as normcase as same as NTFS workaround
486 # use upper-ing as normcase as same as NTFS workaround
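Note that the exploded sorted(...) call above also gained a comma after reverse=True: when 20.8b1 splits a call across lines it appends a trailing comma to the last element, which in turn re-selects the exploded layout on every later run, making the output a fixed point. A small idempotence sketch (assumes black==20.8b1 is installed):

    import black

    src = (
        'cygwinmountpoints = sorted([b"/usr/bin", b"/usr/lib",'
        ' b"/cygdrive",], reverse=True)\n'
    )
    once = black.format_str(src, mode=black.FileMode())
    # A second pass changes nothing: the trailing commas added by the
    # first pass keep the list and the call exploded.
    assert black.format_str(once, mode=black.FileMode()) == once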
@@ -553,10 +558,10 b' def isowner(st):'
553
558
554
559
555 def findexe(command):
560 def findexe(command):
556 '''Find executable for command searching like which does.
561 """Find executable for command searching like which does.
557 If command is a basename then PATH is searched for command.
562 If command is a basename then PATH is searched for command.
558 PATH isn't searched if command is an absolute or relative path.
563 PATH isn't searched if command is an absolute or relative path.
559 If command isn't found None is returned.'''
564 If command isn't found None is returned."""
560 if pycompat.sysplatform == b'OpenVMS':
565 if pycompat.sysplatform == b'OpenVMS':
561 return command
566 return command
562
567
@@ -587,8 +592,8 b' def setsignalhandler():'
587
592
588
593
589 def statfiles(files):
594 def statfiles(files):
590 '''Stat each file in files. Yield each stat, or None if a file does not
595 """Stat each file in files. Yield each stat, or None if a file does not
591 exist or has a type we don't care about.'''
596 exist or has a type we don't care about."""
592 lstat = os.lstat
597 lstat = os.lstat
593 getkind = stat.S_IFMT
598 getkind = stat.S_IFMT
594 for nf in files:
599 for nf in files:
@@ -251,7 +251,7 b' class progbar(object):'
251 return False
251 return False
252
252
253 def _calibrateestimate(self, topic, now, pos):
253 def _calibrateestimate(self, topic, now, pos):
254 '''Adjust starttimes and startvals for topic so ETA works better
254 """Adjust starttimes and startvals for topic so ETA works better
255
255
256 If progress is non-linear (ex. get much slower in the last minute),
256 If progress is non-linear (ex. get much slower in the last minute),
257 it's more friendly to only use a recent time span for ETA and speed
257 it's more friendly to only use a recent time span for ETA and speed
@@ -260,7 +260,7 b' class progbar(object):'
260 [======================================> ]
260 [======================================> ]
261 ^^^^^^^
261 ^^^^^^^
262 estimateinterval, only use this for estimation
262 estimateinterval, only use this for estimation
263 '''
263 """
264 interval = self.estimateinterval
264 interval = self.estimateinterval
265 if interval <= 0:
265 if interval <= 0:
266 return
266 return
@@ -21,17 +21,17 b' def isasciistr(s):'
21
21
22
22
23 def asciilower(s):
23 def asciilower(s):
24 '''convert a string to lowercase if ASCII
24 """convert a string to lowercase if ASCII
25
25
26 Raises UnicodeDecodeError if non-ASCII characters are found.'''
26 Raises UnicodeDecodeError if non-ASCII characters are found."""
27 s.decode('ascii')
27 s.decode('ascii')
28 return s.lower()
28 return s.lower()
29
29
30
30
31 def asciiupper(s):
31 def asciiupper(s):
32 '''convert a string to uppercase if ASCII
32 """convert a string to uppercase if ASCII
33
33
34 Raises UnicodeDecodeError if non-ASCII characters are found.'''
34 Raises UnicodeDecodeError if non-ASCII characters are found."""
35 s.decode('ascii')
35 s.decode('ascii')
36 return s.upper()
36 return s.upper()
37
37
@@ -15,8 +15,7 b' stringio = pycompat.bytesio'
15
15
16
16
17 class mpatchError(Exception):
17 class mpatchError(Exception):
18 """error raised when a delta cannot be decoded
18 """error raised when a delta cannot be decoded"""
19 """
20
19
21
20
22 # This attempts to apply a series of patches in time proportional to
21 # This attempts to apply a series of patches in time proportional to
@@ -39,7 +39,7 b' def _mode_to_kind(mode):'
39
39
40
40
41 def listdir(path, stat=False, skip=None):
41 def listdir(path, stat=False, skip=None):
42 '''listdir(path, stat=False) -> list_of_tuples
42 """listdir(path, stat=False) -> list_of_tuples
43
43
44 Return a sorted list containing information about the entries
44 Return a sorted list containing information about the entries
45 in the directory.
45 in the directory.
@@ -51,7 +51,7 b' def listdir(path, stat=False, skip=None)'
51 Otherwise, each element is a 2-tuple:
51 Otherwise, each element is a 2-tuple:
52
52
53 (name, type)
53 (name, type)
54 '''
54 """
55 result = []
55 result = []
56 prefix = path
56 prefix = path
57 if not prefix.endswith(pycompat.ossep):
57 if not prefix.endswith(pycompat.ossep):
@@ -222,7 +222,7 b' else:'
222 )
222 )
223
223
224 class posixfile(object):
224 class posixfile(object):
225 '''a file object aiming for POSIX-like semantics
225 """a file object aiming for POSIX-like semantics
226
226
227 CPython's open() returns a file that was opened *without* setting the
227 CPython's open() returns a file that was opened *without* setting the
228 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
228 _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
@@ -231,7 +231,7 b' else:'
231 renamed and deleted while they are held open.
231 renamed and deleted while they are held open.
232 Note that if a file opened with posixfile is unlinked, the file
232 Note that if a file opened with posixfile is unlinked, the file
233 remains but cannot be opened again or be recreated under the same name,
233 remains but cannot be opened again or be recreated under the same name,
234 until all reading processes have closed the file.'''
234 until all reading processes have closed the file."""
235
235
236 def __init__(self, name, mode=b'r', bufsize=-1):
236 def __init__(self, name, mode=b'r', bufsize=-1):
237 if b'b' in mode:
237 if b'b' in mode:
@@ -290,11 +290,11 b' else:'
290 return getattr(self._file, name)
290 return getattr(self._file, name)
291
291
292 def __setattr__(self, name, value):
292 def __setattr__(self, name, value):
293 '''mimics the read-only attributes of Python file objects
293 """mimics the read-only attributes of Python file objects
294 by raising 'TypeError: readonly attribute' if someone tries:
294 by raising 'TypeError: readonly attribute' if someone tries:
295 f = posixfile('foo.txt')
295 f = posixfile('foo.txt')
296 f.name = 'bla'
296 f.name = 'bla'
297 '''
297 """
298 return self._file.__setattr__(name, value)
298 return self._file.__setattr__(name, value)
299
299
300 def __enter__(self):
300 def __enter__(self):
@@ -234,8 +234,7 b' def parse_index2(data, inline):'
234
234
235
235
236 def parse_index_devel_nodemap(data, inline):
236 def parse_index_devel_nodemap(data, inline):
237 """like parse_index2, but alway return a PersistentNodeMapIndexObject
237 """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
238 """
239 return PersistentNodeMapIndexObject(data), None
238 return PersistentNodeMapIndexObject(data), None
240
239
241
240
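Because the docstring pass rewrote thousands of lines across the tree, reproducing or extending this commit requires the exact formatter release named in its subject; a newer black may format some of these spots differently and produce spurious diffs. A trivial guard, ours rather than Mercurial's:

    import black

    # 20.8b1 is the release this commit standardizes on; anything else
    # may disagree with the formatting shown in these hunks.
    assert black.__version__ == "20.8b1", black.__version__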
@@ -39,13 +39,13 b' def _expandrcpath(path):'
39
39
40
40
41 def envrcitems(env=None):
41 def envrcitems(env=None):
42 '''Return [(section, name, value, source)] config items.
42 """Return [(section, name, value, source)] config items.
43
43
44 The config items are extracted from environment variables specified by env,
44 The config items are extracted from environment variables specified by env,
45 used to override systemrc, but not userrc.
45 used to override systemrc, but not userrc.
46
46
47 If env is not provided, encoding.environ will be used.
47 If env is not provided, encoding.environ will be used.
48 '''
48 """
49 if env is None:
49 if env is None:
50 env = encoding.environ
50 env = encoding.environ
51 checklist = [
51 checklist = [
@@ -73,7 +73,7 b' def default_rc_resources():'
73
73
74
74
75 def rccomponents():
75 def rccomponents():
76 '''return an ordered [(type, obj)] about where to load configs.
76 """return an ordered [(type, obj)] about where to load configs.
77
77
78 respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
78 respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
79 used. if $HGRCPATH is not set, the platform default will be used.
79 used. if $HGRCPATH is not set, the platform default will be used.
@@ -84,7 +84,7 b' def rccomponents():'
84 obj is a string, and is the config file path. if type is 'items', obj is a
84 obj is a string, and is the config file path. if type is 'items', obj is a
85 list of (section, name, value, source) that should fill the config directly.
85 list of (section, name, value, source) that should fill the config directly.
86 If type is 'resource', obj is a tuple of (package name, resource name).
86 If type is 'resource', obj is a tuple of (package name, resource name).
87 '''
87 """
88 envrc = (b'items', envrcitems())
88 envrc = (b'items', envrcitems())
89
89
90 if b'HGRCPATH' in encoding.environ:
90 if b'HGRCPATH' in encoding.environ:
@@ -108,9 +108,9 b' def rccomponents():'
108
108
109
109
110 def defaultpagerenv():
110 def defaultpagerenv():
111 '''return a dict of default environment variables and their values,
111 """return a dict of default environment variables and their values,
112 intended to be set before starting a pager.
112 intended to be set before starting a pager.
113 '''
113 """
114 return {b'LESS': b'FRX', b'LV': b'-c'}
114 return {b'LESS': b'FRX', b'LV': b'-c'}
115
115
116
116
@@ -95,8 +95,7 b' class _funcregistrarbase(object):'
95 self._table.update(registrarbase._table)
95 self._table.update(registrarbase._table)
96
96
97 def _parsefuncdecl(self, decl):
97 def _parsefuncdecl(self, decl):
98 """Parse function declaration and return the name of function in it
98 """Parse function declaration and return the name of function in it"""
99 """
100 i = decl.find(b'(')
99 i = decl.find(b'(')
101 if i >= 0:
100 if i >= 0:
102 return decl[:i]
101 return decl[:i]
@@ -121,8 +120,7 b' class _funcregistrarbase(object):'
121 return self._docformat % (decl, doc)
120 return self._docformat % (decl, doc)
122
121
123 def _extrasetup(self, name, func):
122 def _extrasetup(self, name, func):
124 """Execute extra setup for registered function, if needed
123 """Execute extra setup for registered function, if needed"""
125 """
126
124
127
125
128 class command(_funcregistrarbase):
126 class command(_funcregistrarbase):
@@ -345,8 +343,7 b' class filesetpredicate(_funcregistrarbas'
345
343
346
344
347 class _templateregistrarbase(_funcregistrarbase):
345 class _templateregistrarbase(_funcregistrarbase):
348 """Base of decorator to register functions as template specific one
346 """Base of decorator to register functions as template specific one"""
349 """
350
347
351 _docformat = b":%s: %s"
348 _docformat = b":%s: %s"
352
349
@@ -48,8 +48,7 b' def hideablerevs(repo):'
48
48
49
49
50 def pinnedrevs(repo):
50 def pinnedrevs(repo):
51 """revisions blocking hidden changesets from being filtered
51 """revisions blocking hidden changesets from being filtered"""
52 """
53
52
54 cl = repo.changelog
53 cl = repo.changelog
55 pinned = set()
54 pinned = set()
@@ -1491,8 +1491,8 b' class revlog(object):'
1491
1491
1492 def lookup(self, id):
1492 def lookup(self, id):
1493 """locate a node based on:
1493 """locate a node based on:
1494 - revision number or str(revision number)
1494 - revision number or str(revision number)
1495 - nodeid or subset of hex nodeid
1495 - nodeid or subset of hex nodeid
1496 """
1496 """
1497 n = self._match(id)
1497 n = self._match(id)
1498 if n is not None:
1498 if n is not None:
@@ -1771,8 +1771,7 b' class revlog(object):'
1771 return rev - 1
1771 return rev - 1
1772
1772
1773 def issnapshot(self, rev):
1773 def issnapshot(self, rev):
1774 """tells whether rev is a snapshot
1774 """tells whether rev is a snapshot"""
1775 """
1776 if not self._sparserevlog:
1775 if not self._sparserevlog:
1777 return self.deltaparent(rev) == nullrev
1776 return self.deltaparent(rev) == nullrev
1778 elif util.safehasattr(self.index, b'issnapshot'):
1777 elif util.safehasattr(self.index, b'issnapshot'):
@@ -2037,8 +2036,7 b' class revlog(object):'
2037 self._chunkclear()
2036 self._chunkclear()
2038
2037
2039 def _nodeduplicatecallback(self, transaction, node):
2038 def _nodeduplicatecallback(self, transaction, node):
2040 """called when trying to add a node already stored.
2039 """called when trying to add a node already stored."""
2041 """
2042
2040
2043 def addrevision(
2041 def addrevision(
2044 self,
2042 self,
@@ -86,8 +86,7 b' def setup_persistent_nodemap(tr, revlog)'
86
86
87
87
88 class _NoTransaction(object):
88 class _NoTransaction(object):
89 """transaction like object to update the nodemap outside a transaction
89 """transaction like object to update the nodemap outside a transaction"""
90 """
91
90
92 def __init__(self):
91 def __init__(self):
93 self._postclose = {}
92 self._postclose = {}
@@ -129,8 +128,7 b' def update_persistent_nodemap(revlog):'
129
128
130
129
131 def _persist_nodemap(tr, revlog, pending=False):
130 def _persist_nodemap(tr, revlog, pending=False):
132 """Write nodemap data on disk for a given revlog
131 """Write nodemap data on disk for a given revlog"""
133 """
134 if getattr(revlog, 'filteredrevs', ()):
132 if getattr(revlog, 'filteredrevs', ()):
135 raise error.ProgrammingError(
133 raise error.ProgrammingError(
136 "cannot persist nodemap of a filtered changelog"
134 "cannot persist nodemap of a filtered changelog"
@@ -400,15 +398,13 b' def _other_rawdata_filepath(revlog, dock'
400
398
401
399
402 def persistent_data(index):
400 def persistent_data(index):
403 """return the persistent binary form for a nodemap for a given index
401 """return the persistent binary form for a nodemap for a given index"""
404 """
405 trie = _build_trie(index)
402 trie = _build_trie(index)
406 return _persist_trie(trie)
403 return _persist_trie(trie)
407
404
408
405
409 def update_persistent_data(index, root, max_idx, last_rev):
406 def update_persistent_data(index, root, max_idx, last_rev):
410 """return the incremental update for persistent nodemap from a given index
407 """return the incremental update for persistent nodemap from a given index"""
411 """
412 changed_block, trie = _update_trie(index, root, last_rev)
408 changed_block, trie = _update_trie(index, root, last_rev)
413 return (
409 return (
414 changed_block * S_BLOCK.size,
410 changed_block * S_BLOCK.size,
@@ -529,8 +529,7 b' def ancestorspec(repo, subset, x, n, ord'
529
529
530 @predicate(b'author(string)', safe=True, weight=10)
530 @predicate(b'author(string)', safe=True, weight=10)
531 def author(repo, subset, x):
531 def author(repo, subset, x):
532 """Alias for ``user(string)``.
532 """Alias for ``user(string)``."""
533 """
534 # i18n: "author" is a keyword
533 # i18n: "author" is a keyword
535 n = getstring(x, _(b"author requires a string"))
534 n = getstring(x, _(b"author requires a string"))
536 kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
535 kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
@@ -737,8 +736,7 b' def _children(repo, subset, parentset):'
737
736
738 @predicate(b'children(set)', safe=True)
737 @predicate(b'children(set)', safe=True)
739 def children(repo, subset, x):
738 def children(repo, subset, x):
740 """Child changesets of changesets in set.
739 """Child changesets of changesets in set."""
741 """
742 s = getset(repo, fullreposet(repo), x)
740 s = getset(repo, fullreposet(repo), x)
743 cs = _children(repo, subset, s)
741 cs = _children(repo, subset, s)
744 return subset & cs
742 return subset & cs
@@ -746,8 +744,7 b' def children(repo, subset, x):'
746
744
747 @predicate(b'closed()', safe=True, weight=10)
745 @predicate(b'closed()', safe=True, weight=10)
748 def closed(repo, subset, x):
746 def closed(repo, subset, x):
749 """Changeset is closed.
747 """Changeset is closed."""
750 """
751 # i18n: "closed" is a keyword
748 # i18n: "closed" is a keyword
752 getargs(x, 0, 0, _(b"closed takes no arguments"))
749 getargs(x, 0, 0, _(b"closed takes no arguments"))
753 return subset.filter(
750 return subset.filter(
@@ -771,8 +768,7 b' def _commonancestorheads(repo, subset, x'
771
768
772 @predicate(b'commonancestors(set)', safe=True)
769 @predicate(b'commonancestors(set)', safe=True)
773 def commonancestors(repo, subset, x):
770 def commonancestors(repo, subset, x):
774 """Changesets that are ancestors of every changeset in set.
771 """Changesets that are ancestors of every changeset in set."""
775 """
776 startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
772 startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
777 if not startrevs:
773 if not startrevs:
778 return baseset()
774 return baseset()
@@ -868,8 +864,7 b' def converted(repo, subset, x):'
868
864
869 @predicate(b'date(interval)', safe=True, weight=10)
865 @predicate(b'date(interval)', safe=True, weight=10)
870 def date(repo, subset, x):
866 def date(repo, subset, x):
871 """Changesets within the interval, see :hg:`help dates`.
867 """Changesets within the interval, see :hg:`help dates`."""
872 """
873 # i18n: "date" is a keyword
868 # i18n: "date" is a keyword
874 ds = getstring(x, _(b"date requires a string"))
869 ds = getstring(x, _(b"date requires a string"))
875 dm = dateutil.matchdate(ds)
870 dm = dateutil.matchdate(ds)
@@ -1108,8 +1103,7 b' def extdata(repo, subset, x):'
1108
1103
1109 @predicate(b'extinct()', safe=True)
1104 @predicate(b'extinct()', safe=True)
1110 def extinct(repo, subset, x):
1105 def extinct(repo, subset, x):
1111 """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)
1106 """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)"""
1112 """
1113 # i18n: "extinct" is a keyword
1107 # i18n: "extinct" is a keyword
1114 getargs(x, 0, 0, _(b"extinct takes no arguments"))
1108 getargs(x, 0, 0, _(b"extinct takes no arguments"))
1115 extincts = obsmod.getrevs(repo, b'extinct')
1109 extincts = obsmod.getrevs(repo, b'extinct')
@@ -1216,8 +1210,7 b' def filelog(repo, subset, x):'
1216
1210
1217 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
1211 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
1218 def first(repo, subset, x, order):
1212 def first(repo, subset, x, order):
1219 """An alias for limit().
1213 """An alias for limit()."""
1220 """
1221 return limit(repo, subset, x, order)
1214 return limit(repo, subset, x, order)
1222
1215
1223
1216
@@ -1341,8 +1334,7 b' def followlines(repo, subset, x):'
1341
1334
1342 @predicate(b'all()', safe=True)
1335 @predicate(b'all()', safe=True)
1343 def getall(repo, subset, x):
1336 def getall(repo, subset, x):
1344 """All changesets, the same as ``0:tip``.
1337 """All changesets, the same as ``0:tip``."""
1345 """
1346 # i18n: "all" is a keyword
1338 # i18n: "all" is a keyword
1347 getargs(x, 0, 0, _(b"all takes no arguments"))
1339 getargs(x, 0, 0, _(b"all takes no arguments"))
1348 return subset & spanset(repo) # drop "null" if any
1340 return subset & spanset(repo) # drop "null" if any
@@ -1480,8 +1472,7 b' def hasfile(repo, subset, x):'
1480
1472
1481 @predicate(b'head()', safe=True)
1473 @predicate(b'head()', safe=True)
1482 def head(repo, subset, x):
1474 def head(repo, subset, x):
1483 """Changeset is a named branch head.
1475 """Changeset is a named branch head."""
1484 """
1485 # i18n: "head" is a keyword
1476 # i18n: "head" is a keyword
1486 getargs(x, 0, 0, _(b"head takes no arguments"))
1477 getargs(x, 0, 0, _(b"head takes no arguments"))
1487 hs = set()
1478 hs = set()
@@ -1493,8 +1484,7 b' def head(repo, subset, x):'
1493
1484
1494 @predicate(b'heads(set)', safe=True, takeorder=True)
1485 @predicate(b'heads(set)', safe=True, takeorder=True)
1495 def heads(repo, subset, x, order):
1486 def heads(repo, subset, x, order):
1496 """Members of set with no children in set.
1487 """Members of set with no children in set."""
1497 """
1498 # argument set should never define order
1488 # argument set should never define order
1499 if order == defineorder:
1489 if order == defineorder:
1500 order = followorder
1490 order = followorder
@@ -1515,8 +1505,7 b' def heads(repo, subset, x, order):'
1515
1505
1516 @predicate(b'hidden()', safe=True)
1506 @predicate(b'hidden()', safe=True)
1517 def hidden(repo, subset, x):
1507 def hidden(repo, subset, x):
1518 """Hidden changesets.
1508 """Hidden changesets."""
1519 """
1520 # i18n: "hidden" is a keyword
1509 # i18n: "hidden" is a keyword
1521 getargs(x, 0, 0, _(b"hidden takes no arguments"))
1510 getargs(x, 0, 0, _(b"hidden takes no arguments"))
1522 hiddenrevs = repoview.filterrevs(repo, b'visible')
1511 hiddenrevs = repoview.filterrevs(repo, b'visible')
@@ -1546,8 +1535,7 b' def keyword(repo, subset, x):'
1546
1535
1547 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
1536 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
1548 def limit(repo, subset, x, order):
1537 def limit(repo, subset, x, order):
1549 """First n members of set, defaulting to 1, starting from offset.
1538 """First n members of set, defaulting to 1, starting from offset."""
1550 """
1551 args = getargsdict(x, b'limit', b'set n offset')
1539 args = getargsdict(x, b'limit', b'set n offset')
1552 if b'set' not in args:
1540 if b'set' not in args:
1553 # i18n: "limit" is a keyword
1541 # i18n: "limit" is a keyword
@@ -1571,8 +1559,7 b' def limit(repo, subset, x, order):'
1571
1559
1572 @predicate(b'last(set, [n])', safe=True, takeorder=True)
1560 @predicate(b'last(set, [n])', safe=True, takeorder=True)
1573 def last(repo, subset, x, order):
1561 def last(repo, subset, x, order):
1574 """Last n members of set, defaulting to 1.
1562 """Last n members of set, defaulting to 1."""
1575 """
1576 # i18n: "last" is a keyword
1563 # i18n: "last" is a keyword
1577 l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
1564 l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
1578 lim = 1
1565 lim = 1
@@ -1592,8 +1579,7 b' def last(repo, subset, x, order):'
1592
1579
1593 @predicate(b'max(set)', safe=True)
1580 @predicate(b'max(set)', safe=True)
1594 def maxrev(repo, subset, x):
1581 def maxrev(repo, subset, x):
1595 """Changeset with highest revision number in set.
1582 """Changeset with highest revision number in set."""
1596 """
1597 os = getset(repo, fullreposet(repo), x)
1583 os = getset(repo, fullreposet(repo), x)
1598 try:
1584 try:
1599 m = os.max()
1585 m = os.max()
@@ -1608,8 +1594,7 b' def maxrev(repo, subset, x):'
1608
1594
1609 @predicate(b'merge()', safe=True)
1595 @predicate(b'merge()', safe=True)
1610 def merge(repo, subset, x):
1596 def merge(repo, subset, x):
1611 """Changeset is a merge changeset.
1597 """Changeset is a merge changeset."""
1612 """
1613 # i18n: "merge" is a keyword
1598 # i18n: "merge" is a keyword
1614 getargs(x, 0, 0, _(b"merge takes no arguments"))
1599 getargs(x, 0, 0, _(b"merge takes no arguments"))
1615 cl = repo.changelog
1600 cl = repo.changelog
@@ -1626,8 +1611,7 b' def merge(repo, subset, x):'
1626
1611
1627 @predicate(b'branchpoint()', safe=True)
1612 @predicate(b'branchpoint()', safe=True)
1628 def branchpoint(repo, subset, x):
1613 def branchpoint(repo, subset, x):
1629 """Changesets with more than one child.
1614 """Changesets with more than one child."""
1630 """
1631 # i18n: "branchpoint" is a keyword
1615 # i18n: "branchpoint" is a keyword
1632 getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
1616 getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
1633 cl = repo.changelog
1617 cl = repo.changelog
@@ -1648,8 +1632,7 b' def branchpoint(repo, subset, x):'
1648
1632
1649 @predicate(b'min(set)', safe=True)
1633 @predicate(b'min(set)', safe=True)
1650 def minrev(repo, subset, x):
1634 def minrev(repo, subset, x):
1651 """Changeset with lowest revision number in set.
1635 """Changeset with lowest revision number in set."""
1652 """
1653 os = getset(repo, fullreposet(repo), x)
1636 os = getset(repo, fullreposet(repo), x)
1654 try:
1637 try:
1655 m = os.min()
1638 m = os.min()
@@ -1715,8 +1698,7 b' def named(repo, subset, x):'
1715
1698
1716 @predicate(b'id(string)', safe=True)
1699 @predicate(b'id(string)', safe=True)
1717 def node_(repo, subset, x):
1700 def node_(repo, subset, x):
1718 """Revision non-ambiguously specified by the given hex string prefix.
1701 """Revision non-ambiguously specified by the given hex string prefix."""
1719 """
1720 # i18n: "id" is a keyword
1702 # i18n: "id" is a keyword
1721 l = getargs(x, 1, 1, _(b"id requires one argument"))
1703 l = getargs(x, 1, 1, _(b"id requires one argument"))
1722 # i18n: "id" is a keyword
1704 # i18n: "id" is a keyword
@@ -1747,8 +1729,7 b' def node_(repo, subset, x):'
1747
1729
1748 @predicate(b'none()', safe=True)
1730 @predicate(b'none()', safe=True)
1749 def none(repo, subset, x):
1731 def none(repo, subset, x):
1750 """No changesets.
1732 """No changesets."""
1751 """
1752 # i18n: "none" is a keyword
1733 # i18n: "none" is a keyword
1753 getargs(x, 0, 0, _(b"none takes no arguments"))
1734 getargs(x, 0, 0, _(b"none takes no arguments"))
1754 return baseset()
1735 return baseset()
@@ -1869,8 +1850,7 b' def outgoing(repo, subset, x):'
1869
1850
1870 @predicate(b'p1([set])', safe=True)
1851 @predicate(b'p1([set])', safe=True)
1871 def p1(repo, subset, x):
1852 def p1(repo, subset, x):
1872 """First parent of changesets in set, or the working directory.
1853 """First parent of changesets in set, or the working directory."""
1873 """
1874 if x is None:
1854 if x is None:
1875 p = repo[x].p1().rev()
1855 p = repo[x].p1().rev()
1876 if p >= 0:
1856 if p >= 0:
@@ -1892,8 +1872,7 b' def p1(repo, subset, x):'
1892
1872
1893 @predicate(b'p2([set])', safe=True)
1873 @predicate(b'p2([set])', safe=True)
1894 def p2(repo, subset, x):
1874 def p2(repo, subset, x):
1895 """Second parent of changesets in set, or the working directory.
1875 """Second parent of changesets in set, or the working directory."""
1896 """
1897 if x is None:
1876 if x is None:
1898 ps = repo[x].parents()
1877 ps = repo[x].parents()
1899 try:
1878 try:
@@ -2305,8 +2284,7 b' def matching(repo, subset, x):'
2305
2284
2306 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
2285 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
2307 def reverse(repo, subset, x, order):
2286 def reverse(repo, subset, x, order):
2308 """Reverse order of set.
2287 """Reverse order of set."""
2309 """
2310 l = getset(repo, subset, x, order)
2288 l = getset(repo, subset, x, order)
2311 if order == defineorder:
2289 if order == defineorder:
2312 l.reverse()
2290 l.reverse()
@@ -2315,8 +2293,7 b' def reverse(repo, subset, x, order):'
2315
2293
2316 @predicate(b'roots(set)', safe=True)
2294 @predicate(b'roots(set)', safe=True)
2317 def roots(repo, subset, x):
2295 def roots(repo, subset, x):
2318 """Changesets in set with no parent changeset in set.
2296 """Changesets in set with no parent changeset in set."""
2319 """
2320 s = getset(repo, fullreposet(repo), x)
2297 s = getset(repo, fullreposet(repo), x)
2321 parents = repo.changelog.parentrevs
2298 parents = repo.changelog.parentrevs
2322
2299
@@ -2556,8 +2533,7 b' def tagged(repo, subset, x):'
2556
2533
2557 @predicate(b'orphan()', safe=True)
2534 @predicate(b'orphan()', safe=True)
2558 def orphan(repo, subset, x):
2535 def orphan(repo, subset, x):
2559 """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
2536 """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)"""
2560 """
2561 # i18n: "orphan" is a keyword
2537 # i18n: "orphan" is a keyword
2562 getargs(x, 0, 0, _(b"orphan takes no arguments"))
2538 getargs(x, 0, 0, _(b"orphan takes no arguments"))
2563 orphan = obsmod.getrevs(repo, b'orphan')
2539 orphan = obsmod.getrevs(repo, b'orphan')
@@ -2566,8 +2542,7 b' def orphan(repo, subset, x):'
2566
2542
2567 @predicate(b'unstable()', safe=True)
2543 @predicate(b'unstable()', safe=True)
2568 def unstable(repo, subset, x):
2544 def unstable(repo, subset, x):
2569 """Changesets with instabilities. (EXPERIMENTAL)
2545 """Changesets with instabilities. (EXPERIMENTAL)"""
2570 """
2571 # i18n: "unstable" is a keyword
2546 # i18n: "unstable" is a keyword
2572 getargs(x, 0, 0, b'unstable takes no arguments')
2547 getargs(x, 0, 0, b'unstable takes no arguments')
2573 _unstable = set()
2548 _unstable = set()
@@ -2781,8 +2756,7 b' def makematcher(tree):'
2781
2756
2782
2757
2783 def loadpredicate(ui, extname, registrarobj):
2758 def loadpredicate(ui, extname, registrarobj):
2784 """Load revset predicates from specified registrarobj
2759 """Load revset predicates from specified registrarobj"""
2785 """
2786 for name, func in pycompat.iteritems(registrarobj._table):
2760 for name, func in pycompat.iteritems(registrarobj._table):
2787 symbols[name] = func
2761 symbols[name] = func
2788 if func._safe:
2762 if func._safe:
@@ -83,7 +83,7 b' symbols = {}'
83
83
84
84
85 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
85 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
86 '''
86 """
87 Parse a revset statement into a stream of tokens
87 Parse a revset statement into a stream of tokens
88
88
89 ``syminitletters`` is the set of valid characters for the initial
89 ``syminitletters`` is the set of valid characters for the initial
@@ -102,7 +102,7 b' def tokenize(program, lookup=None, symin'
102 >>> list(tokenize(b"@::"))
102 >>> list(tokenize(b"@::"))
103 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
103 [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
104
104
105 '''
105 """
106 if not isinstance(program, bytes):
106 if not isinstance(program, bytes):
107 raise error.ProgrammingError(
107 raise error.ProgrammingError(
108 b'revset statement must be bytes, got %r' % program
108 b'revset statement must be bytes, got %r' % program
@@ -621,8 +621,7 b' def expandaliases(tree, aliases, warn=No'
621
621
622
622
623 def foldconcat(tree):
623 def foldconcat(tree):
624 """Fold elements to be concatenated by `##`
624 """Fold elements to be concatenated by `##`"""
625 """
626 if not isinstance(tree, tuple) or tree[0] in (
625 if not isinstance(tree, tuple) or tree[0] in (
627 b'string',
626 b'string',
628 b'symbol',
627 b'symbol',
@@ -742,7 +741,7 b' def _formatparamexp(args, t):'
742
741
743
742
744 def formatspec(expr, *args):
743 def formatspec(expr, *args):
745 '''
744 """
746 This is a convenience function for using revsets internally, and
745 This is a convenience function for using revsets internally, and
747 escapes arguments appropriately. Aliases are intentionally ignored
746 escapes arguments appropriately. Aliases are intentionally ignored
748 so that intended expression behavior isn't accidentally subverted.
747 so that intended expression behavior isn't accidentally subverted.
@@ -777,7 +776,7 b' def formatspec(expr, *args):'
777 "sort((:), 'desc', 'user')"
776 "sort((:), 'desc', 'user')"
778 >>> formatspec(b'%ls', [b'a', b"'"])
777 >>> formatspec(b'%ls', [b'a', b"'"])
779 "_list('a\\\\x00\\\\'')"
778 "_list('a\\\\x00\\\\'')"
780 '''
779 """
781 parsed = _parseargs(expr, args)
780 parsed = _parseargs(expr, args)
782 ret = []
781 ret = []
783 for t, arg in parsed:
782 for t, arg in parsed:
@@ -66,11 +66,11 b' termsize = scmplatform.termsize'
66
66
67 @attr.s(slots=True, repr=False)
67 @attr.s(slots=True, repr=False)
68 class status(object):
68 class status(object):
69 '''Struct with a list of files per status.
69 """Struct with a list of files per status.
70
70
71 The 'deleted', 'unknown' and 'ignored' properties are only
71 The 'deleted', 'unknown' and 'ignored' properties are only
72 relevant to the working copy.
72 relevant to the working copy.
73 '''
73 """
74
74
75 modified = attr.ib(default=attr.Factory(list))
75 modified = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
76 added = attr.ib(default=attr.Factory(list))
@@ -123,9 +123,9 b' def itersubrepos(ctx1, ctx2):'
123
123
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull; excluded is None or a list of
126 """Report no changes for push/pull; excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 """
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
@@ -335,8 +335,8 b' def checkportable(ui, f):'
335
335
336
336
337 def checkportabilityalert(ui):
337 def checkportabilityalert(ui):
338 '''check if the user's config requests nothing, a warning, or abort for
338 """check if the user's config requests nothing, a warning, or abort for
339 non-portable filenames'''
339 non-portable filenames"""
340 val = ui.config(b'ui', b'portablefilenames')
340 val = ui.config(b'ui', b'portablefilenames')
341 lval = val.lower()
341 lval = val.lower()
342 bval = stringutil.parsebool(val)
342 bval = stringutil.parsebool(val)
@@ -402,8 +402,8 b' def filteredhash(repo, maxrev):'
402
402
403
403
404 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
404 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
405 '''yield every hg repository under path, always recursively.
405 """yield every hg repository under path, always recursively.
406 The recurse flag will only control recursion into repo working dirs'''
406 The recurse flag will only control recursion into repo working dirs"""
407
407
408 def errhandler(err):
408 def errhandler(err):
409 if err.filename == path:
409 if err.filename == path:
@@ -793,7 +793,7 b' def increasingwindows(windowsize=8, size'
793
793
794
794
795 def walkchangerevs(repo, revs, makefilematcher, prepare):
795 def walkchangerevs(repo, revs, makefilematcher, prepare):
796 '''Iterate over files and the revs in a "windowed" way.
796 """Iterate over files and the revs in a "windowed" way.
797
797
798 Callers most commonly need to iterate backwards over the history
798 Callers most commonly need to iterate backwards over the history
799 in which they are interested. Doing so has awful (quadratic-looking)
799 in which they are interested. Doing so has awful (quadratic-looking)
@@ -805,7 +805,7 b' def walkchangerevs(repo, revs, makefilem'
805
805
806 This function returns an iterator yielding contexts. Before
806 This function returns an iterator yielding contexts. Before
807 yielding each context, the iterator will first call the prepare
807 yielding each context, the iterator will first call the prepare
808 function on each context in the window in forward order.'''
808 function on each context in the window in forward order."""
809
809
810 if not revs:
810 if not revs:
811 return []
811 return []
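
The docstring above is the substance of this hunk: history is consumed in
growing windows so that backward iteration stays close to linear instead of
quadratic. A toy sketch of the windowing idea, assuming the doubling schedule
suggested by increasingwindows(windowsize=8, sizelimit=512) in the hunk header
above (the exact growth rule is an assumption here):

    def increasing_windows(windowsize=8, sizelimit=512):
        while True:
            yield windowsize
            if windowsize < sizelimit:
                windowsize *= 2

    def windowed(revs):
        it = iter(revs)
        for size in increasing_windows():
            chunk = [r for _, r in zip(range(size), it)]
            if not chunk:
                return
            yield chunk

    print(list(windowed(range(30))))  # three windows: 8, 16, then the last 6
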
@@ -897,17 +897,17 b' def subdiruipathfn(subpath, uipathfn):'
897
897
898
898
899 def anypats(pats, opts):
899 def anypats(pats, opts):
900 '''Checks if any patterns, including --include and --exclude, were given.
900 """Checks if any patterns, including --include and --exclude, were given.
901
901
902 Some commands (e.g. addremove) use this condition for deciding whether to
902 Some commands (e.g. addremove) use this condition for deciding whether to
903 print absolute or relative paths.
903 print absolute or relative paths.
904 '''
904 """
905 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
905 return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
906
906
907
907
908 def expandpats(pats):
908 def expandpats(pats):
909 '''Expand bare globs when running on windows.
909 """Expand bare globs when running on windows.
910 On posix we assume it has already been done by sh.'''
910 On posix we assume it has already been done by sh."""
911 if not util.expandglobs:
911 if not util.expandglobs:
912 return list(pats)
912 return list(pats)
913 ret = []
913 ret = []
@@ -928,9 +928,9 b' def expandpats(pats):'
928 def matchandpats(
928 def matchandpats(
929 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
929 ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
930 ):
930 ):
931 '''Return a matcher and the patterns that were used.
931 """Return a matcher and the patterns that were used.
932 The matcher will warn about bad matches, unless an alternate badfn callback
932 The matcher will warn about bad matches, unless an alternate badfn callback
933 is provided.'''
933 is provided."""
934 if opts is None:
934 if opts is None:
935 opts = {}
935 opts = {}
936 if not globbed and default == b'relpath':
936 if not globbed and default == b'relpath':
@@ -1001,7 +1001,7 b' def getorigvfs(ui, repo):'
1001
1001
1002
1002
1003 def backuppath(ui, repo, filepath):
1003 def backuppath(ui, repo, filepath):
1004 '''customize where working copy backup files (.orig files) are created
1004 """customize where working copy backup files (.orig files) are created
1005
1005
1006 Fetch user defined path from config file: [ui] origbackuppath = <path>
1006 Fetch user defined path from config file: [ui] origbackuppath = <path>
1007 Fall back to default (filepath with .orig suffix) if not specified
1007 Fall back to default (filepath with .orig suffix) if not specified
@@ -1009,7 +1009,7 b' def backuppath(ui, repo, filepath):'
1009 filepath is repo-relative
1009 filepath is repo-relative
1010
1010
1011 Returns an absolute path
1011 Returns an absolute path
1012 '''
1012 """
1013 origvfs = getorigvfs(ui, repo)
1013 origvfs = getorigvfs(ui, repo)
1014 if origvfs is None:
1014 if origvfs is None:
1015 return repo.wjoin(filepath + b".orig")
1015 return repo.wjoin(filepath + b".orig")
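
The lookup rule in this docstring is small enough to restate standalone: use
[ui] origbackuppath when configured, else append a .orig suffix under the
repo root. A hedged sketch; the real code resolves origbackuppath through a
vfs, and plain os.path is used here only for illustration:

    import os

    def backup_path(repo_root, filepath, origbackuppath=None):
        if origbackuppath is None:
            return os.path.join(repo_root, filepath + '.orig')
        return os.path.join(origbackuppath, filepath)

    print(backup_path('/repo', 'src/f.txt'))              # /repo/src/f.txt.orig
    print(backup_path('/repo', 'src/f.txt', '/backups'))  # /backups/src/f.txt
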
@@ -1300,8 +1300,8 b' def addremove(repo, matcher, prefix, uip'
1300
1300
1301
1301
1302 def marktouched(repo, files, similarity=0.0):
1302 def marktouched(repo, files, similarity=0.0):
1303 '''Assert that files have somehow been operated upon. files are relative to
1303 """Assert that files have somehow been operated upon. files are relative to
1304 the repo root.'''
1304 the repo root."""
1305 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1305 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1306 rejected = []
1306 rejected = []
1307
1307
@@ -1335,11 +1335,11 b' def marktouched(repo, files, similarity='
1335
1335
1336
1336
1337 def _interestingfiles(repo, matcher):
1337 def _interestingfiles(repo, matcher):
1338 '''Walk dirstate with matcher, looking for files that addremove would care
1338 """Walk dirstate with matcher, looking for files that addremove would care
1339 about.
1339 about.
1340
1340
1341 This is different from dirstate.status because it doesn't care about
1341 This is different from dirstate.status because it doesn't care about
1342 whether files are modified or clean.'''
1342 whether files are modified or clean."""
1343 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1343 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1344 audit_path = pathutil.pathauditor(repo.root, cached=True)
1344 audit_path = pathutil.pathauditor(repo.root, cached=True)
1345
1345
@@ -1394,8 +1394,8 b' def _findrenames(repo, matcher, added, r'
1394
1394
1395
1395
1396 def _markchanges(repo, unknown, deleted, renames):
1396 def _markchanges(repo, unknown, deleted, renames):
1397 '''Marks the files in unknown as added, the files in deleted as removed,
1397 """Marks the files in unknown as added, the files in deleted as removed,
1398 and the files in renames as copied.'''
1398 and the files in renames as copied."""
1399 wctx = repo[None]
1399 wctx = repo[None]
1400 with repo.wlock():
1400 with repo.wlock():
1401 wctx.forget(deleted)
1401 wctx.forget(deleted)
@@ -1424,10 +1424,10 b' def getrenamedfn(repo, endrev=None):'
1424 endrev = len(repo)
1424 endrev = len(repo)
1425
1425
1426 def getrenamed(fn, rev):
1426 def getrenamed(fn, rev):
1427 '''looks up all renames for a file (up to endrev) the first
1427 """looks up all renames for a file (up to endrev) the first
1428 time the file is given. It indexes on the changerev and only
1428 time the file is given. It indexes on the changerev and only
1429 parses the manifest if linkrev != changerev.
1429 parses the manifest if linkrev != changerev.
1430 Returns rename info for fn at changerev rev.'''
1430 Returns rename info for fn at changerev rev."""
1431 if fn not in rcache:
1431 if fn not in rcache:
1432 rcache[fn] = {}
1432 rcache[fn] = {}
1433 fl = repo.file(fn)
1433 fl = repo.file(fn)
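
getrenamed() above is a memoized lookup: the rename table for a file is built
once, on first use, then consulted for every later revision. A self-contained
sketch of the pattern, with fetch_renames standing in for the filelog walk in
the real code:

    def make_getrenamed(fetch_renames):
        rcache = {}

        def getrenamed(fn, rev):
            if fn not in rcache:
                rcache[fn] = fetch_renames(fn)  # computed once per file
            return rcache[fn].get(rev)

        return getrenamed

    lookup = make_getrenamed(lambda fn: {2: ('old-name', 'node-id')})
    print(lookup('a.txt', 2))  # ('old-name', 'node-id')
    print(lookup('a.txt', 5))  # None: no rename recorded at that revision
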
@@ -1548,7 +1548,7 b' def movedirstate(repo, newctx, match=Non'
1548
1548
1549
1549
1550 def filterrequirements(requirements):
1550 def filterrequirements(requirements):
1551 """ filters the requirements into two sets:
1551 """filters the requirements into two sets:
1552
1552
1553 wcreq: requirements which should be written in .hg/requires
1553 wcreq: requirements which should be written in .hg/requires
1554 storereq: requirements which should be written in .hg/store/requires
1554 storereq: requirements which should be written in .hg/store/requires
@@ -1871,8 +1871,7 b' class progress(object):'
1871
1871
1872
1872
1873 def gdinitconfig(ui):
1873 def gdinitconfig(ui):
1874 """helper function to know if a repo should be created as general delta
1874 """helper function to know if a repo should be created as general delta"""
1875 """
1876 # experimental config: format.generaldelta
1875 # experimental config: format.generaldelta
1877 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1876 return ui.configbool(b'format', b'generaldelta') or ui.configbool(
1878 b'format', b'usegeneraldelta'
1877 b'format', b'usegeneraldelta'
@@ -1880,8 +1879,7 b' def gdinitconfig(ui):'
1880
1879
1881
1880
1882 def gddeltaconfig(ui):
1881 def gddeltaconfig(ui):
1883 """helper function to know if incoming delta should be optimised
1882 """helper function to know if incoming delta should be optimised"""
1884 """
1885 # experimental config: format.generaldelta
1883 # experimental config: format.generaldelta
1886 return ui.configbool(b'format', b'generaldelta')
1884 return ui.configbool(b'format', b'generaldelta')
1887
1885
@@ -292,9 +292,9 b' def findcommonheads('
292 ancestorsof=None,
292 ancestorsof=None,
293 samplegrowth=1.05,
293 samplegrowth=1.05,
294 ):
294 ):
295 '''Return a tuple (common, anyincoming, remoteheads) used to identify
295 """Return a tuple (common, anyincoming, remoteheads) used to identify
296 missing nodes from or in remote.
296 missing nodes from or in remote.
297 '''
297 """
298 start = util.timer()
298 start = util.timer()
299
299
300 roundtrips = 0
300 roundtrips = 0
@@ -371,7 +371,10 b' def findcommonheads('
371 with remote.commandexecutor() as e:
371 with remote.commandexecutor() as e:
372 fheads = e.callcommand(b'heads', {})
372 fheads = e.callcommand(b'heads', {})
373 fknown = e.callcommand(
373 fknown = e.callcommand(
374 b'known', {b'nodes': [clnode(r) for r in sample],}
374 b'known',
375 {
376 b'nodes': [clnode(r) for r in sample],
377 },
375 )
378 )
376
379
377 srvheadhashes, yesno = fheads.result(), fknown.result()
380 srvheadhashes, yesno = fheads.result(), fknown.result()
@@ -449,7 +452,10 b' def findcommonheads('
449
452
450 with remote.commandexecutor() as e:
453 with remote.commandexecutor() as e:
451 yesno = e.callcommand(
454 yesno = e.callcommand(
452 b'known', {b'nodes': [clnode(r) for r in sample],}
455 b'known',
456 {
457 b'nodes': [clnode(r) for r in sample],
458 },
453 ).result()
459 ).result()
454
460
455 full = True
461 full = True
@@ -350,8 +350,7 b' def _restoreactivebookmark(repo, mark):'
350
350
351
351
352 def _aborttransaction(repo, tr):
352 def _aborttransaction(repo, tr):
353 '''Abort current transaction for shelve/unshelve, but keep dirstate
353 """Abort current transaction for shelve/unshelve, but keep dirstate"""
354 '''
355 dirstatebackupname = b'dirstate.shelve'
354 dirstatebackupname = b'dirstate.shelve'
356 repo.dirstate.savebackup(tr, dirstatebackupname)
355 repo.dirstate.savebackup(tr, dirstatebackupname)
357 tr.abort()
356 tr.abort()
@@ -15,11 +15,11 b' from . import ('
15
15
16
16
17 def _findexactmatches(repo, added, removed):
17 def _findexactmatches(repo, added, removed):
18 '''find renamed files that have no changes
18 """find renamed files that have no changes
19
19
20 Takes a list of new filectxs and a list of removed filectxs, and yields
20 Takes a list of new filectxs and a list of removed filectxs, and yields
21 (before, after) tuples of exact matches.
21 (before, after) tuples of exact matches.
22 '''
22 """
23 # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
23 # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
24 # We use hash() to discard fctx.data() from memory.
24 # We use hash() to discard fctx.data() from memory.
25 hashes = {}
25 hashes = {}
@@ -77,11 +77,11 b' def score(fctx1, fctx2):'
77
77
78
78
79 def _findsimilarmatches(repo, added, removed, threshold):
79 def _findsimilarmatches(repo, added, removed, threshold):
80 '''find potentially renamed files based on similar file content
80 """find potentially renamed files based on similar file content
81
81
82 Takes a list of new filectxs and a list of removed filectxs, and yields
82 Takes a list of new filectxs and a list of removed filectxs, and yields
83 (before, after, score) tuples of partial matches.
83 (before, after, score) tuples of partial matches.
84 '''
84 """
85 copies = {}
85 copies = {}
86 progress = repo.ui.makeprogress(
86 progress = repo.ui.makeprogress(
87 _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
87 _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
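
_findsimilarmatches() pairs added files with removed files by content
similarity against a threshold. A hedged sketch of the idea, with difflib
standing in for Mercurial's own score():

    import difflib

    def best_matches(added, removed, threshold=0.75):
        for aname, adata in added.items():
            score, rname = max(
                (difflib.SequenceMatcher(None, rdata, adata).ratio(), rname)
                for rname, rdata in removed.items()
            )
            if score >= threshold:
                yield rname, aname, score

    removed = {'old.txt': 'one\ntwo\nthree\n'}
    added = {'new.txt': 'one\ntwo\nthree\nfour\n'}
    print(list(best_matches(added, removed)))  # [('old.txt', 'new.txt', 0.84...)]
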
@@ -57,8 +57,7 b' def intersect(ra, rb):'
57
57
58
58
59 def compare_range(a, astart, aend, b, bstart, bend):
59 def compare_range(a, astart, aend, b, bstart, bend):
60 """Compare a[astart:aend] == b[bstart:bend], without slicing.
60 """Compare a[astart:aend] == b[bstart:bend], without slicing."""
61 """
62 if (aend - astart) != (bend - bstart):
61 if (aend - astart) != (bend - bstart):
63 return False
62 return False
64 for ia, ib in zip(
63 for ia, ib in zip(
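
For reference, the method being reflowed here can be stated compactly; this
sketch is behavior-equivalent to the shown lines (compare two ranges element
by element, without building slices):

    def compare_range(a, astart, aend, b, bstart, bend):
        if (aend - astart) != (bend - bstart):
            return False
        return all(
            a[ia] == b[ib]
            for ia, ib in zip(range(astart, aend), range(bstart, bend))
        )

    print(compare_range(b'abcdef', 1, 4, b'xbcdy', 1, 4))  # True
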
@@ -102,8 +101,7 b' class Merge3Text(object):'
102 localorother=None,
101 localorother=None,
103 minimize=False,
102 minimize=False,
104 ):
103 ):
105 """Return merge in cvs-like form.
104 """Return merge in cvs-like form."""
106 """
107 self.conflicts = False
105 self.conflicts = False
108 newline = b'\n'
106 newline = b'\n'
109 if len(self.a) > 0:
107 if len(self.a) > 0:
@@ -121,8 +121,7 b' class doublepipe(object):'
121 return self._call(b'readline')
121 return self._call(b'readline')
122
122
123 def _call(self, methname, data=None):
123 def _call(self, methname, data=None):
124 """call <methname> on "main", forward output of "side" while blocking
124 """call <methname> on "main", forward output of "side" while blocking"""
125 """
126 # data can be '' or 0
125 # data can be '' or 0
127 if (data is not None and not data) or self._main.closed:
126 if (data is not None and not data) or self._main.closed:
128 _forwardoutput(self._ui, self._side)
127 _forwardoutput(self._ui, self._side)
@@ -227,8 +227,7 b' def _hostsettings(ui, hostname):'
227
227
228
228
229 def commonssloptions(minimumprotocol):
229 def commonssloptions(minimumprotocol):
230 """Return SSLContext options common to servers and clients.
230 """Return SSLContext options common to servers and clients."""
231 """
232 if minimumprotocol not in configprotocols:
231 if minimumprotocol not in configprotocols:
233 raise ValueError(b'protocol value not supported: %s' % minimumprotocol)
232 raise ValueError(b'protocol value not supported: %s' % minimumprotocol)
234
233
@@ -617,11 +616,11 b' def _dnsnamematch(dn, hostname, maxwildc'
617
616
618
617
619 def _verifycert(cert, hostname):
618 def _verifycert(cert, hostname):
620 '''Verify that cert (in socket.getpeercert() format) matches hostname.
619 """Verify that cert (in socket.getpeercert() format) matches hostname.
621 CRLs are not handled.
620 CRLs are not handled.
622
621
623 Returns error message if any problems are found and None on success.
622 Returns error message if any problems are found and None on success.
624 '''
623 """
625 if not cert:
624 if not cert:
626 return _(b'no certificate received')
625 return _(b'no certificate received')
627
626
@@ -55,7 +55,7 b' class cmdstate(object):'
55 """
55 """
56
56
57 def __init__(self, repo, fname):
57 def __init__(self, repo, fname):
58 """ repo is the repo object
58 """repo is the repo object
59 fname is the file name in the .hg directory in which data should be stored
59 fname is the file name in the .hg directory in which data should be stored
60 """
60 """
61 self._repo = repo
61 self._repo = repo
@@ -105,11 +105,11 b' class cmdstate(object):'
105
105
106 class _statecheck(object):
106 class _statecheck(object):
107 """a utility class that deals with multistep operations like graft,
107 """a utility class that deals with multistep operations like graft,
108 histedit, bisect, update etc. and checks whether such commands
108 histedit, bisect, update etc. and checks whether such commands
109 are in an unfinished condition and returns an appropriate message
109 are in an unfinished condition and returns an appropriate message
110 and hint.
110 and hint.
111 It also has the ability to register and determine the states of any new
111 It also has the ability to register and determine the states of any new
112 multistep operation or multistep command extension.
112 multistep operation or multistep command extension.
113 """
113 """
114
114
115 def __init__(
115 def __init__(
@@ -173,7 +173,11 b' class _statecheck(object):'
173 return _(
173 return _(
174 b"use 'hg %s --continue', 'hg %s --abort', "
174 b"use 'hg %s --continue', 'hg %s --abort', "
175 b"or 'hg %s --stop'"
175 b"or 'hg %s --stop'"
176 ) % (self._opname, self._opname, self._opname,)
176 ) % (
177 self._opname,
178 self._opname,
179 self._opname,
180 )
177
181
178 return self._cmdhint
182 return self._cmdhint
179
183
@@ -411,11 +411,11 b' def load_data(path):'
411
411
412
412
413 def reset(frequency=None):
413 def reset(frequency=None):
414 '''Clear out the state of the profiler. Do not call while the
414 """Clear out the state of the profiler. Do not call while the
415 profiler is running.
415 profiler is running.
416
416
417 The optional frequency argument specifies the number of samples to
417 The optional frequency argument specifies the number of samples to
418 collect per second.'''
418 collect per second."""
419 assert state.profile_level == 0, b"Can't reset() while statprof is running"
419 assert state.profile_level == 0, b"Can't reset() while statprof is running"
420 CodeSite.cache.clear()
420 CodeSite.cache.clear()
421 state.reset(frequency)
421 state.reset(frequency)
@@ -525,8 +525,8 b' def display(fp=None, format=3, data=None'
525
525
526
526
527 def display_by_line(data, fp):
527 def display_by_line(data, fp):
528 '''Print the profiler data with each sample line represented
528 """Print the profiler data with each sample line represented
529 as one row in a table. Sorted by self-time per line.'''
529 as one row in a table. Sorted by self-time per line."""
530 stats = SiteStats.buildstats(data.samples)
530 stats = SiteStats.buildstats(data.samples)
531 stats.sort(reverse=True, key=lambda x: x.selfseconds())
531 stats.sort(reverse=True, key=lambda x: x.selfseconds())
532
532
@@ -554,9 +554,9 b' def display_by_line(data, fp):'
554
554
555
555
556 def display_by_method(data, fp):
556 def display_by_method(data, fp):
557 '''Print the profiler data with each sample function represented
557 """Print the profiler data with each sample function represented
558 as one row in a table. Important lines within that function are
558 as one row in a table. Important lines within that function are
559 output as nested rows. Sorted by self-time per line.'''
559 output as nested rows. Sorted by self-time per line."""
560 fp.write(
560 fp.write(
561 b'%5.5s %10.10s %7.7s %-8.8s\n'
561 b'%5.5s %10.10s %7.7s %-8.8s\n'
562 % (b'% ', b'cumulative', b'self', b'')
562 % (b'% ', b'cumulative', b'self', b'')
@@ -835,9 +835,9 b' def write_to_flame(data, fp, scriptpath='
835
835
836
836
837 def simplifypath(path):
837 def simplifypath(path):
838 '''Attempt to make the path to a Python module easier to read by
838 """Attempt to make the path to a Python module easier to read by
839 removing whatever part of the Python search path it was found
839 removing whatever part of the Python search path it was found
840 on.'''
840 on."""
841
841
842 if path in _pathcache:
842 if path in _pathcache:
843 return _pathcache[path]
843 return _pathcache[path]
@@ -52,7 +52,7 b' def _matchtrackedpath(path, matcher):'
52 # This avoids a collision between a file named foo and a dir named
52 # This avoids a collision between a file named foo and a dir named
53 # foo.i or foo.d
53 # foo.i or foo.d
54 def _encodedir(path):
54 def _encodedir(path):
55 '''
55 """
56 >>> _encodedir(b'data/foo.i')
56 >>> _encodedir(b'data/foo.i')
57 'data/foo.i'
57 'data/foo.i'
58 >>> _encodedir(b'data/foo.i/bla.i')
58 >>> _encodedir(b'data/foo.i/bla.i')
@@ -61,7 +61,7 b' def _encodedir(path):'
61 'data/foo.i.hg.hg/bla.i'
61 'data/foo.i.hg.hg/bla.i'
62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
62 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
63 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
64 '''
64 """
65 return (
65 return (
66 path.replace(b".hg/", b".hg.hg/")
66 path.replace(b".hg/", b".hg.hg/")
67 .replace(b".i/", b".i.hg/")
67 .replace(b".i/", b".i.hg/")
@@ -73,14 +73,14 b" encodedir = getattr(parsers, 'encodedir'"
73
73
74
74
75 def decodedir(path):
75 def decodedir(path):
76 '''
76 """
77 >>> decodedir(b'data/foo.i')
77 >>> decodedir(b'data/foo.i')
78 'data/foo.i'
78 'data/foo.i'
79 >>> decodedir(b'data/foo.i.hg/bla.i')
79 >>> decodedir(b'data/foo.i.hg/bla.i')
80 'data/foo.i/bla.i'
80 'data/foo.i/bla.i'
81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
81 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
82 'data/foo.i.hg/bla.i'
82 'data/foo.i.hg/bla.i'
83 '''
83 """
84 if b".hg/" not in path:
84 if b".hg/" not in path:
85 return path
85 return path
86 return (
86 return (
@@ -91,14 +91,14 b' def decodedir(path):'
91
91
92
92
93 def _reserved():
93 def _reserved():
94 ''' characters that are problematic for filesystems
94 """characters that are problematic for filesystems
95
95
96 * ascii escapes (0..31)
96 * ascii escapes (0..31)
97 * ascii hi (126..255)
97 * ascii hi (126..255)
98 * windows specials
98 * windows specials
99
99
100 these characters will be escaped by encodefunctions
100 these characters will be escaped by encodefunctions
101 '''
101 """
102 winreserved = [ord(x) for x in u'\\:*?"<>|']
102 winreserved = [ord(x) for x in u'\\:*?"<>|']
103 for x in range(32):
103 for x in range(32):
104 yield x
104 yield x
@@ -109,7 +109,7 b' def _reserved():'
109
109
110
110
111 def _buildencodefun():
111 def _buildencodefun():
112 '''
112 """
113 >>> enc, dec = _buildencodefun()
113 >>> enc, dec = _buildencodefun()
114
114
115 >>> enc(b'nothing/special.txt')
115 >>> enc(b'nothing/special.txt')
@@ -131,7 +131,7 b' def _buildencodefun():'
131 'the~07quick~adshot'
131 'the~07quick~adshot'
132 >>> dec(b'the~07quick~adshot')
132 >>> dec(b'the~07quick~adshot')
133 'the\\x07quick\\xadshot'
133 'the\\x07quick\\xadshot'
134 '''
134 """
135 e = b'_'
135 e = b'_'
136 xchr = pycompat.bytechr
136 xchr = pycompat.bytechr
137 asciistr = list(map(xchr, range(127)))
137 asciistr = list(map(xchr, range(127)))
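
Taken together, the doctests describe a reversible byte-level scheme:
'_' doubles, an uppercase letter becomes '_' plus its lowercase form, and
reserved or non-printable bytes become '~' plus two hex digits. A sketch of
the encoding direction only (the decoder is left out):

    def enc(path):
        out = bytearray()
        for c in path:
            ch = bytes([c])
            if ch == b'_':
                out += b'__'
            elif ch.isupper():
                out += b'_' + ch.lower()
            elif c <= 31 or c >= 126 or ch in b'\\:*?"<>|':
                out += b'~%02x' % c
            else:
                out += ch
        return bytes(out)

    print(enc(b'HELLO').decode())                 # _h_e_l_l_o
    print(enc(b'the\x07quick\xadshot').decode())  # the~07quick~adshot
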
@@ -172,23 +172,23 b' def _buildencodefun():'
172
172
173
173
174 def encodefilename(s):
174 def encodefilename(s):
175 '''
175 """
176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
176 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
177 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
178 '''
178 """
179 return _encodefname(encodedir(s))
179 return _encodefname(encodedir(s))
180
180
181
181
182 def decodefilename(s):
182 def decodefilename(s):
183 '''
183 """
184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
184 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
185 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
186 '''
186 """
187 return decodedir(_decodefname(s))
187 return decodedir(_decodefname(s))
188
188
189
189
190 def _buildlowerencodefun():
190 def _buildlowerencodefun():
191 '''
191 """
192 >>> f = _buildlowerencodefun()
192 >>> f = _buildlowerencodefun()
193 >>> f(b'nothing/special.txt')
193 >>> f(b'nothing/special.txt')
194 'nothing/special.txt'
194 'nothing/special.txt'
@@ -198,7 +198,7 b' def _buildlowerencodefun():'
198 'hello~3aworld~3f'
198 'hello~3aworld~3f'
199 >>> f(b'the\\x07quick\\xADshot')
199 >>> f(b'the\\x07quick\\xADshot')
200 'the~07quick~adshot'
200 'the~07quick~adshot'
201 '''
201 """
202 xchr = pycompat.bytechr
202 xchr = pycompat.bytechr
203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
203 cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
204 for x in _reserved():
204 for x in _reserved():
@@ -220,7 +220,7 b" lowerencode = getattr(parsers, 'lowerenc"
220
220
221
221
222 def _auxencode(path, dotencode):
222 def _auxencode(path, dotencode):
223 '''
223 """
224 Encodes filenames containing names reserved by Windows or which end in
224 Encodes filenames containing names reserved by Windows or which end in
225 period or space. Does not touch other single reserved characters c.
225 period or space. Does not touch other single reserved characters c.
226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
226 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
@@ -240,7 +240,7 b' def _auxencode(path, dotencode):'
240 ['foo.~20']
240 ['foo.~20']
241 >>> _auxencode([b' .foo'], True)
241 >>> _auxencode([b' .foo'], True)
242 ['~20.foo']
242 ['~20.foo']
243 '''
243 """
244 for i, n in enumerate(path):
244 for i, n in enumerate(path):
245 if not n:
245 if not n:
246 continue
246 continue
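
The two doctest rows above show the per-component rules: with dotencode, a
leading '.' or space is hex-encoded, and a trailing '.' or space always is
(Windows silently strips them). A sketch covering just those two rules; the
reserved-basename handling (aux, con, prn, nul, com1..com9, lpt1..lpt9) in
the full function is omitted here:

    def auxencode_part(n, dotencode):
        if not n:
            return n
        if dotencode and n[:1] in (b'.', b' '):
            n = b'~%02x' % n[0] + n[1:]
        if n[-1:] in (b'.', b' '):
            n = n[:-1] + b'~%02x' % n[-1]
        return n

    print(auxencode_part(b'foo. ', True))  # b'foo.~20'
    print(auxencode_part(b' .foo', True))  # b'~20.foo'
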
@@ -305,7 +305,7 b' def _hashencode(path, dotencode):'
305
305
306
306
307 def _hybridencode(path, dotencode):
307 def _hybridencode(path, dotencode):
308 '''encodes path with a length limit
308 """encodes path with a length limit
309
309
310 Encodes all paths that begin with 'data/', according to the following.
310 Encodes all paths that begin with 'data/', according to the following.
311
311
@@ -334,7 +334,7 b' def _hybridencode(path, dotencode):'
334
334
335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
335 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
336 encoding was used.
336 encoding was used.
337 '''
337 """
338 path = encodedir(path)
338 path = encodedir(path)
339 ef = _encodefname(path).split(b'/')
339 ef = _encodefname(path).split(b'/')
340 res = b'/'.join(_auxencode(ef, dotencode))
340 res = b'/'.join(_auxencode(ef, dotencode))
@@ -444,11 +444,11 b' class basicstore(object):'
444 return reversed(self._walk(b'', False))
444 return reversed(self._walk(b'', False))
445
445
446 def walk(self, matcher=None):
446 def walk(self, matcher=None):
447 '''yields (unencoded, encoded, size)
447 """yields (unencoded, encoded, size)
448
448
449 if a matcher is passed, only storage files of tracked paths
449 if a matcher is passed, only storage files of tracked paths
450 that match the matcher are yielded
450 that match the matcher are yielded
451 '''
451 """
452 # yield data files first
452 # yield data files first
453 for x in self.datafiles(matcher):
453 for x in self.datafiles(matcher):
454 yield x
454 yield x
@@ -517,10 +517,10 b' class fncache(object):'
517 self.addls = set()
517 self.addls = set()
518
518
519 def ensureloaded(self, warn=None):
519 def ensureloaded(self, warn=None):
520 '''read the fncache file if not already read.
520 """read the fncache file if not already read.
521
521
522 If the file on disk is corrupted, raise. If warn is provided,
522 If the file on disk is corrupted, raise. If warn is provided,
523 warn and keep going instead.'''
523 warn and keep going instead."""
524 if self.entries is None:
524 if self.entries is None:
525 self._load(warn)
525 self._load(warn)
526
526
@@ -114,7 +114,12 b' def strip('
114 ),
114 ),
115 ),
115 ),
116 (b'', b'no-backup', None, _(b'do not save backup bundle')),
116 (b'', b'no-backup', None, _(b'do not save backup bundle')),
117 (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
117 (
118 b'',
119 b'nobackup',
120 None,
121 _(b'do not save backup bundle (DEPRECATED)'),
122 ),
118 (b'n', b'', None, _(b'ignored (DEPRECATED)')),
123 (b'n', b'', None, _(b'ignored (DEPRECATED)')),
119 (
124 (
120 b'k',
125 b'k',
@@ -49,9 +49,9 b' propertycache = util.propertycache'
49
49
50
50
51 def _expandedabspath(path):
51 def _expandedabspath(path):
52 '''
52 """
53 get a path or url, and if it is a path, expand it and return an absolute path
53 get a path or url, and if it is a path, expand it and return an absolute path
54 '''
54 """
55 expandedpath = util.urllocalpath(util.expandpath(path))
55 expandedpath = util.urllocalpath(util.expandpath(path))
56 u = util.url(expandedpath)
56 u = util.url(expandedpath)
57 if not u.scheme:
57 if not u.scheme:
@@ -268,8 +268,7 b' class abstractsubrepo(object):'
268 )
268 )
269
269
270 def bailifchanged(self, ignoreupdate=False, hint=None):
270 def bailifchanged(self, ignoreupdate=False, hint=None):
271 """raise Abort if subrepository is ``dirty()``
271 """raise Abort if subrepository is ``dirty()``"""
272 """
273 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
272 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
274 if dirtyreason:
273 if dirtyreason:
275 raise error.Abort(dirtyreason, hint=hint)
274 raise error.Abort(dirtyreason, hint=hint)
@@ -291,8 +290,7 b' class abstractsubrepo(object):'
291 raise NotImplementedError
290 raise NotImplementedError
292
291
293 def phase(self, state):
292 def phase(self, state):
294 """returns phase of specified state in the subrepository.
293 """returns phase of specified state in the subrepository."""
295 """
296 return phases.public
294 return phases.public
297
295
298 def remove(self):
296 def remove(self):
@@ -384,10 +382,10 b' class abstractsubrepo(object):'
384 return total
382 return total
385
383
386 def walk(self, match):
384 def walk(self, match):
387 '''
385 """
388 walk recursively through the directory tree, finding all files
386 walk recursively through the directory tree, finding all files
389 matched by the match function
387 matched by the match function
390 '''
388 """
391
389
392 def forget(self, match, prefix, uipathfn, dryrun, interactive):
390 def forget(self, match, prefix, uipathfn, dryrun, interactive):
393 return ([], [])
391 return ([], [])
@@ -423,9 +421,9 b' class abstractsubrepo(object):'
423 return revid
421 return revid
424
422
425 def unshare(self):
423 def unshare(self):
426 '''
424 """
427 convert this repository from shared to normal storage.
425 convert this repository from shared to normal storage.
428 '''
426 """
429
427
430 def verify(self, onpush=False):
428 def verify(self, onpush=False):
431 """verify the revision of this repository that is held in `_state` is
429 """verify the revision of this repository that is held in `_state` is
@@ -437,14 +435,12 b' class abstractsubrepo(object):'
437
435
438 @propertycache
436 @propertycache
439 def wvfs(self):
437 def wvfs(self):
440 """return vfs to access the working directory of this subrepository
438 """return vfs to access the working directory of this subrepository"""
441 """
442 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
439 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
443
440
444 @propertycache
441 @propertycache
445 def _relpath(self):
442 def _relpath(self):
446 """return path to this subrepository as seen from outermost repository
443 """return path to this subrepository as seen from outermost repository"""
447 """
448 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
444 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
449
445
450
446
@@ -503,10 +499,10 b' class hgsubrepo(abstractsubrepo):'
503 return clean
499 return clean
504
500
505 def _calcstorehash(self, remotepath):
501 def _calcstorehash(self, remotepath):
506 '''calculate a unique "store hash"
502 """calculate a unique "store hash"
507
503
508 This method is used to detect when there are changes that may
504 This method is used to detect when there are changes that may
509 require a push to a given remote path.'''
505 require a push to a given remote path."""
510 # sort the files that will be hashed in increasing (likely) file size
506 # sort the files that will be hashed in increasing (likely) file size
511 filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
507 filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
512 yield b'# %s\n' % _expandedabspath(remotepath)
508 yield b'# %s\n' % _expandedabspath(remotepath)
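
Mechanically, _calcstorehash() emits one text line per key store file so two
dumps can be compared cheaply to decide whether a push may be needed. A
hedged standalone sketch; hashing whole files with SHA-1 via plain open() is
an assumption, as the real code reads through the repo vfs:

    import hashlib
    import os

    def calc_store_hash(repo_dir, remotepath):
        yield b'# %s\n' % remotepath
        for name in (b'bookmarks', b'store/phaseroots', b'store/00changelog.i'):
            digest = hashlib.sha1()
            path = os.path.join(repo_dir, name.decode())
            if os.path.exists(path):
                with open(path, 'rb') as fh:
                    digest.update(fh.read())
            yield b'%s = %s\n' % (name, digest.hexdigest().encode())
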
@@ -525,11 +521,11 b' class hgsubrepo(abstractsubrepo):'
525 return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
521 return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
526
522
527 def _cachestorehash(self, remotepath):
523 def _cachestorehash(self, remotepath):
528 '''cache the current store hash
524 """cache the current store hash
529
525
530 Each remote repo requires its own store hash cache, because a subrepo
526 Each remote repo requires its own store hash cache, because a subrepo
531 store may be "clean" versus a given remote repo, but not versus another
527 store may be "clean" versus a given remote repo, but not versus another
532 '''
528 """
533 cachefile = _getstorehashcachename(remotepath)
529 cachefile = _getstorehashcachename(remotepath)
534 with self._repo.lock():
530 with self._repo.lock():
535 storehash = list(self._calcstorehash(remotepath))
531 storehash = list(self._calcstorehash(remotepath))
@@ -537,8 +533,7 b' class hgsubrepo(abstractsubrepo):'
537 vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
533 vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
538
534
539 def _getctx(self):
535 def _getctx(self):
540 '''fetch the context for this subrepo revision, possibly a workingctx
536 """fetch the context for this subrepo revision, possibly a workingctx"""
541 '''
542 if self._ctx.rev() is None:
537 if self._ctx.rev() is None:
543 return self._repo[None] # workingctx if parent is workingctx
538 return self._repo[None] # workingctx if parent is workingctx
544 else:
539 else:
@@ -1048,14 +1043,12 b' class hgsubrepo(abstractsubrepo):'
1048
1043
1049 @propertycache
1044 @propertycache
1050 def wvfs(self):
1045 def wvfs(self):
1051 """return own wvfs for efficiency and consistency
1046 """return own wvfs for efficiency and consistency"""
1052 """
1053 return self._repo.wvfs
1047 return self._repo.wvfs
1054
1048
1055 @propertycache
1049 @propertycache
1056 def _relpath(self):
1050 def _relpath(self):
1057 """return path to this subrepository as seen from outermost repository
1051 """return path to this subrepository as seen from outermost repository"""
1058 """
1059 # Keep consistent dir separators by avoiding vfs.join(self._path)
1052 # Keep consistent dir separators by avoiding vfs.join(self._path)
1060 return reporelpath(self._repo)
1053 return reporelpath(self._repo)
1061
1054
@@ -1170,12 +1163,16 b' class svnsubrepo(abstractsubrepo):'
1170 externals.append(path)
1163 externals.append(path)
1171 elif item == 'missing':
1164 elif item == 'missing':
1172 missing.append(path)
1165 missing.append(path)
1173 if item not in (
1166 if (
1174 '',
1167 item
1175 'normal',
1168 not in (
1176 'unversioned',
1169 '',
1177 'external',
1170 'normal',
1178 ) or props not in ('', 'none', 'normal'):
1171 'unversioned',
1172 'external',
1173 )
1174 or props not in ('', 'none', 'normal')
1175 ):
1179 changes.append(path)
1176 changes.append(path)
1180 for path in changes:
1177 for path in changes:
1181 for ext in externals:
1178 for ext in externals:
@@ -1384,7 +1381,7 b' class gitsubrepo(abstractsubrepo):'
1384
1381
1385 @staticmethod
1382 @staticmethod
1386 def _checkversion(out):
1383 def _checkversion(out):
1387 '''ensure git version is new enough
1384 """ensure git version is new enough
1388
1385
1389 >>> _checkversion = gitsubrepo._checkversion
1386 >>> _checkversion = gitsubrepo._checkversion
1390 >>> _checkversion(b'git version 1.6.0')
1387 >>> _checkversion(b'git version 1.6.0')
@@ -1405,7 +1402,7 b' class gitsubrepo(abstractsubrepo):'
1405 'unknown'
1402 'unknown'
1406 >>> _checkversion(b'no')
1403 >>> _checkversion(b'no')
1407 'unknown'
1404 'unknown'
1408 '''
1405 """
1409 version = gitsubrepo._gitversion(out)
1406 version = gitsubrepo._gitversion(out)
1410 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1407 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1411 # despite the docstring comment. For now, error on 1.4.0, warn on
1408 # despite the docstring comment. For now, error on 1.4.0, warn on
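
A sketch of the version gate those doctests exercise: parse 'git version
X.Y[.Z]' and classify it. Per the comment above, 1.4.0 must abort; the exact
warn cutoff below is an assumption, and only the parsing mirrors the doctest
inputs:

    import re

    def check_git_version(out):
        m = re.match(br'git version (\d+)\.(\d+)', out)
        if not m:
            return 'unknown'
        major, minor = int(m.group(1)), int(m.group(2))
        if (major, minor) <= (1, 4):
            return 'abort'
        if (major, minor) < (1, 6):
            return 'warning'
        return 'ok'

    print(check_git_version(b'git version 1.6.0'))  # ok
    print(check_git_version(b'no'))                 # unknown
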
@@ -1516,9 +1513,9 b' class gitsubrepo(abstractsubrepo):'
1516 self._gitcommand([b'update-index', b'-q', b'--refresh'])
1513 self._gitcommand([b'update-index', b'-q', b'--refresh'])
1517
1514
1518 def _gitbranchmap(self):
1515 def _gitbranchmap(self):
1519 '''returns 2 things:
1516 """returns 2 things:
1520 a map from git branch to revision
1517 a map from git branch to revision
1521 a map from revision to branches'''
1518 a map from revision to branches"""
1522 branch2rev = {}
1519 branch2rev = {}
1523 rev2branch = {}
1520 rev2branch = {}
1524
1521
@@ -87,12 +87,12 b' hexnullid = hex(nullid)'
87
87
88
88
89 def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
89 def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
90 '''read the .hgtags file into a structure that is suitable for merging
90 """read the .hgtags file into a structure that is suitable for merging
91
91
92 Depending on the keeplinenums flag, clear the line numbers associated
92 Depending on the keeplinenums flag, clear the line numbers associated
93 with each tag. This is done because only the line numbers of the first
93 with each tag. This is done because only the line numbers of the first
94 parent are useful for merging.
94 parent are useful for merging.
95 '''
95 """
96 filetags = tagsmod._readtaghist(
96 filetags = tagsmod._readtaghist(
97 ui, repo, lines, fn=fn, recode=None, calcnodelines=True
97 ui, repo, lines, fn=fn, recode=None, calcnodelines=True
98 )[1]
98 )[1]
@@ -104,7 +104,7 b' def readtagsformerge(ui, repo, lines, fn'
104
104
105
105
106 def grouptagnodesbyline(tagnodes):
106 def grouptagnodesbyline(tagnodes):
107 '''
107 """
108 Group nearby nodes (i.e. those that must be written next to each other)
108 Group nearby nodes (i.e. those that must be written next to each other)
109
109
110 The input is a list of [node, position] pairs, corresponding to a given tag
110 The input is a list of [node, position] pairs, corresponding to a given tag
@@ -118,7 +118,7 b' def grouptagnodesbyline(tagnodes):'
118 position is None).
118 position is None).
119
119
120 The result is a list of [position, [consecutive node list]]
120 The result is a list of [position, [consecutive node list]]
121 '''
121 """
122 firstlinenum = None
122 firstlinenum = None
123 for hexnode, linenum in tagnodes:
123 for hexnode, linenum in tagnodes:
124 firstlinenum = linenum
124 firstlinenum = linenum
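
The grouping contract stated in this docstring can be sketched directly:
consecutive positions collapse into one group keyed by the first position,
and a node with position None starts its own group. Ordering subtleties of
the real implementation are glossed over here:

    def group_by_line(tagnodes):
        groups = []
        prev = None
        for node, pos in tagnodes:
            consecutive = pos is not None and prev is not None and pos == prev + 1
            if groups and consecutive:
                groups[-1][1].append(node)
            else:
                groups.append([pos, [node]])
            prev = pos
        return groups

    print(group_by_line([('n1', 3), ('n2', 4), ('n3', 10), ('n4', None)]))
    # [[3, ['n1', 'n2']], [10, ['n3']], [None, ['n4']]]
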
@@ -139,14 +139,14 b' def grouptagnodesbyline(tagnodes):'
139
139
140
140
141 def writemergedtags(fcd, mergedtags):
141 def writemergedtags(fcd, mergedtags):
142 '''
142 """
143 write the merged tags while trying to minimize the diff to the first parent
143 write the merged tags while trying to minimize the diff to the first parent
144
144
145 This function uses the ordering info stored on the merged tags dict to
145 This function uses the ordering info stored on the merged tags dict to
146 generate an .hgtags file which is correct (in the sense that its contents
146 generate an .hgtags file which is correct (in the sense that its contents
147 correspond to the result of the tag merge) while also being as close as
147 correspond to the result of the tag merge) while also being as close as
148 possible to the first parent's .hgtags file.
148 possible to the first parent's .hgtags file.
149 '''
149 """
150 # group the node-tag pairs that must be written next to each other
150 # group the node-tag pairs that must be written next to each other
151 for tname, taglist in list(mergedtags.items()):
151 for tname, taglist in list(mergedtags.items()):
152 mergedtags[tname] = grouptagnodesbyline(taglist)
152 mergedtags[tname] = grouptagnodesbyline(taglist)
@@ -175,12 +175,12 b' def writemergedtags(fcd, mergedtags):'
175
175
176
176
177 def singletagmerge(p1nodes, p2nodes):
177 def singletagmerge(p1nodes, p2nodes):
178 '''
178 """
179 merge the nodes corresponding to a single tag
179 merge the nodes corresponding to a single tag
180
180
181 Note that the inputs are lists of node-linenum pairs (i.e. not just lists
181 Note that the inputs are lists of node-linenum pairs (i.e. not just lists
182 of nodes)
182 of nodes)
183 '''
183 """
184 if not p2nodes:
184 if not p2nodes:
185 return p1nodes
185 return p1nodes
186 if not p1nodes:
186 if not p1nodes:
@@ -221,10 +221,10 b' def singletagmerge(p1nodes, p2nodes):'
221
221
222
222
223 def merge(repo, fcd, fco, fca):
223 def merge(repo, fcd, fco, fca):
224 '''
224 """
225 Merge the tags of two revisions, taking into account the base tags
225 Merge the tags of two revisions, taking into account the base tags
226 Try to minimize the diff between the merged tags and the first parent tags
226 Try to minimize the diff between the merged tags and the first parent tags
227 '''
227 """
228 ui = repo.ui
228 ui = repo.ui
229 # read the p1, p2 and base tags
229 # read the p1, p2 and base tags
230 # only keep the line numbers for the p1 tags
230 # only keep the line numbers for the p1 tags
@@ -177,12 +177,12 b' def writediff(fp, difflist):'
177
177
178
178
179 def findglobaltags(ui, repo):
179 def findglobaltags(ui, repo):
180 '''Find global tags in a repo: return a tagsmap
180 """Find global tags in a repo: return a tagsmap
181
181
182 tagsmap: tag name to (node, hist) 2-tuples.
182 tagsmap: tag name to (node, hist) 2-tuples.
183
183
184 The tags cache is read and updated as a side-effect of calling.
184 The tags cache is read and updated as a side-effect of calling.
185 '''
185 """
186 (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
186 (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
187 if cachetags is not None:
187 if cachetags is not None:
188 assert not shouldwrite
188 assert not shouldwrite
@@ -267,7 +267,7 b' def readlocaltags(ui, repo, alltags, tag'
267
267
268
268
269 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
269 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
270 '''Read tag definitions from a file (or any source of lines).
270 """Read tag definitions from a file (or any source of lines).
271
271
272 This function returns two sortdicts with similar information:
272 This function returns two sortdicts with similar information:
273
273
@@ -283,7 +283,7 b' def _readtaghist(ui, repo, lines, fn, re'
283 When calcnodelines is False the hextaglines dict is not calculated (an
283 When calcnodelines is False the hextaglines dict is not calculated (an
284 empty dict is returned). This is done to improve this function's
284 empty dict is returned). This is done to improve this function's
285 performance in cases where the line numbers are not needed.
285 performance in cases where the line numbers are not needed.
286 '''
286 """
287
287
288 bintaghist = util.sortdict()
288 bintaghist = util.sortdict()
289 hextaglines = util.sortdict()
289 hextaglines = util.sortdict()
@@ -325,14 +325,14 b' def _readtaghist(ui, repo, lines, fn, re'
325
325
326
326
327 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
327 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
328 '''Read tag definitions from a file (or any source of lines).
328 """Read tag definitions from a file (or any source of lines).
329
329
330 Returns a mapping from tag name to (node, hist).
330 Returns a mapping from tag name to (node, hist).
331
331
332 "node" is the node id from the last line read for that name. "hist"
332 "node" is the node id from the last line read for that name. "hist"
333 is the list of node ids previously associated with it (in file order).
333 is the list of node ids previously associated with it (in file order).
334 All node ids are binary, not hex.
334 All node ids are binary, not hex.
335 '''
335 """
336 filetags, nodelines = _readtaghist(
336 filetags, nodelines = _readtaghist(
337 ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
337 ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
338 )
338 )
@@ -390,7 +390,7 b' def _filename(repo):'
390
390
391
391
392 def _readtagcache(ui, repo):
392 def _readtagcache(ui, repo):
393 '''Read the tag cache.
393 """Read the tag cache.
394
394
395 Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
395 Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
396
396
@@ -406,7 +406,7 b' def _readtagcache(ui, repo):'
406
406
407 If the cache is not up to date, the caller is responsible for reading tag
407 If the cache is not up to date, the caller is responsible for reading tag
408 info from each returned head. (See findglobaltags().)
408 info from each returned head. (See findglobaltags().)
409 '''
409 """
410 try:
410 try:
411 cachefile = repo.cachevfs(_filename(repo), b'r')
411 cachefile = repo.cachevfs(_filename(repo), b'r')
412 # force reading the file for static-http
412 # force reading the file for static-http
@@ -549,7 +549,7 b' def _writetagcache(ui, repo, valid, cach'
549
549
550
550
551 def tag(repo, names, node, message, local, user, date, editor=False):
551 def tag(repo, names, node, message, local, user, date, editor=False):
552 '''tag a revision with one or more symbolic names.
552 """tag a revision with one or more symbolic names.
553
553
554 names is a list of strings or, when adding a single tag, names may be a
554 names is a list of strings or, when adding a single tag, names may be a
555 string.
555 string.
@@ -567,7 +567,7 b' def tag(repo, names, node, message, loca'
567
567
568 user: name of user to use if committing
568 user: name of user to use if committing
569
569
570 date: date tuple to use if committing'''
570 date: date tuple to use if committing"""
571
571
572 if not local:
572 if not local:
573 m = matchmod.exact([b'.hgtags'])
573 m = matchmod.exact([b'.hgtags'])
@@ -548,8 +548,7 b' def websub(text, websubtable):'
548
548
549
549
550 def loadfilter(ui, extname, registrarobj):
550 def loadfilter(ui, extname, registrarobj):
551 """Load template filter from specified registrarobj
551 """Load template filter from specified registrarobj"""
552 """
553 for name, func in pycompat.iteritems(registrarobj._table):
552 for name, func in pycompat.iteritems(registrarobj._table):
554 filters[name] = func
553 filters[name] = func
555
554
@@ -912,8 +912,7 b' def word(context, mapping, args):'
912
912
913
913
914 def loadfunction(ui, extname, registrarobj):
914 def loadfunction(ui, extname, registrarobj):
915 """Load template function from specified registrarobj
915 """Load template function from specified registrarobj"""
916 """
917 for name, func in pycompat.iteritems(registrarobj._table):
916 for name, func in pycompat.iteritems(registrarobj._table):
918 funcs[name] = func
917 funcs[name] = func
919
918
@@ -994,8 +994,7 b' def showwhyunstable(context, mapping):'
994
994
995
995
996 def loadkeyword(ui, extname, registrarobj):
996 def loadkeyword(ui, extname, registrarobj):
997 """Load template keyword from specified registrarobj
997 """Load template keyword from specified registrarobj"""
998 """
999 for name, func in pycompat.iteritems(registrarobj._table):
998 for name, func in pycompat.iteritems(registrarobj._table):
1000 keywords[name] = func
999 keywords[name] = func
1001
1000
@@ -663,7 +663,7 b' class nullresourcemapper(resourcemapper)'
663
663
664
664
665 class engine(object):
665 class engine(object):
666 '''template expansion engine.
666 """template expansion engine.
667
667
668 template expansion works like this. a map file contains key=value
668 template expansion works like this. a map file contains key=value
669 pairs. if value is quoted, it is treated as string. otherwise, it
669 pairs. if value is quoted, it is treated as string. otherwise, it
@@ -680,7 +680,7 b' class engine(object):'
680 {key%format}.
680 {key%format}.
681
681
682 filter uses function to transform value. syntax is
682 filter uses function to transform value. syntax is
683 {key|filter1|filter2|...}.'''
683 {key|filter1|filter2|...}."""
684
684
685 def __init__(self, loader, filters=None, defaults=None, resources=None):
685 def __init__(self, loader, filters=None, defaults=None, resources=None):
686 self._loader = loader
686 self._loader = loader
@@ -781,9 +781,9 b' class engine(object):'
781 return False
781 return False
782
782
783 def process(self, t, mapping):
783 def process(self, t, mapping):
784 '''Perform expansion. t is name of map element to expand.
784 """Perform expansion. t is name of map element to expand.
785 mapping contains added elements for use during expansion. Is a
785 mapping contains added elements for use during expansion. Is a
786 generator.'''
786 generator."""
787 func, data = self._load(t)
787 func, data = self._load(t)
788 return self._expand(func, data, mapping)
788 return self._expand(func, data, mapping)
789
789
@@ -857,7 +857,11 b' def _readmapfile(fp, mapfile):'
857 if subresource:
857 if subresource:
858 data = subresource.read()
858 data = subresource.read()
859 conf.parse(
859 conf.parse(
860 abs, data, sections=sections, remap=remap, include=include,
860 abs,
861 data,
862 sections=sections,
863 remap=remap,
864 include=include,
861 )
865 )
862
866
863 data = fp.read()
867 data = fp.read()
@@ -1094,12 +1098,12 b' def templatedir():'
1094
1098
1095
1099
1096 def open_template(name, templatepath=None):
1100 def open_template(name, templatepath=None):
1097 '''returns a file-like object for the given template, and its full path
1101 """returns a file-like object for the given template, and its full path
1098
1102
1099 If the name is a relative path and we're in a frozen binary, the template
1103 If the name is a relative path and we're in a frozen binary, the template
1100 will be read from the mercurial.templates package instead. The returned path
1104 will be read from the mercurial.templates package instead. The returned path
1101 will then be the relative path.
1105 will then be the relative path.
1102 '''
1106 """
1103 # Does the name point directly to a map file?
1107 # Does the name point directly to a map file?
1104 if os.path.isfile(name) or os.path.isabs(name):
1108 if os.path.isfile(name) or os.path.isabs(name):
1105 return name, open(name, mode='rb')
1109 return name, open(name, mode='rb')
@@ -1021,7 +1021,12 b' class ifiledatatests(basetestcase):'
1021 def testcensored(self):
1021 def testcensored(self):
1022 f = self._makefilefn()
1022 f = self._makefilefn()
1023
1023
1024 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1024 stored1 = storageutil.packmeta(
1025 {
1026 b'censored': b'tombstone',
1027 },
1028 b'',
1029 )
1025
1030
1026 with self._maketransactionfn() as tr:
1031 with self._maketransactionfn() as tr:
1027 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
1032 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1050,7 +1055,12 b' class ifiledatatests(basetestcase):'
1050
1055
1051 f = self._makefilefn()
1056 f = self._makefilefn()
1052
1057
1053 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1058 stored1 = storageutil.packmeta(
1059 {
1060 b'censored': b'tombstone',
1061 },
1062 b'',
1063 )
1054
1064
1055 with self._maketransactionfn() as tr:
1065 with self._maketransactionfn() as tr:
1056 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
1066 node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1232,7 +1242,12 b' class ifilemutationtests(basetestcase):'
1232 # Attempt to apply a delta made against a censored revision.
1242 # Attempt to apply a delta made against a censored revision.
1233 f = self._makefilefn()
1243 f = self._makefilefn()
1234
1244
1235 stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
1245 stored1 = storageutil.packmeta(
1246 {
1247 b'censored': b'tombstone',
1248 },
1249 b'',
1250 )
1236
1251
1237 with self._maketransactionfn() as tr:
1252 with self._maketransactionfn() as tr:
1238 node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
1253 node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
@@ -425,10 +425,10 b' class transaction(util.transactional):'
425
425
426 @active
426 @active
427 def replace(self, file, offset):
427 def replace(self, file, offset):
428 '''
428 """
429 replace can only replace already committed entries
429 replace can only replace already committed entries
430 that are not pending in the queue
430 that are not pending in the queue
431 '''
431 """
432 if file in self._newfiles:
432 if file in self._newfiles:
433 if not offset:
433 if not offset:
434 return
434 return
@@ -476,9 +476,9 b' class transaction(util.transactional):'
476
476
477 @active
477 @active
478 def writepending(self):
478 def writepending(self):
479 '''write pending file to temporary version
479 """write pending file to temporary version
480
480
481 This is used to allow hooks to view a transaction before commit'''
481 This is used to allow hooks to view a transaction before commit"""
482 categories = sorted(self._pendingcallback)
482 categories = sorted(self._pendingcallback)
483 for cat in categories:
483 for cat in categories:
484 # remove callback since the data will have been flushed
484 # remove callback since the data will have been flushed
@@ -489,8 +489,7 b' class transaction(util.transactional):'
489
489
490 @active
490 @active
491 def hasfinalize(self, category):
491 def hasfinalize(self, category):
492 """check is a callback already exist for a category
492 """check is a callback already exist for a category"""
493 """
494 return category in self._finalizecallback
493 return category in self._finalizecallback
495
494
496 @active
495 @active
@@ -533,11 +532,11 b' class transaction(util.transactional):'
533
532
534 @active
533 @active
535 def addvalidator(self, category, callback):
534 def addvalidator(self, category, callback):
536 """ adds a callback to be called when validating the transaction.
535 """adds a callback to be called when validating the transaction.
537
536
538 The transaction will be given as the first argument to the callback.
537 The transaction will be given as the first argument to the callback.
539
538
540 callback should raise an exception to abort the transaction """
539 callback should raise an exception to abort the transaction"""
541 self._validatecallback[category] = callback
540 self._validatecallback[category] = callback
542
541
543 @active
542 @active
@@ -624,9 +623,9 b' class transaction(util.transactional):'
624
623
625 @active
624 @active
626 def abort(self):
625 def abort(self):
627 '''abort the transaction (generally called on error, or when the
626 """abort the transaction (generally called on error, or when the
628 transaction is not explicitly committed before going out of
627 transaction is not explicitly committed before going out of
629 scope)'''
628 scope)"""
630 self._abort()
629 self._abort()
631
630
632 def _writeundo(self):
631 def _writeundo(self):
@@ -117,7 +117,10 b' def findcommonincoming(repo, remote, hea'
117 for p in pycompat.xrange(0, len(r), 10):
117 for p in pycompat.xrange(0, len(r), 10):
118 with remote.commandexecutor() as e:
118 with remote.commandexecutor() as e:
119 branches = e.callcommand(
119 branches = e.callcommand(
120 b'branches', {b'nodes': r[p : p + 10],}
120 b'branches',
121 {
122 b'nodes': r[p : p + 10],
123 },
121 ).result()
124 ).result()
122
125
123 for b in branches:
126 for b in branches:
@@ -13,20 +13,20 b' from . import encoding'
13
13
14
14
15 def mayhavepending(root):
15 def mayhavepending(root):
16 '''return whether 'root' may have pending changes, which are
16 """return whether 'root' may have pending changes, which are
17 visible to this process.
17 visible to this process.
18 '''
18 """
19 return root == encoding.environ.get(b'HG_PENDING')
19 return root == encoding.environ.get(b'HG_PENDING')
20
20
21
21
22 def trypending(root, vfs, filename, **kwargs):
22 def trypending(root, vfs, filename, **kwargs):
23 '''Open file to be read according to HG_PENDING environment variable
23 """Open file to be read according to HG_PENDING environment variable
24
24
25 This opens the '.pending' variant of the specified 'filename' only when HG_PENDING
25 This opens the '.pending' variant of the specified 'filename' only when HG_PENDING
26 is equal to 'root'.
26 is equal to 'root'.
27
27
28 This returns '(fp, is_pending_opened)' tuple.
28 This returns '(fp, is_pending_opened)' tuple.
29 '''
29 """
30 if mayhavepending(root):
30 if mayhavepending(root):
31 try:
31 try:
32 return (vfs(b'%s.pending' % filename, **kwargs), True)
32 return (vfs(b'%s.pending' % filename, **kwargs), True)
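Per the trypending docstring above, the '.pending' variant of a file is consulted only when HG_PENDING names the current repository root. A hedged usage sketch (the vfs object and the root path are assumptions, not defined here):

    # Inside a hook process, Mercurial exports HG_PENDING=<repo root>, so this
    # opens 'bookmarks.pending' when pending data exists, else 'bookmarks':
    fp, was_pending = trypending(b'/path/to/repo', vfs, b'bookmarks')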
@@ -925,7 +925,7 @@ class ui(object):
             yield section, name, value
 
     def plain(self, feature=None):
-        '''is plain mode active?
+        """is plain mode active?
 
         Plain mode means that all configuration variables which affect
         the behavior and output of Mercurial should be
@@ -939,7 +939,7 @@ class ui(object):
         - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
         - False if feature is disabled by default and not included in HGPLAIN
         - True otherwise
-        '''
+        """
         if (
             b'HGPLAIN' not in encoding.environ
             and b'HGPLAINEXCEPT' not in encoding.environ
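The decision rules listed in the plain() docstring condense into a small standalone sketch; this mirrors the documented behavior rather than the ui implementation itself, and it omits the refinement for features that are disabled by default:

    import os

    def plain(feature=None):
        if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
            return False
        exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
        return feature not in exceptions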
@@ -1112,7 +1112,7 @@ class ui(object):
         return self._colormode != b'win32'
 
     def write(self, *args, **opts):
-        '''write args to output
+        """write args to output
 
         By default, this method simply writes to the buffer or stdout.
         Color mode can be set on the UI class to have the output decorated
@@ -1133,7 +1133,7 @@ class ui(object):
         When labeling output for a specific command, a label of
         "cmdname.type" is recommended. For example, status issues
         a label of "status.modified" for modified files.
-        '''
+        """
         dest = self._fout
 
         # inlined _write() for speed
@@ -1453,9 +1453,9 @@ class ui(object):
         return _reqexithandlers
 
     def atexit(self, func, *args, **kwargs):
-        '''register a function to run after dispatching a request
+        """register a function to run after dispatching a request
 
-        Handlers do not stay registered across request boundaries.'''
+        Handlers do not stay registered across request boundaries."""
         self._exithandlers.append((func, args, kwargs))
         return func
 
@@ -1484,8 +1484,14 @@ class ui(object):
         alldefaults = frozenset([b"text", b"curses"])
 
         featureinterfaces = {
-            b"chunkselector": [b"text", b"curses",],
-            b"histedit": [b"text", b"curses",],
+            b"chunkselector": [
+                b"text",
+                b"curses",
+            ],
+            b"histedit": [
+                b"text",
+                b"curses",
+            ],
         }
 
         # Feature-specific interface
@@ -1532,7 +1538,7 @@ class ui(object):
         return choseninterface
 
     def interactive(self):
-        '''is interactive input allowed?
+        """is interactive input allowed?
 
         An interactive session is a session where input can be reasonably read
         from `sys.stdin'. If this function returns false, any attempt to read
@@ -1544,7 +1550,7 @@ class ui(object):
         to a terminal device.
 
         This function refers to input only; for output, see `ui.formatted()'.
-        '''
+        """
         i = self.configbool(b"ui", b"interactive")
         if i is None:
             # some environments replace stdin without implementing isatty
@@ -1554,8 +1560,7 @@ class ui(object):
         return i
 
     def termwidth(self):
-        '''how wide is the terminal in columns?
-        '''
+        """how wide is the terminal in columns?"""
         if b'COLUMNS' in encoding.environ:
             try:
                 return int(encoding.environ[b'COLUMNS'])
@@ -1564,7 +1569,7 @@ class ui(object):
         return scmutil.termsize(self)[0]
 
     def formatted(self):
-        '''should formatted output be used?
+        """should formatted output be used?
 
         It is often desirable to format the output to suite the output medium.
         Examples of this are truncating long lines or colorizing messages.
@@ -1579,7 +1584,7 @@ class ui(object):
 
         This function refers to output only; for input, see `ui.interactive()'.
         This function always returns false when in plain mode, see `ui.plain()'.
-        '''
+        """
         if self.plain():
             return False
 
@@ -1746,40 +1751,40 @@ class ui(object):
             raise error.ResponseExpected()
 
     def status(self, *msg, **opts):
-        '''write status message to output (if ui.quiet is False)
+        """write status message to output (if ui.quiet is False)
 
         This adds an output label of "ui.status".
-        '''
+        """
         if not self.quiet:
             self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
 
     def warn(self, *msg, **opts):
-        '''write warning message to output (stderr)
+        """write warning message to output (stderr)
 
         This adds an output label of "ui.warning".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
 
     def error(self, *msg, **opts):
-        '''write error message to output (stderr)
+        """write error message to output (stderr)
 
         This adds an output label of "ui.error".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
 
     def note(self, *msg, **opts):
-        '''write note to output (if ui.verbose is True)
+        """write note to output (if ui.verbose is True)
 
         This adds an output label of "ui.note".
-        '''
+        """
         if self.verbose:
             self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
 
     def debug(self, *msg, **opts):
-        '''write debug message to output (if ui.debugflag is True)
+        """write debug message to output (if ui.debugflag is True)
 
         This adds an output label of "ui.debug".
-        '''
+        """
         if self.debugflag:
             self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
         self.log(b'debug', b'%s', b''.join(msg))
@@ -1875,12 +1880,12 @@ class ui(object):
         errprefix=None,
         blockedtag=None,
     ):
-        '''execute shell command with appropriate output stream. command
+        """execute shell command with appropriate output stream. command
         output will be redirected if fout is not stdout.
 
         if command fails and onerr is None, return status, else raise onerr
         object as exception.
-        '''
+        """
         if blockedtag is None:
             # Long cmds tend to be because of an absolute path on cmd. Keep
             # the tail end instead
@@ -1907,9 +1912,9 @@ class ui(object):
         return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
 
     def traceback(self, exc=None, force=False):
-        '''print exception traceback if traceback printing enabled or forced.
+        """print exception traceback if traceback printing enabled or forced.
         only to call in exception handler. returns true if traceback
-        printed.'''
+        printed."""
         if self.tracebackflag or force:
             if exc is None:
                 exc = sys.exc_info()
@@ -2011,7 +2016,7 @@ class ui(object):
         self._loggers[name] = logger
 
     def log(self, event, msgfmt, *msgargs, **opts):
-        '''hook for logging facility extensions
+        """hook for logging facility extensions
 
         event should be a readily-identifiable subsystem, which will
         allow filtering.
@@ -2020,7 +2025,7 @@ class ui(object):
         *msgargs are %-formatted into it.
 
         **opts currently has no defined meanings.
-        '''
+        """
         if not self._loggers:
             return
         activeloggers = [
@@ -2040,7 +2045,7 @@ class ui(object):
         self._loggers = registeredloggers
 
     def label(self, msg, label):
-        '''style msg based on supplied label
+        """style msg based on supplied label
 
         If some color mode is enabled, this will add the necessary control
         characters to apply such color. In addition, 'debug' color mode adds
@@ -2048,7 +2053,7 @@ class ui(object):
 
         ui.write(s, 'label') is equivalent to
         ui.write(ui.label(s, 'label')).
-        '''
+        """
         if self._colormode is not None:
             return color.colorlabel(self, msg, label)
         return msg
@@ -35,13 +35,13 @@ urlreq = util.urlreq
 
 
 def escape(s, quote=None):
-    '''Replace special characters "&", "<" and ">" to HTML-safe sequences.
+    """Replace special characters "&", "<" and ">" to HTML-safe sequences.
     If the optional flag quote is true, the quotation mark character (")
     is also translated.
 
     This is the same as cgi.escape in Python, but always operates on
     bytes, whereas cgi.escape in Python 3 only works on unicodes.
-    '''
+    """
     s = s.replace(b"&", b"&amp;")
     s = s.replace(b"<", b"&lt;")
     s = s.replace(b">", b"&gt;")
@@ -586,7 +586,7 @@ def opener(
     loggingopts=None,
     sendaccept=True,
 ):
-    '''
+    """
     construct an opener suitable for urllib2
     authinfo will be added to the password manager
 
@@ -600,7 +600,7 @@ def opener(
 
     ``sendaccept`` allows controlling whether the ``Accept`` request header
     is sent. The header is sent by default.
-    '''
+    """
     timeout = ui.configwith(float, b'http', b'timeout')
     handlers = []
 
@@ -83,10 +83,22 @@ if pycompat.ispy3:
     )
     import urllib.response
 
-    urlreq._registeraliases(urllib.response, (b"addclosehook", b"addinfourl",))
+    urlreq._registeraliases(
+        urllib.response,
+        (
+            b"addclosehook",
+            b"addinfourl",
+        ),
+    )
     import urllib.error
 
-    urlerr._registeraliases(urllib.error, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib.error,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     import http.server
 
     httpserver._registeraliases(
@@ -179,12 +191,28 @@ else:
             b"urlopen",
         ),
     )
-    urlreq._registeraliases(urlparse, (b"urlparse", b"urlunparse",))
+    urlreq._registeraliases(
+        urlparse,
+        (
+            b"urlparse",
+            b"urlunparse",
+        ),
+    )
     urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
     urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
-    urlerr._registeraliases(urllib2, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib2,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     httpserver._registeraliases(
-        BaseHTTPServer, (b"HTTPServer", b"BaseHTTPRequestHandler",)
+        BaseHTTPServer,
+        (
+            b"HTTPServer",
+            b"BaseHTTPRequestHandler",
+        ),
     )
     httpserver._registeraliases(
         SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
@@ -1264,7 +1264,7 @@ class cow(object):
 
 
 class sortdict(collections.OrderedDict):
-    '''a simple sorted dictionary
+    """a simple sorted dictionary
 
     >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
     >>> d2 = d1.copy()
@@ -1276,7 +1276,7 @@ class sortdict(collections.OrderedDict):
     >>> d1.insert(1, b'a.5', 0.5)
     >>> d1
     sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
-    '''
+    """
 
     def __setitem__(self, key, value):
         if key in self:
@@ -1761,8 +1761,8 @@ def clearcachedproperty(obj, prop):
 
 
 def increasingchunks(source, min=1024, max=65536):
-    '''return no less than min bytes per chunk while data remains,
-    doubling min after each chunk until it reaches max'''
+    """return no less than min bytes per chunk while data remains,
+    doubling min after each chunk until it reaches max"""
 
     def log2(x):
         if not x:
@@ -1833,7 +1833,7 @@ if pycompat.ispypy:
 
 
 def pathto(root, n1, n2):
-    '''return the relative path from one place to another.
+    """return the relative path from one place to another.
     root should use os.sep to separate directories
     n1 should use os.sep to separate directories
     n2 should use "/" to separate directories
@@ -1842,7 +1842,7 @@ def pathto(root, n1, n2):
     If n1 is a relative path, it's assumed it's
     relative to root.
     n2 should always be relative to root.
-    '''
+    """
     if not n1:
         return localpath(n2)
     if os.path.isabs(n1):
@@ -1892,7 +1892,7 @@ def checksignature(func, depth=1):
 
 
 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
-    '''copy a file, preserving mode and optionally other stat info like
+    """copy a file, preserving mode and optionally other stat info like
     atime/mtime
 
     checkambig argument is used with filestat, and is useful only if
@@ -1900,7 +1900,7 @@ def copyfile(src, dest, hardlink=False, 
     repo.wlock).
 
     copystat and checkambig should be exclusive.
-    '''
+    """
     assert not (copystat and checkambig)
     oldstat = None
     if os.path.lexists(dest):
@@ -2017,7 +2017,7 @@ def copyfiles(src, dst, hardlink=None, p
 
 
 def checkwinfilename(path):
-    r'''Check that the base-relative path is a valid filename on Windows.
+    r"""Check that the base-relative path is a valid filename on Windows.
     Returns None if the path is ok, or a UI string describing the problem.
 
     >>> checkwinfilename(b"just/a/normal/path")
@@ -2039,7 +2039,7 @@ def checkwinfilename(path):
     "filename ends with '\\', which is invalid on Windows"
     >>> checkwinfilename(b"foo\\/bar")
     "directory name ends with '\\', which is invalid on Windows"
-    '''
+    """
     if path.endswith(b'\\'):
         return _(b"filename ends with '\\', which is invalid on Windows")
     if b'\\/' in path:
@@ -2175,11 +2175,11 @@ class _re(object):
         _re2 = False
 
     def compile(self, pat, flags=0):
-        '''Compile a regular expression, using re2 if possible
+        """Compile a regular expression, using re2 if possible
 
         For best performance, use only re2-compatible regexp features. The
         only flags from the re module that are re2-compatible are
-        IGNORECASE and MULTILINE.'''
+        IGNORECASE and MULTILINE."""
         if _re2 is None:
             self._checkre2()
         if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
@@ -2195,11 +2195,11 @@ class _re(object):
 
     @propertycache
     def escape(self):
-        '''Return the version of escape corresponding to self.compile.
+        """Return the version of escape corresponding to self.compile.
 
         This is imperfect because whether re2 or re is used for a particular
         function depends on the flags, etc, but it's the best we can do.
-        '''
+        """
         global _re2
         if _re2 is None:
             self._checkre2()
@@ -2215,7 +2215,7 @@ re = _re()
 
 
 def fspath(name, root):
-    '''Get name in the case stored in the filesystem
+    """Get name in the case stored in the filesystem
 
     The name should be relative to root, and be normcase-ed for efficiency.
 
@@ -2223,7 +2223,7 @@ def fspath(name, root):
     called, for case-sensitive filesystems (simply because it's expensive).
 
     The root should be normcase-ed, too.
-    '''
+    """
 
     def _makefspathcacheentry(dir):
         return {normcase(n): n for n in os.listdir(dir)}
@@ -2301,11 +2301,11 @@ def endswithsep(path):
 
 
 def splitpath(path):
-    '''Split path by os.sep.
+    """Split path by os.sep.
     Note that this function does not use os.altsep because this is
     an alternative of simple "xxx.split(os.sep)".
     It is recommended to use os.path.normpath() before using this
-    function if need.'''
+    function if need."""
     return path.split(pycompat.ossep)
 
 
@@ -2459,7 +2459,7 @@ class filestat(object):
 
 
 class atomictempfile(object):
-    '''writable file object that atomically updates a file
+    """writable file object that atomically updates a file
 
     All writes will go to a temporary copy of the original file. Call
     close() when you are done writing, and atomictempfile will rename
@@ -2470,7 +2470,7 @@ class atomictempfile(object):
     checkambig argument of constructor is used with filestat, and is
     useful only if target file is guarded by any lock (e.g. repo.lock
     or repo.wlock).
-    '''
+    """
 
     def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
         self.__name = name  # permanent name
@@ -3365,7 +3365,7 @@ timedcm._nested = 0
 
 
 def timed(func):
-    '''Report the execution time of a function call to stderr.
+    """Report the execution time of a function call to stderr.
 
     During development, use as a decorator when you need to measure
     the cost of a function, e.g. as follows:
@@ -3373,7 +3373,7 @@ def timed(func):
     @util.timed
     def foo(a, b, c):
         pass
-    '''
+    """
 
     def wrapper(*args, **kwargs):
         with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
@@ -3404,7 +3404,7 @@ def timed(func):
 
 
 def sizetoint(s):
-    '''Convert a space specifier to a byte count.
+    """Convert a space specifier to a byte count.
 
     >>> sizetoint(b'30')
     30
@@ -3412,7 +3412,7 @@ def sizetoint(s):
     2252
     >>> sizetoint(b'6M')
     6291456
-    '''
+    """
     t = s.strip().lower()
     try:
         for k, u in _sizeunits:
@@ -3424,9 +3424,9 @@ def sizetoint(s):
 
 
 class hooks(object):
-    '''A collection of hook functions that can be used to extend a
+    """A collection of hook functions that can be used to extend a
     function's behavior. Hooks are called in lexicographic order,
-    based on the names of their sources.'''
+    based on the names of their sources."""
 
     def __init__(self):
         self._hooks = []
@@ -3443,7 +3443,7 @@ class hooks(object):
 
 
 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
-    '''Yields lines for a nicely formatted stacktrace.
+    """Yields lines for a nicely formatted stacktrace.
     Skips the 'skip' last entries, then return the last 'depth' entries.
     Each file+linenumber is formatted according to fileline.
     Each line is formatted according to line.
@@ -3453,7 +3453,7 @@ def getstackframes(skip=0, line=b' %-*s 
     function
 
     Not be used in production code but very convenient while developing.
-    '''
+    """
     entries = [
         (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
        for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
@@ -3475,12 +3475,12 @@ def debugstacktrace(
     depth=0,
     prefix=b'',
 ):
-    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
+    """Writes a message to f (stderr) with a nicely formatted stacktrace.
     Skips the 'skip' entries closest to the call, then show 'depth' entries.
     By default it will flush stdout first.
     It can be used everywhere and intentionally does not require an ui object.
     Not be used in production code but very convenient while developing.
-    '''
+    """
     if otherf:
         otherf.flush()
     f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
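debugstacktrace, per its docstring above, is a development aid that deliberately needs no ui object. A typical ad-hoc call, with illustrative argument values:

    from mercurial import util

    util.debugstacktrace(b'who calls this?', skip=1, depth=5)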
@@ -641,19 +641,28 @@ class sansiodecoder(object):
 
             elif special == SPECIAL_START_ARRAY:
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': [],}
+                    {
+                        b'remaining': value,
+                        b'v': [],
+                    }
                 )
                 self._state = self._STATE_WANT_ARRAY_VALUE
 
             elif special == SPECIAL_START_MAP:
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': {},}
+                    {
+                        b'remaining': value,
+                        b'v': {},
+                    }
                 )
                 self._state = self._STATE_WANT_MAP_KEY
 
             elif special == SPECIAL_START_SET:
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': set(),}
+                    {
+                        b'remaining': value,
+                        b'v': set(),
+                    }
                 )
                 self._state = self._STATE_WANT_SET_VALUE
 
@@ -684,7 +693,10 @@ class sansiodecoder(object):
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     # self._state doesn't need changed.
@@ -711,7 +723,10 @@ class sansiodecoder(object):
                 lastc[b'remaining'] -= 1
 
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': newvalue,}
+                    {
+                        b'remaining': value,
+                        b'v': newvalue,
+                    }
                 )
 
                 self._state = self._STATE_WANT_SET_VALUE
@@ -775,7 +790,10 @@ class sansiodecoder(object):
                 lastc[b'remaining'] -= 1
 
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': newvalue,}
+                    {
+                        b'remaining': value,
+                        b'v': newvalue,
+                    }
                 )
 
                 self._state = self._STATE_WANT_ARRAY_VALUE
@@ -789,7 +807,10 @@ class sansiodecoder(object):
                 lastc[b'remaining'] -= 1
 
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': newvalue,}
+                    {
+                        b'remaining': value,
+                        b'v': newvalue,
+                    }
                 )
 
                 self._state = self._STATE_WANT_MAP_KEY
@@ -803,7 +824,10 @@ class sansiodecoder(object):
                 lastc[b'remaining'] -= 1
 
                 self._collectionstack.append(
-                    {b'remaining': value, b'v': newvalue,}
+                    {
+                        b'remaining': value,
+                        b'v': newvalue,
+                    }
                 )
 
                 self._state = self._STATE_WANT_SET_VALUE
@@ -29,7 +29,8 b" SERVERROLE = b'server'"
29 CLIENTROLE = b'client'
29 CLIENTROLE = b'client'
30
30
31 compewireprotosupport = collections.namedtuple(
31 compewireprotosupport = collections.namedtuple(
32 'compenginewireprotosupport', ('name', 'serverpriority', 'clientpriority'),
32 'compenginewireprotosupport',
33 ('name', 'serverpriority', 'clientpriority'),
33 )
34 )
34
35
35
36
@@ -53,12 +53,17 @@ defaultdateformats = (
     b'%I:%M%p',
 )
 
-extendeddateformats = defaultdateformats + (b"%Y", b"%Y-%m", b"%b", b"%b %Y",)
+extendeddateformats = defaultdateformats + (
+    b"%Y",
+    b"%Y-%m",
+    b"%b",
+    b"%b %Y",
+)
 
 
 def makedate(timestamp=None):
-    '''Return a unix timestamp (or the current time) as a (unixtime,
-    offset) tuple based off the local timezone.'''
+    """Return a unix timestamp (or the current time) as a (unixtime,
+    offset) tuple based off the local timezone."""
     if timestamp is None:
         timestamp = time.time()
     if timestamp < 0:
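The (unixtime, offset) pair that makedate documents can be reproduced with a short standalone sketch. The sign convention (a positive offset lies west of UTC) matches Mercurial's usage; the computation below is illustrative, not necessarily the function's actual body:

    import datetime
    import time

    ts = time.time()
    delta = (datetime.datetime.utcfromtimestamp(ts)
             - datetime.datetime.fromtimestamp(ts))
    tz = delta.days * 86400 + delta.seconds  # seconds west of UTC
    date = (ts, tz)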
@@ -115,7 +120,7 @@ def shortdate(date=None):
 
 
 def parsetimezone(s):
     """find a trailing timezone, if any, in string, and return a
-       (offset, remainder) pair"""
+    (offset, remainder) pair"""
     s = pycompat.bytestr(s)
 
     if s.endswith(b"GMT") or s.endswith(b"UTC"):
@@ -292,10 +292,10 @@ def pipefilter(s, cmd):
 
 
 def tempfilter(s, cmd):
-    '''filter string S through a pair of temporary files with CMD.
+    """filter string S through a pair of temporary files with CMD.
     CMD is used as a template to create the real command to be run,
     with the strings INFILE and OUTFILE replaced by the real names of
-    the temporary files generated.'''
+    the temporary files generated."""
     inname, outname = None, None
     try:
         infd, inname = pycompat.mkstemp(prefix=b'hg-filter-in-')
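The INFILE/OUTFILE templating that tempfilter describes is a plain textual substitution performed before the command runs; a sketch, where the iconv command is only an example filter:

    cmd = b'iconv -f latin1 -t utf-8 < INFILE > OUTFILE'
    realcmd = cmd.replace(b'INFILE', inname).replace(b'OUTFILE', outname)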
@@ -465,17 +465,16 @@ else:
 
 
 def tonativeenv(env):
-    '''convert the environment from bytes to strings suitable for Popen(), etc.
-    '''
+    """convert the environment from bytes to strings suitable for Popen(), etc."""
     return pycompat.rapply(tonativestr, env)
 
 
 def system(cmd, environ=None, cwd=None, out=None):
-    '''enhanced shell command execution.
+    """enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
 
     if out is specified, it is assumed to be a file-like object that has a
-    write() method. stdout and stderr will be redirected to out.'''
+    write() method. stdout and stderr will be redirected to out."""
     try:
         stdout.flush()
     except Exception:
@@ -685,14 +684,14 @@ else:
         record_wait=None,
         stdin_bytes=None,
     ):
-        '''Spawn a command without waiting for it to finish.
+        """Spawn a command without waiting for it to finish.
 
 
         When `record_wait` is not None, the spawned process will not be fully
         detached and the `record_wait` argument will be called with a the
         `Subprocess.wait` function for the spawned process. This is mostly
         useful for developers that need to make sure the spawned process
-        finished before a certain point. (eg: writing test)'''
+        finished before a certain point. (eg: writing test)"""
         if pycompat.isdarwin:
             # avoid crash in CoreFoundation in case another thread
             # calls gui() while we're calling fork().
@@ -494,15 +494,15 @@ def person(author):
 
 @attr.s(hash=True)
 class mailmapping(object):
-    '''Represents a username/email key or value in
-    a mailmap file'''
+    """Represents a username/email key or value in
+    a mailmap file"""
 
     email = attr.ib()
     name = attr.ib(default=None)
 
 
 def _ismailmaplineinvalid(names, emails):
-    '''Returns True if the parsed names and emails
+    """Returns True if the parsed names and emails
     in a mailmap entry are invalid.
 
     >>> # No names or emails fails
@@ -522,7 +522,7 @@ def _ismailmaplineinvalid(names, emails)
     >>> emails = [b'proper@email.com', b'commit@email.com']
     >>> _ismailmaplineinvalid(names, emails)
     False
-    '''
+    """
     return not emails or not names and len(emails) < 2
 
 
@@ -597,11 +597,13 @@ def parsemailmap(mailmapcontent):
             continue
 
         mailmapkey = mailmapping(
-            email=emails[-1], name=names[-1] if len(names) == 2 else None,
+            email=emails[-1],
+            name=names[-1] if len(names) == 2 else None,
         )
 
         mailmap[mailmapkey] = mailmapping(
-            email=emails[0], name=names[0] if names else None,
+            email=emails[0],
+            name=names[0] if names else None,
         )
 
     return mailmap
@@ -659,7 +661,7 @@ def mapname(mailmap, author):
 
 
 def isauthorwellformed(author):
-    '''Return True if the author field is well formed
+    """Return True if the author field is well formed
     (ie "Contributor Name <contrib@email.dom>")
 
     >>> isauthorwellformed(b'Good Author <good@author.com>')
@@ -676,7 +678,7 @@ def isauthorwellformed(author):
     False
     >>> isauthorwellformed(b'Bad Author <author>')
     False
-    '''
+    """
     return _correctauthorformat.match(author) is not None
 
 
@@ -83,12 +83,12 @@ class abstractvfs(object):
 
     @util.propertycache
     def open(self):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         Newly created directories are marked as "not to be indexed by
         the content indexing service", if ``notindexed`` is specified
         for "write" mode access.
-        '''
+        """
         return self.__call__
 
     def read(self, path):
@@ -142,9 +142,9 @@ class abstractvfs(object):
         return os.path.islink(self.join(path))
 
     def isfileorlink(self, path=None):
-        '''return whether path is a regular file or a symlink
+        """return whether path is a regular file or a symlink
 
-        Unlike isfile, this doesn't follow symlinks.'''
+        Unlike isfile, this doesn't follow symlinks."""
         try:
             st = self.lstat(path)
         except OSError:
@@ -228,8 +228,7 @@ class abstractvfs(object):
         return util.readlink(self.join(path))
 
     def removedirs(self, path=None):
-        """Remove a leaf directory and all empty intermediate ones
-        """
+        """Remove a leaf directory and all empty intermediate ones"""
         return util.removedirs(self.join(path))
 
     def rmdir(self, path=None):
@@ -332,7 +331,7 @@ class abstractvfs(object):
 
 
 class vfs(abstractvfs):
-    '''Operate files relative to a base directory
+    """Operate files relative to a base directory
 
     This class is used to hide the details of COW semantics and
     remote file access from higher level code.
@@ -340,7 +339,7 @@ class vfs(abstractvfs):
     'cacheaudited' should be enabled only if (a) vfs object is short-lived, or
     (b) the base directory is managed by hg and considered sort-of append-only.
     See pathutil.pathauditor() for details.
-    '''
+    """
 
     def __init__(
         self,
@@ -397,7 +396,7 @@ class vfs(abstractvfs):
         auditpath=True,
         makeparentdirs=True,
     ):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         By default, parent directories are created as needed. Newly created
         directories are marked as "not to be indexed by the content indexing
@@ -426,7 +425,7 @@ class vfs(abstractvfs):
         truncation), if it is owned by another. Therefore, use
         combination of append mode and checkambig=True only in limited
         cases (see also issue5418 and issue5584 for detail).
-        '''
+        """
         if auditpath:
             self._auditpath(path, mode)
         f = self.join(path)
@@ -385,13 +385,13 @@ def _getfileinfo(name):
 
 
 def checkcertificatechain(cert, build=True):
-    '''Tests the given certificate to see if there is a complete chain to a
+    """Tests the given certificate to see if there is a complete chain to a
     trusted root certificate. As a side effect, missing certificates are
     downloaded and installed unless ``build=False``. True is returned if a
     chain to a trusted root exists (even if built on the fly), otherwise
     False. NB: A chain to a trusted root does NOT imply that the certificate
     is valid.
-    '''
+    """
 
     chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
 
@@ -488,8 +488,8 @@ def lasterrorwaspipeerror(err):
 
 
 def testpid(pid):
-    '''return True if pid is still running or unable to
-    determine, False otherwise'''
+    """return True if pid is still running or unable to
+    determine, False otherwise"""
     h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid)
     if h:
         try:
@@ -576,10 +576,10 @@ def getuser():
 
 
 def setsignalhandler():
-    '''Register a termination handler for console events including
+    """Register a termination handler for console events including
     CTRL+C. python signal handlers do not work well with socket
     operations.
-    '''
+    """
 
     def handler(event):
         _kernel32.ExitProcess(1)
@@ -627,8 +627,8 @@ def termsize():
 
 
 def enablevtmode():
-    '''Enable virtual terminal mode for the associated console. Return True if
-    enabled, else False.'''
+    """Enable virtual terminal mode for the associated console. Return True if
+    enabled, else False."""
 
     ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
 
@@ -195,13 +195,13 @@ def _isatty(fp):
 
 
 class winstdout(object):
-    '''Some files on Windows misbehave.
+    """Some files on Windows misbehave.
 
     When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
 
     When writing too many bytes to a console at the same, a "Not enough space"
     error may happen. Python 3 already works around that.
-    '''
+    """
 
     def __init__(self, fp):
         self.fp = fp
@@ -497,11 +497,11 @@ def isowner(st):
 
 
 def findexe(command):
-    '''Find executable for command searching like cmd.exe does.
+    """Find executable for command searching like cmd.exe does.
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
     An extension from PATHEXT is found and added if not present.
-    If command isn't found None is returned.'''
+    If command isn't found None is returned."""
     pathext = encoding.environ.get(b'PATHEXT', b'.COM;.EXE;.BAT;.CMD')
     pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
     if os.path.splitext(command)[1].lower() in pathexts:
@@ -529,10 +529,10 @@ def findexe(command):
 
 
 def statfiles(files):
-    '''Stat each file in files. Yield each stat, or None if a file
+    """Stat each file in files. Yield each stat, or None if a file
     does not exist or has a type we don't care about.
 
-    Cluster and cache stat per directory to minimize number of OS stat calls.'''
+    Cluster and cache stat per directory to minimize number of OS stat calls."""
     dircache = {}  # dirname -> filename -> status | None if file does not exist
     getkind = stat.S_IFMT
     for nf in files:
@@ -630,14 +630,14 @@ class cachestat(object):
 
 
 def lookupreg(key, valname=None, scope=None):
-    ''' Look up a key/value name in the Windows registry.
+    """Look up a key/value name in the Windows registry.
 
     valname: value name. If unspecified, the default value for the key
     is used.
     scope: optionally specify scope for registry lookup, this can be
     a sequence of scopes to look up in order. Default (CURRENT_USER,
     LOCAL_MACHINE).
-    '''
+    """
     if scope is None:
         scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
     elif not isinstance(scope, (list, tuple)):
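A lookupreg call, per the docstring above, tries HKEY_CURRENT_USER and then HKEY_LOCAL_MACHINE by default. The key and value name below are illustrative, not ones Mercurial necessarily reads:

    installdir = lookupreg(
        b'SOFTWARE\\ExampleVendor\\ExampleTool',
        valname=b'InstallDir',
    )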
@@ -456,7 +456,10 @@ def createcommandresponseeosframes(
 def createalternatelocationresponseframe(stream, requestid, location):
     data = {
         b'status': b'redirect',
-        b'location': {b'url': location.url, b'mediatype': location.mediatype,},
+        b'location': {
+            b'url': location.url,
+            b'mediatype': location.mediatype,
+        },
     }
 
     for a in (
@@ -490,7 +493,12 @@ def createalternatelocationresponseframe
 def createcommanderrorresponse(stream, requestid, message, args=None):
     # TODO should this be using a list of {'msg': ..., 'args': {}} so atom
     # formatting works consistently?
-    m = {b'status': b'error', b'error': {b'message': message,}}
+    m = {
+        b'status': b'error',
+        b'error': {
+            b'message': message,
+        },
+    }
 
     if args:
         m[b'error'][b'args'] = args
@@ -510,7 +518,12 @@ def createerrorframe(stream, requestid, 
     assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
 
     payload = b''.join(
-        cborutil.streamencode({b'type': errtype, b'message': [{b'msg': msg}],})
+        cborutil.streamencode(
+            {
+                b'type': errtype,
+                b'message': [{b'msg': msg}],
+            }
+        )
     )
 
     yield stream.makeframe(
@@ -1292,14 +1305,18 @@ class serverreactor(object):
             for frame in gen:
                 yield frame
 
-        return b'sendframes', {b'framegen': makegen(),}
+        return b'sendframes', {
+            b'framegen': makegen(),
+        }
 
     def _handlesendframes(self, framegen):
         if self._deferoutput:
             self._bufferedframegens.append(framegen)
             return b'noop', {}
         else:
-            return b'sendframes', {b'framegen': framegen,}
+            return b'sendframes', {
+                b'framegen': framegen,
+            }
 
     def onservererror(self, stream, requestid, msg):
         ensureserverstream(stream)
@@ -1351,7 +1368,9 @@ class serverreactor(object):
         return s
 
     def _makeerrorresult(self, msg):
-        return b'error', {b'message': msg,}
+        return b'error', {
+            b'message': msg,
+        }
 
     def _makeruncommandresult(self, requestid):
         entry = self._receivingcommands[requestid]
@@ -1397,7 +1416,9 @@ class serverreactor(object):
         )
 
     def _makewantframeresult(self):
-        return b'wantframe', {b'state': self._state,}
+        return b'wantframe', {
+            b'state': self._state,
+        }
 
     def _validatecommandrequestframe(self, frame):
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1802,7 +1823,9 @@ class clientreactor(object):
         return (
            request,
             b'sendframes',
-            {b'framegen': self._makecommandframes(request),},
+            {
+                b'framegen': self._makecommandframes(request),
+            },
         )
 
     def flushcommands(self):
@@ -1835,7 +1858,9 @@ class clientreactor(object):
                 for frame in self._makecommandframes(request):
                     yield frame
 
-        return b'sendframes', {b'framegen': makeframes(),}
+        return b'sendframes', {
+            b'framegen': makeframes(),
+        }
 
     def _makecommandframes(self, request):
         """Emit frames to issue a command request.
@@ -1851,7 +1876,9 @@ class clientreactor(object):
 
         payload = b''.join(
             cborutil.streamencode(
-                {b'contentencodings': self._clientcontentencoders,}
+                {
+                    b'contentencodings': self._clientcontentencoders,
+                }
             )
        )
 
@@ -33,14 +33,23 @@ SUPPORTED_ELLIPSESCAP = (ELLIPSESCAP1, E
 
 # All available wire protocol transports.
 TRANSPORTS = {
-    SSHV1: {b'transport': b'ssh', b'version': 1,},
+    SSHV1: {
+        b'transport': b'ssh',
+        b'version': 1,
+    },
     SSHV2: {
         b'transport': b'ssh',
         # TODO mark as version 2 once all commands are implemented.
         b'version': 1,
     },
-    b'http-v1': {b'transport': b'http', b'version': 1,},
-    HTTP_WIREPROTO_V2: {b'transport': b'http', b'version': 2,},
+    b'http-v1': {
+        b'transport': b'http',
+        b'version': 1,
+    },
+    HTTP_WIREPROTO_V2: {
+        b'transport': b'http',
+        b'version': 2,
+    },
 }
 
 
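Consumers of TRANSPORTS key off the per-transport metadata rather than the transport name itself; a sketch of the typical lookup, where using proto.name as the key is an assumption:

    meta = TRANSPORTS[proto.name]
    if meta[b'transport'] == b'http' and meta[b'version'] == 2:
        pass  # wire protocol version 2 specific handling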
@@ -36,7 +36,7 @@ urlreq = util.urlreq
 
 
 def batchable(f):
-    '''annotation for batchable methods
+    """annotation for batchable methods
 
     Such methods must implement a coroutine as follows:
 
@@ -56,7 +56,7 @@ def batchable(f):
     method, but adds the original method as an attribute called "batchable",
     which is used by remotebatch to split the call into separate encoding and
     decoding phases.
-    '''
+    """
 
     def plain(*args, **opts):
         batchable = f(*args, **opts)
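The coroutine template that the batchable docstring refers to is elided from this diff view; it looks roughly like the sketch below, where encode, decode and future() stand in for the real plumbing:

    @batchable
    def sample(self, one, two=None):
        # First yield: encoded arguments plus a future that the batcher
        # fills with the encoded result of the batched request.
        encoded_args = [(b'one', encode(one)), (b'two', encode(two))]
        encoded_res = future()
        yield encoded_args, encoded_res
        # Second yield: decode the now-filled future into the final value.
        yield decode(encoded_res.value)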
@@ -474,7 +474,7 @@ class wirepeer(repository.peer):
         return changegroupmod.cg1unpacker(f, b'UN')
 
     def unbundle(self, bundle, heads, url):
-        '''Send cg (a readable file-like object representing the
+        """Send cg (a readable file-like object representing the
         changegroup to push, typically a chunkbuffer object) to the
         remote server as a bundle.
 
@@ -485,7 +485,7 @@ class wirepeer(repository.peer):
 
         `url` is the url the client thinks it's pushing to, which is
         visible to hooks.
-        '''
+        """
 
         if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
@@ -655,6 +655,5 @@ class wirepeer(repository.peer):
         raise NotImplementedError()
 
     def _abort(self, exception):
-        """clearly abort the wire protocol connection and raise the exception
-        """
+        """clearly abort the wire protocol connection and raise the exception"""
         raise NotImplementedError()
@@ -602,10 +602,10 b' def pushkey(repo, proto, namespace, key,'
602
602
603 @wireprotocommand(b'stream_out', permission=b'pull')
603 @wireprotocommand(b'stream_out', permission=b'pull')
604 def stream(repo, proto):
604 def stream(repo, proto):
605 '''If the server supports streaming clone, it advertises the "stream"
605 """If the server supports streaming clone, it advertises the "stream"
606 capability with a value representing the version and flags of the repo
606 capability with a value representing the version and flags of the repo
607 it is serving. Client checks to see if it understands the format.
607 it is serving. Client checks to see if it understands the format.
608 '''
608 """
609 return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
609 return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
610
610
611
611
@@ -982,7 +982,10 b' def capabilitiesv2(repo, proto):'
982 b'revisions': {
982 b'revisions': {
983 b'type': b'list',
983 b'type': b'list',
984 b'example': [
984 b'example': [
985 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
985 {
986 b'type': b'changesetexplicit',
987 b'nodes': [b'abcdef...'],
988 }
986 ],
989 ],
987 },
990 },
988 b'fields': {
991 b'fields': {
@@ -1166,14 +1169,20 b' def makefilematcher(repo, pathfilter):'
1166 b'default': lambda: False,
1169 b'default': lambda: False,
1167 b'example': True,
1170 b'example': True,
1168 },
1171 },
1169 b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
1172 b'nodes': {
1173 b'type': b'list',
1174 b'example': [b'0123456...'],
1175 },
1170 b'fields': {
1176 b'fields': {
1171 b'type': b'set',
1177 b'type': b'set',
1172 b'default': set,
1178 b'default': set,
1173 b'example': {b'parents', b'revision'},
1179 b'example': {b'parents', b'revision'},
1174 b'validvalues': {b'parents', b'revision', b'linknode'},
1180 b'validvalues': {b'parents', b'revision', b'linknode'},
1175 },
1181 },
1176 b'path': {b'type': b'bytes', b'example': b'foo.txt',},
1182 b'path': {
1183 b'type': b'bytes',
1184 b'example': b'foo.txt',
1185 },
1177 },
1186 },
1178 permission=b'pull',
1187 permission=b'pull',
1179 # TODO censoring a file revision won't invalidate the cache.
1188 # TODO censoring a file revision won't invalidate the cache.
@@ -1262,7 +1271,10 b' def filesdatacapabilities(repo, proto):'
1262 b'revisions': {
1271 b'revisions': {
1263 b'type': b'list',
1272 b'type': b'list',
1264 b'example': [
1273 b'example': [
1265 {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
1274 {
1275 b'type': b'changesetexplicit',
1276 b'nodes': [b'abcdef...'],
1277 }
1266 ],
1278 ],
1267 },
1279 },
1268 },
1280 },
@@ -1375,7 +1387,12 b' def knownv2(repo, proto, nodes):'
1375
1387
1376 @wireprotocommand(
1388 @wireprotocommand(
1377 b'listkeys',
1389 b'listkeys',
1378 args={b'namespace': {b'type': b'bytes', b'example': b'ns',},},
1390 args={
1391 b'namespace': {
1392 b'type': b'bytes',
1393 b'example': b'ns',
1394 },
1395 },
1379 permission=b'pull',
1396 permission=b'pull',
1380 )
1397 )
1381 def listkeysv2(repo, proto, namespace):
1398 def listkeysv2(repo, proto, namespace):
@@ -1390,7 +1407,12 b' def listkeysv2(repo, proto, namespace):'
1390
1407
1391 @wireprotocommand(
1408 @wireprotocommand(
1392 b'lookup',
1409 b'lookup',
1393 args={b'key': {b'type': b'bytes', b'example': b'foo',},},
1410 args={
1411 b'key': {
1412 b'type': b'bytes',
1413 b'example': b'foo',
1414 },
1415 },
1394 permission=b'pull',
1416 permission=b'pull',
1395 )
1417 )
1396 def lookupv2(repo, proto, key):
1418 def lookupv2(repo, proto, key):
@@ -1415,7 +1437,10 b' def manifestdatacapabilities(repo, proto'
1415 @wireprotocommand(
1437 @wireprotocommand(
1416 b'manifestdata',
1438 b'manifestdata',
1417 args={
1439 args={
1418 b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
1440 b'nodes': {
1441 b'type': b'list',
1442 b'example': [b'0123456...'],
1443 },
1419 b'haveparents': {
1444 b'haveparents': {
1420 b'type': b'bool',
1445 b'type': b'bool',
1421 b'default': lambda: False,
1446 b'default': lambda: False,
@@ -1427,7 +1452,10 b' def manifestdatacapabilities(repo, proto'
1427 b'example': {b'parents', b'revision'},
1452 b'example': {b'parents', b'revision'},
1428 b'validvalues': {b'parents', b'revision'},
1453 b'validvalues': {b'parents', b'revision'},
1429 },
1454 },
1430 b'tree': {b'type': b'bytes', b'example': b'',},
1455 b'tree': {
1456 b'type': b'bytes',
1457 b'example': b'',
1458 },
1431 },
1459 },
1432 permission=b'pull',
1460 permission=b'pull',
1433 cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
1461 cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
@@ -1485,10 +1513,22 b' def manifestdata(repo, proto, haveparent'
1485 @wireprotocommand(
1513 @wireprotocommand(
1486 b'pushkey',
1514 b'pushkey',
1487 args={
1515 args={
1488 b'namespace': {b'type': b'bytes', b'example': b'ns',},
1516 b'namespace': {
1489 b'key': {b'type': b'bytes', b'example': b'key',},
1517 b'type': b'bytes',
1490 b'old': {b'type': b'bytes', b'example': b'old',},
1518 b'example': b'ns',
1491 b'new': {b'type': b'bytes', b'example': b'new',},
1519 },
1520 b'key': {
1521 b'type': b'bytes',
1522 b'example': b'key',
1523 },
1524 b'old': {
1525 b'type': b'bytes',
1526 b'example': b'old',
1527 },
1528 b'new': {
1529 b'type': b'bytes',
1530 b'example': b'new',
1531 },
1492 },
1532 },
1493 permission=b'push',
1533 permission=b'push',
1494 )
1534 )
@@ -116,8 +116,8 b' else:'
116
116
117
117
118 def worthwhile(ui, costperop, nops, threadsafe=True):
118 def worthwhile(ui, costperop, nops, threadsafe=True):
119 '''try to determine whether the benefit of multiple processes can
119 """try to determine whether the benefit of multiple processes can
120 outweigh the cost of starting them'''
120 outweigh the cost of starting them"""
121
121
122 if not threadsafe and _DISALLOW_THREAD_UNSAFE:
122 if not threadsafe and _DISALLOW_THREAD_UNSAFE:
123 return False
123 return False
@@ -131,7 +131,7 b' def worthwhile(ui, costperop, nops, thre'
131 def worker(
131 def worker(
132 ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
132 ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
133 ):
133 ):
134 '''run a function, possibly in parallel in multiple worker
134 """run a function, possibly in parallel in multiple worker
135 processes.
135 processes.
136
136
137 returns a progress iterator
137 returns a progress iterator
@@ -153,7 +153,7 b' def worker('
153 threadsafe - whether work items are thread safe and can be executed using
153 threadsafe - whether work items are thread safe and can be executed using
154 a thread-based worker. Should be disabled for CPU heavy tasks that don't
154 a thread-based worker. Should be disabled for CPU heavy tasks that don't
155 release the GIL.
155 release the GIL.
156 '''
156 """
157 enabled = ui.configbool(b'worker', b'enabled')
157 enabled = ui.configbool(b'worker', b'enabled')
158 if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
158 if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
159 return _platformworker(ui, func, staticargs, args, hasretval)
159 return _platformworker(ui, func, staticargs, args, hasretval)
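
A hedged usage sketch of the API documented above; the work function, cost constant, and file list are hypothetical, ui is taken from the calling Mercurial context, and each worker receives one slice of the argument list and yields its own progress values:

files = [b'a.txt', b'b.txt']  # stand-in argument list

def work(ui, items):
    # hypothetical work function: runs in each worker over one slice of files
    for item in items:
        yield 1, item  # (progress increment, result)

for progress, item in worker(ui, 0.05, work, (ui,), files):
    pass  # consume the progress iterator as results arrive
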
@@ -306,10 +306,10 b' def _posixworker(ui, func, staticargs, a'
306
306
307
307
308 def _posixexitstatus(code):
308 def _posixexitstatus(code):
309 '''convert a posix exit status into the same form returned by
309 """convert a posix exit status into the same form returned by
310 os.spawnv
310 os.spawnv
311
311
312 returns None if the process was stopped instead of exiting'''
312 returns None if the process was stopped instead of exiting"""
313 if os.WIFEXITED(code):
313 if os.WIFEXITED(code):
314 return os.WEXITSTATUS(code)
314 return os.WEXITSTATUS(code)
315 elif os.WIFSIGNALED(code):
315 elif os.WIFSIGNALED(code):
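
For reference, the os.spawnv convention the docstring points at is: the exit code for a normal exit, and the negated signal number for a signal death. A small illustration, assuming Linux-style wait-status encoding and that the WIFSIGNALED branch (truncated above) returns -os.WTERMSIG(code) as os.spawnv would:

# exited normally with status 15 (the status lives in the high byte)
assert _posixexitstatus(15 << 8) == 15
# killed by SIGKILL (9): WIFSIGNALED is true, reported negated
assert _posixexitstatus(9) == -9
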
@@ -423,7 +423,7 b' else:'
423
423
424
424
425 def partition(lst, nslices):
425 def partition(lst, nslices):
426 '''partition a list into N slices of roughly equal size
426 """partition a list into N slices of roughly equal size
427
427
428 The current strategy takes every Nth element from the input. If
428 The current strategy takes every Nth element from the input. If
429 we ever write workers that need to preserve grouping in input
429 we ever write workers that need to preserve grouping in input
@@ -450,6 +450,6 b' def partition(lst, nslices):'
450 What we should really be doing is have workers read filenames from an
450 What we should really be doing is have workers read filenames from an
451 ordered queue. This preserves locality and also keeps any worker from
451 ordered queue. This preserves locality and also keeps any worker from
452 getting more than one file out of balance.
452 getting more than one file out of balance.
453 '''
453 """
454 for i in range(nslices):
454 for i in range(nslices):
455 yield lst[i::nslices]
455 yield lst[i::nslices]
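
The strategy documented above is small enough to check directly: taking every Nth element (lst[i::nslices]) keeps the slice sizes within one of each other and preserves relative order inside each slice:

assert list(partition(list(range(10)), 3)) == [
    [0, 3, 6, 9],
    [1, 4, 7],
    [2, 5, 8],
]
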
@@ -816,7 +816,8 b' class buildhgexe(build_ext):'
816 if sys.version_info[0] >= 3:
816 if sys.version_info[0] >= 3:
817 fsdecode = os.fsdecode
817 fsdecode = os.fsdecode
818 dest = os.path.join(
818 dest = os.path.join(
819 os.path.dirname(self.hgtarget), fsdecode(dllbasename),
819 os.path.dirname(self.hgtarget),
820 fsdecode(dllbasename),
820 )
821 )
821
822
822 if not os.path.exists(dest):
823 if not os.path.exists(dest):
@@ -1066,7 +1067,7 b' class hginstall(install):'
1066
1067
1067
1068
1068 class hginstalllib(install_lib):
1069 class hginstalllib(install_lib):
1069 '''
1070 """
1070 This is a specialization of install_lib that replaces the copy_file used
1071 This is a specialization of install_lib that replaces the copy_file used
1071 there so that it supports setting the mode of files after copying them,
1072 there so that it supports setting the mode of files after copying them,
1072 instead of just preserving the mode that the files originally had. If your
1073 instead of just preserving the mode that the files originally had. If your
@@ -1075,7 +1076,7 b' class hginstalllib(install_lib):'
1075
1076
1076 Note that just passing keep_permissions=False to copy_file would be
1077 Note that just passing keep_permissions=False to copy_file would be
1077 insufficient, as it might still be applying a umask.
1078 insufficient, as it might still be applying a umask.
1078 '''
1079 """
1079
1080
1080 def run(self):
1081 def run(self):
1081 realcopyfile = file_util.copy_file
1082 realcopyfile = file_util.copy_file
@@ -1103,11 +1104,11 b' class hginstalllib(install_lib):'
1103
1104
1104
1105
1105 class hginstallscripts(install_scripts):
1106 class hginstallscripts(install_scripts):
1106 '''
1107 """
1107 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1108 This is a specialization of install_scripts that replaces the @LIBDIR@ with
1108 the configured directory for modules. If possible, the path is made relative
1109 the configured directory for modules. If possible, the path is made relative
1109 to the directory for scripts.
1110 to the directory for scripts.
1110 '''
1111 """
1111
1112
1112 def initialize_options(self):
1113 def initialize_options(self):
1113 install_scripts.initialize_options(self)
1114 install_scripts.initialize_options(self)
@@ -1400,8 +1401,7 b' class RustCompilationError(CCompilerErro'
1400
1401
1401
1402
1402 class RustExtension(Extension):
1403 class RustExtension(Extension):
1403 """Base classes for concrete Rust Extension classes.
1404 """Base classes for concrete Rust Extension classes."""
1404 """
1405
1405
1406 rusttargetdir = os.path.join('rust', 'target', 'release')
1406 rusttargetdir = os.path.join('rust', 'target', 'release')
1407
1407
@@ -1547,7 +1547,10 b' extmodules = ['
1547 include_dirs=common_include_dirs,
1547 include_dirs=common_include_dirs,
1548 extra_compile_args=common_cflags,
1548 extra_compile_args=common_cflags,
1549 depends=common_depends
1549 depends=common_depends
1550 + ['mercurial/cext/charencode.h', 'mercurial/cext/revlog.h',],
1550 + [
1551 'mercurial/cext/charencode.h',
1552 'mercurial/cext/revlog.h',
1553 ],
1551 ),
1554 ),
1552 Extension(
1555 Extension(
1553 'mercurial.cext.osutil',
1556 'mercurial.cext.osutil',
@@ -1635,10 +1638,19 b" if os.name == 'nt':"
1635 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1638 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1636
1639
1637 packagedata = {
1640 packagedata = {
1638 'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'dummycert.pem',],
1641 'mercurial': [
1639 'mercurial.defaultrc': ['*.rc',],
1642 'locale/*/LC_MESSAGES/hg.mo',
1640 'mercurial.helptext': ['*.txt',],
1643 'dummycert.pem',
1641 'mercurial.helptext.internals': ['*.txt',],
1644 ],
1645 'mercurial.defaultrc': [
1646 '*.rc',
1647 ],
1648 'mercurial.helptext': [
1649 '*.txt',
1650 ],
1651 'mercurial.helptext.internals': [
1652 '*.txt',
1653 ],
1642 }
1654 }
1643
1655
1644
1656
@@ -44,16 +44,24 b' configtable = {}'
44 configitem = registrar.configitem(configtable)
44 configitem = registrar.configitem(configtable)
45
45
46 configitem(
46 configitem(
47 b'badserver', b'closeafteraccept', default=False,
47 b'badserver',
48 b'closeafteraccept',
49 default=False,
48 )
50 )
49 configitem(
51 configitem(
50 b'badserver', b'closeafterrecvbytes', default=b'0',
52 b'badserver',
53 b'closeafterrecvbytes',
54 default=b'0',
51 )
55 )
52 configitem(
56 configitem(
53 b'badserver', b'closeaftersendbytes', default=b'0',
57 b'badserver',
58 b'closeaftersendbytes',
59 default=b'0',
54 )
60 )
55 configitem(
61 configitem(
56 b'badserver', b'closebeforeaccept', default=False,
62 b'badserver',
63 b'closebeforeaccept',
64 default=False,
57 )
65 )
58
66
59 # We can't adjust __class__ on a socket instance. So we define a proxy type.
67 # We can't adjust __class__ on a socket instance. So we define a proxy type.
@@ -27,7 +27,9 b' configtable = {}'
27 configitem = registrar.configitem(configtable)
27 configitem = registrar.configitem(configtable)
28
28
29 configitem(
29 configitem(
30 b'fakedirstatewritetime', b'fakenow', default=None,
30 b'fakedirstatewritetime',
31 b'fakenow',
32 default=None,
31 )
33 )
32
34
33 parsers = policy.importmod('parsers')
35 parsers = policy.importmod('parsers')
@@ -14,7 +14,9 b' configtable = {}'
14 configitem = registrar.configitem(configtable)
14 configitem = registrar.configitem(configtable)
15
15
16 configitem(
16 configitem(
17 b'fakepatchtime', b'fakenow', default=None,
17 b'fakepatchtime',
18 b'fakenow',
19 default=None,
18 )
20 )
19
21
20
22
@@ -139,10 +139,20 b' def extsetup(ui):'
139
139
140 # Register flag processors for each extension
140 # Register flag processors for each extension
141 flagutil.addflagprocessor(
141 flagutil.addflagprocessor(
142 REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
142 REVIDX_NOOP,
143 (
144 noopdonothingread,
145 noopdonothing,
146 validatehash,
147 ),
143 )
148 )
144 flagutil.addflagprocessor(
149 flagutil.addflagprocessor(
145 REVIDX_BASE64, (b64decode, b64encode, bypass,),
150 REVIDX_BASE64,
151 (
152 b64decode,
153 b64encode,
154 bypass,
155 ),
146 )
156 )
147 flagutil.addflagprocessor(
157 flagutil.addflagprocessor(
148 REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
158 REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
@@ -1047,7 +1047,7 b' def has_black():'
1047 version_regex = b'black, version ([0-9a-b.]+)'
1047 version_regex = b'black, version ([0-9a-b.]+)'
1048 version = matchoutput(blackcmd, version_regex)
1048 version = matchoutput(blackcmd, version_regex)
1049 sv = distutils.version.StrictVersion
1049 sv = distutils.version.StrictVersion
1050 return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')
1050 return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
1051
1051
1052
1052
1053 @check('pytype', 'the pytype type checker')
1053 @check('pytype', 'the pytype type checker')
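
This version check is the enforcement point for the upgrade: black-dependent tests are skipped unless `black --version` reports at least 20.8b1. distutils' StrictVersion understands the b (beta) pre-release suffix, so the comparison behaves as intended:

from distutils.version import StrictVersion

assert StrictVersion('20.8b1') > StrictVersion('19.10b0')
# b1 is a pre-release, so it still sorts below the final 20.8
assert StrictVersion('20.8b1') < StrictVersion('20.8')
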
@@ -44,8 +44,7 b' def check(*args, **kwargs):'
44
44
45
45
46 def roundtrips(data, decode, encode):
46 def roundtrips(data, decode, encode):
47 """helper to test functions that must do proper encode/decode roundtripping
47 """helper to test functions that must do proper encode/decode roundtripping"""
48 """
49
48
50 @given(data)
49 @given(data)
51 def testroundtrips(value):
50 def testroundtrips(value):
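
A hedged sketch of how this helper is driven; the identity codec is a stand-in, and real callers pass a hypothesis strategy plus a matching encode/decode pair:

import hypothesis.strategies as st

# trivial codec: any pair where decode(encode(x)) == x would do
roundtrips(st.binary(), lambda data: data, lambda data: data)
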
@@ -71,6 +70,11 b' bytestrings = ('
71 st.builds(
70 st.builds(
72 lambda s, e: s.encode(e),
71 lambda s, e: s.encode(e),
73 st.text(),
72 st.text(),
74 st.sampled_from(['utf-8', 'utf-16',]),
73 st.sampled_from(
74 [
75 'utf-8',
76 'utf-16',
77 ]
78 ),
75 )
79 )
76 ) | st.binary()
80 ) | st.binary()
@@ -534,7 +534,9 b' def getparser():'
534 help="install and use chg wrapper in place of hg",
534 help="install and use chg wrapper in place of hg",
535 )
535 )
536 hgconf.add_argument(
536 hgconf.add_argument(
537 "--chg-debug", action="store_true", help="show chg debug logs",
537 "--chg-debug",
538 action="store_true",
539 help="show chg debug logs",
538 )
540 )
539 hgconf.add_argument("--compiler", help="compiler to build with")
541 hgconf.add_argument("--compiler", help="compiler to build with")
540 hgconf.add_argument(
542 hgconf.add_argument(
@@ -1193,7 +1195,10 b' class Test(unittest.TestCase):'
1193 if self._keeptmpdir:
1195 if self._keeptmpdir:
1194 log(
1196 log(
1195 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
1197 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
1196 % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
1198 % (
1199 _bytes2sys(self._testtmp),
1200 _bytes2sys(self._threadtmp),
1201 )
1197 )
1202 )
1198 else:
1203 else:
1199 try:
1204 try:
@@ -2091,11 +2096,11 b' class TTest(Test):'
2091
2096
2092 @staticmethod
2097 @staticmethod
2093 def parsehghaveoutput(lines):
2098 def parsehghaveoutput(lines):
2094 '''Parse hghave log lines.
2099 """Parse hghave log lines.
2095
2100
2096 Return tuple of lists (missing, failed):
2101 Return tuple of lists (missing, failed):
2097 * the missing/unknown features
2102 * the missing/unknown features
2098 * the features for which existence check failed'''
2103 * the features for which existence check failed"""
2099 missing = []
2104 missing = []
2100 failed = []
2105 failed = []
2101 for line in lines:
2106 for line in lines:
@@ -2155,12 +2160,10 b' class TestResult(unittest._TextTestResul'
2155 self.color = pygmentspresent
2160 self.color = pygmentspresent
2156
2161
2157 def onStart(self, test):
2162 def onStart(self, test):
2158 """ Can be overridden by custom TestResult
2163 """Can be overridden by custom TestResult"""
2159 """
2160
2164
2161 def onEnd(self):
2165 def onEnd(self):
2162 """ Can be overridden by custom TestResult
2166 """Can be overridden by custom TestResult"""
2163 """
2164
2167
2165 def addFailure(self, test, reason):
2168 def addFailure(self, test, reason):
2166 self.failures.append((test, reason))
2169 self.failures.append((test, reason))
@@ -3168,7 +3171,9 b' class TestRunner(object):'
3168 vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
3171 vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
3169 vlog("# Using PATH", os.environ["PATH"])
3172 vlog("# Using PATH", os.environ["PATH"])
3170 vlog(
3173 vlog(
3171 "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
3174 "# Using",
3175 _bytes2sys(IMPL_PATH),
3176 _bytes2sys(osenvironb[IMPL_PATH]),
3172 )
3177 )
3173 vlog("# Writing to directory", _bytes2sys(self._outputdir))
3178 vlog("# Writing to directory", _bytes2sys(self._outputdir))
3174
3179
@@ -78,7 +78,13 b" testfilefixup(case0, b'22', [b'', b'22']"
78 testfilefixup(case0, b'222', [b'', b'222'])
78 testfilefixup(case0, b'222', [b'', b'222'])
79
79
80 # input case 1: 3 lines, each commit adds one line
80 # input case 1: 3 lines, each commit adds one line
81 case1 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3]),])
81 case1 = buildcontents(
82 [
83 (b'1', [1, 2, 3]),
84 (b'2', [2, 3]),
85 (b'3', [3]),
86 ]
87 )
82
88
83 # 1:1 line mapping
89 # 1:1 line mapping
84 testfilefixup(case1, b'123', case1)
90 testfilefixup(case1, b'123', case1)
@@ -121,7 +127,13 b" testfilefixup(case1, b'1a23', case1)"
121 testfilefixup(case1, b'12b3', case1)
127 testfilefixup(case1, b'12b3', case1)
122
128
123 # input case 2: delete in the middle
129 # input case 2: delete in the middle
124 case2 = buildcontents([(b'11', [1, 2]), (b'22', [1]), (b'33', [1, 2]),])
130 case2 = buildcontents(
131 [
132 (b'11', [1, 2]),
133 (b'22', [1]),
134 (b'33', [1, 2]),
135 ]
136 )
125
137
126 # deletion (optimize code should make it 2 chunks)
138 # deletion (optimize code should make it 2 chunks)
127 testfilefixup(
139 testfilefixup(
@@ -136,7 +148,13 b" testfilefixup(case2, b'aaaa', [b'', b'aa"
136 testfilefixup(case2, b'aaa', case2)
148 testfilefixup(case2, b'aaa', case2)
137
149
138 # input case 3: rev 3 reverts rev 2
150 # input case 3: rev 3 reverts rev 2
139 case3 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2]), (b'3', [1, 2, 3]),])
151 case3 = buildcontents(
152 [
153 (b'1', [1, 2, 3]),
154 (b'2', [2]),
155 (b'3', [1, 2, 3]),
156 ]
157 )
140
158
141 # 1:1 line mapping
159 # 1:1 line mapping
142 testfilefixup(case3, b'13', case3)
160 testfilefixup(case3, b'13', case3)
@@ -159,7 +177,13 b' case4 = buildcontents('
159 [
177 [
160 (b'1', [1, 2, 3]),
178 (b'1', [1, 2, 3]),
161 (b'2', [2, 3]),
179 (b'2', [2, 3]),
162 (b'3', [1, 2,]),
180 (
181 b'3',
182 [
183 1,
184 2,
185 ],
186 ),
163 (b'4', [1, 3]),
187 (b'4', [1, 3]),
164 (b'5', [3]),
188 (b'5', [3]),
165 (b'6', [2, 3]),
189 (b'6', [2, 3]),
@@ -183,7 +207,13 b" testfilefixup(case4, b'28', [b'', b'34',"
183 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
207 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
184
208
185 # input case 5: replace a small chunk which is near a deleted line
209 # input case 5: replace a small chunk which is near a deleted line
186 case5 = buildcontents([(b'12', [1, 2]), (b'3', [1]), (b'4', [1, 2]),])
210 case5 = buildcontents(
211 [
212 (b'12', [1, 2]),
213 (b'3', [1]),
214 (b'4', [1, 2]),
215 ]
216 )
187
217
188 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
218 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
189
219
@@ -24,13 +24,13 b' if pycompat.ispy3:'
24
24
25
25
26 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
26 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
27 '''nodes: total number of nodes in the graph
27 """nodes: total number of nodes in the graph
28 rootprob: probability that a new node (not 0) will be a root
28 rootprob: probability that a new node (not 0) will be a root
29 mergeprob: probability that, excluding a root, a node will be a merge
29 mergeprob: probability that, excluding a root, a node will be a merge
30 prevprob: probability that p1 will be the previous node
30 prevprob: probability that p1 will be the previous node
31
31
32 return value is a graph represented as an adjacency list.
32 return value is a graph represented as an adjacency list.
33 '''
33 """
34 graph = [None] * nodes
34 graph = [None] * nodes
35 for i in xrange(nodes):
35 for i in xrange(nodes):
36 if i == 0 or rng.random() < rootprob:
36 if i == 0 or rng.random() < rootprob:
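
A small usage sketch under the interface stated in the docstring (the seed and node count are arbitrary; the exact parent encoding for roots follows the test's own convention):

import random

rng = random.Random(0)  # deterministic for reproducibility
graph = buildgraph(rng, nodes=10)
assert len(graph) == 10  # graph[i] is the parent list of revision i
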
@@ -228,7 +228,11 b' def test_missingancestors_explicit():'
228 print("remaining (sorted): %s" % sorted(list(revs)))
228 print("remaining (sorted): %s" % sorted(list(revs)))
229
229
230 for i, (bases, revs) in enumerate(
230 for i, (bases, revs) in enumerate(
231 (({10}, {11}), ({11}, {10}), ({7}, {9, 11}),)
231 (
232 ({10}, {11}),
233 ({11}, {10}),
234 ({7}, {9, 11}),
235 )
232 ):
236 ):
233 print("%% missingancestors(), example %d" % (i + 1))
237 print("%% missingancestors(), example %d" % (i + 1))
234 missanc = ancestor.incrementalmissingancestors(graph.get, bases)
238 missanc = ancestor.incrementalmissingancestors(graph.get, bases)
@@ -30,11 +30,17 b' class thing(object):'
30 class localthing(thing):
30 class localthing(thing):
31 def foo(self, one, two=None):
31 def foo(self, one, two=None):
32 if one:
32 if one:
33 return b"%s and %s" % (one, two,)
33 return b"%s and %s" % (
34 one,
35 two,
36 )
34 return b"Nope"
37 return b"Nope"
35
38
36 def bar(self, b, a):
39 def bar(self, b, a):
37 return b"%s und %s" % (b, a,)
40 return b"%s und %s" % (
41 b,
42 a,
43 )
38
44
39 def greet(self, name=None):
45 def greet(self, name=None):
40 return b"Hello, %s" % name
46 return b"Hello, %s" % name
@@ -176,7 +182,15 b' class remotething(thing):'
176 args = b','.join(n + b'=' + escapearg(v) for n, v in args)
182 args = b','.join(n + b'=' + escapearg(v) for n, v in args)
177 req.append(name + b':' + args)
183 req.append(name + b':' + args)
178 req = b';'.join(req)
184 req = b';'.join(req)
179 res = self._submitone(b'batch', [(b'cmds', req,)])
185 res = self._submitone(
186 b'batch',
187 [
188 (
189 b'cmds',
190 req,
191 )
192 ],
193 )
180 for r in res.split(b';'):
194 for r in res.split(b';'):
181 yield r
195 yield r
182
196
@@ -190,7 +204,16 b' class remotething(thing):'
190
204
191 @wireprotov1peer.batchable
205 @wireprotov1peer.batchable
192 def foo(self, one, two=None):
206 def foo(self, one, two=None):
193 encargs = [(b'one', mangle(one),), (b'two', mangle(two),)]
207 encargs = [
208 (
209 b'one',
210 mangle(one),
211 ),
212 (
213 b'two',
214 mangle(two),
215 ),
216 ]
194 encresref = wireprotov1peer.future()
217 encresref = wireprotov1peer.future()
195 yield encargs, encresref
218 yield encargs, encresref
196 yield unmangle(encresref.value)
219 yield unmangle(encresref.value)
@@ -198,14 +221,33 b' class remotething(thing):'
198 @wireprotov1peer.batchable
221 @wireprotov1peer.batchable
199 def bar(self, b, a):
222 def bar(self, b, a):
200 encresref = wireprotov1peer.future()
223 encresref = wireprotov1peer.future()
201 yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref
224 yield [
225 (
226 b'b',
227 mangle(b),
228 ),
229 (
230 b'a',
231 mangle(a),
232 ),
233 ], encresref
202 yield unmangle(encresref.value)
234 yield unmangle(encresref.value)
203
235
204 # greet is coded directly. It therefore does not support batching. If it
236 # greet is coded directly. It therefore does not support batching. If it
205 # does appear in a batch, the batch is split around greet, and the call to
237 # does appear in a batch, the batch is split around greet, and the call to
206 # greet is done in its own roundtrip.
238 # greet is done in its own roundtrip.
207 def greet(self, name=None):
239 def greet(self, name=None):
208 return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
240 return unmangle(
241 self._submitone(
242 b'greet',
243 [
244 (
245 b'name',
246 mangle(name),
247 )
248 ],
249 )
250 )
209
251
210
252
211 # demo remote usage
253 # demo remote usage
@@ -690,7 +690,12 b' class ArrayTests(TestCase):'
690
690
691 self.assertEqual(
691 self.assertEqual(
692 list(cborutil.streamencodearrayfromiter(source)),
692 list(cborutil.streamencodearrayfromiter(source)),
693 [b'\x9f', b'\x43', b'foo', b'\xff',],
693 [
694 b'\x9f',
695 b'\x43',
696 b'foo',
697 b'\xff',
698 ],
694 )
699 )
695
700
696 dest = b''.join(cborutil.streamencodearrayfromiter(source))
701 dest = b''.join(cborutil.streamencodearrayfromiter(source))
@@ -799,7 +804,11 b' class ArrayTests(TestCase):'
799 class SetTests(TestCase):
804 class SetTests(TestCase):
800 def testempty(self):
805 def testempty(self):
801 self.assertEqual(
806 self.assertEqual(
802 list(cborutil.streamencode(set())), [b'\xd9\x01\x02', b'\x80',]
807 list(cborutil.streamencode(set())),
808 [
809 b'\xd9\x01\x02',
810 b'\x80',
811 ],
803 )
812 )
804
813
805 self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
814 self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
@@ -914,14 +923,26 b' class SetTests(TestCase):'
914 ):
923 ):
915 cborutil.decodeall(encoded)
924 cborutil.decodeall(encoded)
916
925
917 encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\x80',]) # empty array
926 encoded = b''.join(
927 [
928 b'\xd9\x01\x02',
929 b'\x81',
930 b'\x80',
931 ]
932 ) # empty array
918
933
919 with self.assertRaisesRegex(
934 with self.assertRaisesRegex(
920 cborutil.CBORDecodeError, 'collections not allowed as set values'
935 cborutil.CBORDecodeError, 'collections not allowed as set values'
921 ):
936 ):
922 cborutil.decodeall(encoded)
937 cborutil.decodeall(encoded)
923
938
924 encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\xa0',]) # empty map
939 encoded = b''.join(
940 [
941 b'\xd9\x01\x02',
942 b'\x81',
943 b'\xa0',
944 ]
945 ) # empty map
925
946
926 with self.assertRaisesRegex(
947 with self.assertRaisesRegex(
927 cborutil.CBORDecodeError, 'collections not allowed as set values'
948 cborutil.CBORDecodeError, 'collections not allowed as set values'
@@ -1059,7 +1080,13 b' class MapTests(TestCase):'
1059 ):
1080 ):
1060 cborutil.decodeall(encoded)
1081 cborutil.decodeall(encoded)
1061
1082
1062 encoded = b''.join([b'\xa1', b'\x80', b'\x43foo',]) # empty array
1083 encoded = b''.join(
1084 [
1085 b'\xa1',
1086 b'\x80',
1087 b'\x43foo',
1088 ]
1089 ) # empty array
1063
1090
1064 with self.assertRaisesRegex(
1091 with self.assertRaisesRegex(
1065 cborutil.CBORDecodeError, 'collections not supported as map keys'
1092 cborutil.CBORDecodeError, 'collections not supported as map keys'
@@ -1260,7 +1287,10 b' class DecodeallTests(TestCase):'
1260
1287
1261 def testpartialinput(self):
1288 def testpartialinput(self):
1262 encoded = b''.join(
1289 encoded = b''.join(
1263 [b'\x82', b'\x01',] # array of 2 elements # integer 1
1290 [
1291 b'\x82',
1292 b'\x01',
1293 ] # array of 2 elements # integer 1
1264 )
1294 )
1265
1295
1266 with self.assertRaisesRegex(
1296 with self.assertRaisesRegex(
@@ -76,7 +76,9 b' if not os.path.isdir(os.path.join(cwd, "'
76 sys.exit(0)
76 sys.exit(0)
77
77
78 files = subprocess.check_output(
78 files = subprocess.check_output(
79 "hg files --print0 \"%s\"" % fileset, shell=True, cwd=cwd,
79 "hg files --print0 \"%s\"" % fileset,
80 shell=True,
81 cwd=cwd,
80 ).split(b'\0')
82 ).split(b'\0')
81
83
82 if sys.version_info[0] >= 3:
84 if sys.version_info[0] >= 3:
@@ -69,29 +69,60 b' class linelogtests(unittest.TestCase):'
69 ll.replacelines(1, 0, 0, 0, 3)
69 ll.replacelines(1, 0, 0, 0, 3)
70 self.assertEqual(
70 self.assertEqual(
71 [(l.rev, l.linenum) for l in ll.annotate(1)],
71 [(l.rev, l.linenum) for l in ll.annotate(1)],
72 [(1, 0), (1, 1), (1, 2),],
72 [
73 (1, 0),
74 (1, 1),
75 (1, 2),
76 ],
73 )
77 )
74 # Replace line 1 with a new line
78 # Replace line 1 with a new line
75 ll.replacelines(2, 1, 2, 1, 2)
79 ll.replacelines(2, 1, 2, 1, 2)
76 self.assertEqual(
80 self.assertEqual(
77 [(l.rev, l.linenum) for l in ll.annotate(2)],
81 [(l.rev, l.linenum) for l in ll.annotate(2)],
78 [(1, 0), (2, 1), (1, 2),],
82 [
83 (1, 0),
84 (2, 1),
85 (1, 2),
86 ],
79 )
87 )
80 # delete a line out of 2
88 # delete a line out of 2
81 ll.replacelines(3, 1, 2, 0, 0)
89 ll.replacelines(3, 1, 2, 0, 0)
82 self.assertEqual(
90 self.assertEqual(
83 [(l.rev, l.linenum) for l in ll.annotate(3)], [(1, 0), (1, 2),]
91 [(l.rev, l.linenum) for l in ll.annotate(3)],
92 [
93 (1, 0),
94 (1, 2),
95 ],
84 )
96 )
85 # annotation of 1 is unchanged
97 # annotation of 1 is unchanged
86 self.assertEqual(
98 self.assertEqual(
87 [(l.rev, l.linenum) for l in ll.annotate(1)],
99 [(l.rev, l.linenum) for l in ll.annotate(1)],
88 [(1, 0), (1, 1), (1, 2),],
100 [
101 (1, 0),
102 (1, 1),
103 (1, 2),
104 ],
89 )
105 )
90 ll.annotate(3) # set internal state to revision 3
106 ll.annotate(3) # set internal state to revision 3
91 start = ll.getoffset(0)
107 start = ll.getoffset(0)
92 end = ll.getoffset(1)
108 end = ll.getoffset(1)
93 self.assertEqual(ll.getalllines(start, end), [(1, 0), (2, 1), (1, 1),])
109 self.assertEqual(
94 self.assertEqual(ll.getalllines(), [(1, 0), (2, 1), (1, 1), (1, 2),])
110 ll.getalllines(start, end),
111 [
112 (1, 0),
113 (2, 1),
114 (1, 1),
115 ],
116 )
117 self.assertEqual(
118 ll.getalllines(),
119 [
120 (1, 0),
121 (2, 1),
122 (1, 1),
123 (1, 2),
124 ],
125 )
95
126
96 def testparseclinelogfile(self):
127 def testparseclinelogfile(self):
97 # This data is what the replacements in testsimpleedits
128 # This data is what the replacements in testsimpleedits
@@ -116,14 +147,26 b' class linelogtests(unittest.TestCase):'
116 llc = linelog.linelog.fromdata(data)
147 llc = linelog.linelog.fromdata(data)
117 self.assertEqual(
148 self.assertEqual(
118 [(l.rev, l.linenum) for l in llc.annotate(1)],
149 [(l.rev, l.linenum) for l in llc.annotate(1)],
119 [(1, 0), (1, 1), (1, 2),],
150 [
151 (1, 0),
152 (1, 1),
153 (1, 2),
154 ],
120 )
155 )
121 self.assertEqual(
156 self.assertEqual(
122 [(l.rev, l.linenum) for l in llc.annotate(2)],
157 [(l.rev, l.linenum) for l in llc.annotate(2)],
123 [(1, 0), (2, 1), (1, 2),],
158 [
159 (1, 0),
160 (2, 1),
161 (1, 2),
162 ],
124 )
163 )
125 self.assertEqual(
164 self.assertEqual(
126 [(l.rev, l.linenum) for l in llc.annotate(3)], [(1, 0), (1, 2),]
165 [(l.rev, l.linenum) for l in llc.annotate(3)],
166 [
167 (1, 0),
168 (1, 2),
169 ],
127 )
170 )
128 # Check we emit the same bytecode.
171 # Check we emit the same bytecode.
129 ll = linelog.linelog()
172 ll = linelog.linelog()
@@ -73,7 +73,10 b' class teststate(object):'
73 self._acquirecalled,
73 self._acquirecalled,
74 called,
74 called,
75 'expected acquire to be %s but was actually %s'
75 'expected acquire to be %s but was actually %s'
76 % (self._tocalled(called), self._tocalled(self._acquirecalled),),
76 % (
77 self._tocalled(called),
78 self._tocalled(self._acquirecalled),
79 ),
77 )
80 )
78
81
79 def resetacquirefn(self):
82 def resetacquirefn(self):
@@ -84,7 +87,10 b' class teststate(object):'
84 self._releasecalled,
87 self._releasecalled,
85 called,
88 called,
86 'expected release to be %s but was actually %s'
89 'expected release to be %s but was actually %s'
87 % (self._tocalled(called), self._tocalled(self._releasecalled),),
90 % (
91 self._tocalled(called),
92 self._tocalled(self._releasecalled),
93 ),
88 )
94 )
89
95
90 def assertpostreleasecalled(self, called):
96 def assertpostreleasecalled(self, called):
@@ -104,7 +110,10 b' class teststate(object):'
104 actual,
110 actual,
105 exists,
111 exists,
106 'expected lock to %s but actually did %s'
112 'expected lock to %s but actually did %s'
107 % (self._toexists(exists), self._toexists(actual),),
113 % (
114 self._toexists(exists),
115 self._toexists(actual),
116 ),
108 )
117 )
109
118
110 def _tocalled(self, called):
119 def _tocalled(self, called):