merge with stable
Augie Fackler
r45251:f365dfed merge default
@@ -194,3 +194,4 @@ e4344e463c0c888a2f437b78b5982ecdf3f6650a
194 7f5410dfc8a64bb587d19637deb95d378fd1eb5c 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl44RUUQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91WcUD/9em14ckTP9APTrSpe6y4FLS6cIUZabNN6wDXjTrHmS26hoNvWrT+RpWQ5XSOOJhZdhjkR1k87EOw9+m6+36ZaL+RXYnjrbku9fxbbFBraGTFy0JZHAT6v57uQ8P7XwqN4dGvXXpgE5UuY5sp1uDRbtIPNts3iWJKAnIazxUnyotHNtJQNESHySomzR1s93z1oOMpHapAqUmPbcZywg4otWjrOnkhOok3Sa3TgGthpHbM0qmh6J9ZaRBXsKEpLkjCRNggdvqww1w4omcAJzY4V5tG8WfhW+Xl8zBBe0K5m/ug3e25sWR5Dqm4+qUO0HZWQ3m3/M7CCuQrWFXTkr7nKac50vtFzsqHlHNoaiKnvQKoruQs3266TGsrzCCOSy8BqmpysD6sB79owLKoh0LfFOcSwG9kZ8sovEvTfrRn8g3YAp7XbXkDxbcLMijr7P4gWq8sC1NZJn1yhLXitcCfAAuVrVQfPVdt2pp8Ry2NdGnHjikQjOn/wAKlYJ5F8JMdn6eEI/Gveg2g8uR9kp/9zaXRx6rU3ccuZQ7cBQbBlBsmmpd7gJRp2v0NKsV8hXtCPnBvcfCqgYHLg7FQVq1wKe5glvtmx9uPZNsl/S++fSxGoXfp9wVi048J42KyEH6yvoySCvbYeSFQvMfAoD1xJ4xWtT8ZEj6oiHvzHw1u/zgw==
195 6d121acbb82e65fe4dd3c2318a1b61981b958492 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl5f3IEQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91WoeD/9qhywGg/TI/FJEeJN5bJjcpB/YQeYDWCHh69yUmMPenf+6CaV/3QPc3R8JyQSKWwGUwc0IgZiJBb/HoUvBzpQyTvmGqddWsIGBpdGAkbLmRrE5BakR7Shs987a3Oq4hB03DJD4sQ1VitWg2OvGNd8rl1kSIF8aIErVI6ZiSw5eYemc/1VyBJXHWSFmcfnQqdsyPppH9e9/TAhio+YP4EmLmoxUcyRSb3UbtO2NT9+DEADaex+H2l9evg7AkTieVd6N163uqsLJIxSfCh5ZVmzaGW6uEoyC4U+9bkAyVE3Cy5z2giYblBzUkO9xqEZoA4tOM+b+gHokY8Sq3iGVw046CIW5+FjU9B5+7hCqWThYjnpnt+RomtHxrkqQ9SSHYnEWb4YTHqs+J7lWbm3ErjF08hYOyMA9/VT47UAKw4XL4Ss/1Pr7YezdmwB4jn7dqvslNvTqRAUOzB/15YeCfbd23SL4YzGaKBs9ajkxFFeCNNpLQ8CRm3a7/K6qkYyfSUpgUX7xBmRQTvUgr3nVk1epH/kOKwryy94Z+nlHF0qEMEq+1QOa5yvt3Kkr4H03pOFbLhdpjID5IYP4rRQTKB9yOS3XWBCE63AQVc7uuaBGPMCSLaKRAFDUXWY7GzCqda88WeN5BFC5iHrQTYE1IQ5YaWu38QMsJt2HHVc27+BuLA==
196 8fca7e8449a847e3cf1054f2c07b51237699fad3 0 iQJEBAABCAAuFiEEK8zhT1xnJaouqK63ucncgkqlvdUFAl6GDVQQHHJhZkBkdXJpbjQyLmNvbQAKCRC5ydyCSqW91egzEACNEyQwLWCQEeNyxXKuTsnXhYU/au7nSGOti/9+zg/22SSceMsVcIyNr2ZnkMf3hnzBjL7Efsthif0QXyfB0LZDXwNuDmNlDtUV2veyVGSDE2UqiSbDBRu6MYTvtfYX87RmSWla3HHO09pwpcrhxyHs3mliQsXyB2+D+ovTOIjYukQLnh34jQnwiWEYLDXkHEHHTpdXqAnA7tVen3ardLyTWgky6DUwlfcnoVsAPXnDkqQ9aE2w7SoAsNtEAddmkjKoYYdBkV5aUInU/DyFVF7qnlCcvWm+EkN1708xZUQ1KzdAyeeoIrMkBgpSoyeNQ9pcU3T7B100UxLo/FP/A7y96b2kHnKJU6fVyD3OeHvP9SeucurC6jn2YoG3e1wSOQcbEuCsdGjqgAHnKt2SMPsEBu2qJJcUdco9tANN5BdntBo7bLc/zcpXZH3TkRfRSndWXPaXDJaQNvbH7aLIUTCP9oQaqTN+9BQ+Egt7YsB4C58JZmC87FAuekDULc4LWK2gDPFf7F/PvBnMh7+YylPl/8LLrEnz2Q/GM0S1HLhBrDf6vzxV5wVzCu9Q2N0PCkg6lDAJFVWLTEbxcRukKxbyK88Yzrb4GuUY4F5V21fN4vuxkOay7eoiXUcHMN2IN+DwhNWQSm5pUnpqGTfCYj/ZBbAykP2UnVOClL6O2JQA2A==
197 26ce8e7515036d3431a03aaeb7bc72dd96cb1112 0 iQJJBAABCgAzFiEE64UTlbQiPuL3ugso2lR0C/CHMroFAl6YlRUVHDc4OTVwdWxraXRAZ21haWwuY29tAAoJENpUdAvwhzK6Z3YP/iOqphn99v0z2OupCl0q8CepbcdZMJWW3j00OAHYSO43M0FULpMpzC2o+kZDeqeLyzN7DsjoGts2cUnAOe9WX73sPkX1n1dbiDcUSsRqNND+tCkEZMtTn4DaGNIq1zSkkm8Q7O/1uwZPnX6FaIRMBs9qGbdfmMPNEvzny2tgrKc3ra1+AA8RCdtsbpqhjy+xf+EKVB/SMsQVVSJEgPkUkW6PwpaspdrxQKgZrb7C7Jx/gRVzMTUmCQe1sVCSnZNO3I/woAqDY2UNg7/hBubeRh/EjoH1o4ONTXgBQdYCl7QdcwDHpDc2HstonrFq51qxBecHDVw+ZKQds63Ixtxuab3SK0o/SWabZ1v8bGaWnyWnRWXL/1qkyFWly+fjEGGlv1kHl3n0UmwlUY8FQJCYDZgR0FqQGXAF3vMJOEp82ysk6jWN/7NRzcnoUC7HpNo1jPMiPRjskgVf3bhErfUQnhlF1YsVu/jPTixyfftbiaZmwILMkaPF8Kg3Cyf63p2cdcnTHdbP1U6ncR+BucthlbFei4WL0J2iERb8TBeCxOyCHlEUq8kampjbmPXN7VxnK4oX3xeBTf8mMbvrD5Fv3svRD+SkCCKu/MwQvB1VT6q425TSKHbCWeNqGjVLvetpx+skVH7eaXLEQ3wlCfo/0OQTRimx2O73EnOF5r8Q2POm
@@ -207,3 +207,4 @@ e4344e463c0c888a2f437b78b5982ecdf3f6650a
 7f5410dfc8a64bb587d19637deb95d378fd1eb5c 5.3
 6d121acbb82e65fe4dd3c2318a1b61981b958492 5.3.1
 8fca7e8449a847e3cf1054f2c07b51237699fad3 5.3.2
+26ce8e7515036d3431a03aaeb7bc72dd96cb1112 5.4rc0
@@ -59,7 +59,7 @@ DEBIAN_ACCOUNT_ID_2 = '136693071363'
 UBUNTU_ACCOUNT_ID = '099720109477'


-WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-2019.11.13'
+WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-*'


 KEY_PAIRS = {
@@ -464,7 +464,7 @@ def ensure_iam_state(iamclient, iamresou
 profile.add_role(RoleName=role)


-def find_image(ec2resource, owner_id, name):
+def find_image(ec2resource, owner_id, name, reverse_sort_field=None):
 """Find an AMI by its owner ID and name."""

 images = ec2resource.images.filter(
@@ -476,6 +476,13 @@ def find_image(ec2resource, owner_id, na
 ]
 )

+if reverse_sort_field:
+images = sorted(
+images,
+key=lambda image: getattr(image, reverse_sort_field),
+reverse=True,
+)
+
 for image in images:
 return image

@@ -1059,7 +1066,7 @@ def temporary_linux_dev_instances(


 def ensure_windows_dev_ami(
-c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME
+c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME,
 ):
 """Ensure Windows Development AMI is available and up-to-date.

@@ -1078,7 +1085,12 @@ def ensure_windows_dev_ami(

 name = '%s%s' % (prefix, 'windows-dev')

-image = find_image(ec2resource, AMAZON_ACCOUNT_ID, base_image_name)
+image = find_image(
+ec2resource,
+AMAZON_ACCOUNT_ID,
+base_image_name,
+reverse_sort_field="name",
+)

 config = {
 'BlockDeviceMappings': [
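Note on the hunks above: once WINDOWS_BASE_IMAGE_NAME becomes a wildcard, the EC2 image filter can match several AMIs, and passing reverse_sort_field="name" to find_image lets the newest date-stamped build win. A minimal standalone sketch of that sorting idea (fake data, not the boto3 objects the script actually uses):

images = [
    {"name": "Windows_Server-2019-English-Full-Base-2019.11.13"},
    {"name": "Windows_Server-2019-English-Full-Base-2020.03.11"},
]
# Date-stamped names sort lexicographically, so reverse order puts the newest first.
newest_first = sorted(images, key=lambda image: image["name"], reverse=True)
assert newest_first[0]["name"].endswith("2020.03.11")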
@@ -26,11 +26,11 @@ DISTROS = {

 INSTALL_PYTHONS = r'''
 PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0"
-PYENV3_VERSIONS="3.5.7 3.6.9 3.7.5 3.8.0 pypy3.5-7.0.0 pypy3.6-7.2.0"
+PYENV3_VERSIONS="3.5.9 3.6.10 3.7.7 3.8.2 pypy3.5-7.0.0 pypy3.6-7.3.0"

 git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv
 pushd /hgdev/pyenv
-git checkout 0e7cfc3b3d4eca46ad83d632e1505f5932cd179b
+git checkout 3005c4664372ae13fbe376be699313eb428c8bdd
 popd

 export PYENV_ROOT="/hgdev/pyenv"
@@ -22,10 +22,10 @@
 $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi"
 $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf"

-$PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.17/python-2.7.17.amd64.msi"
+$PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi"
-$PYTHON27_x64_SHA256 = "3b934447e3620e51d2daf5b2f258c9b617bcc686ca2f777a49aa3b47893abf1b"
+$PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05"
-$PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.17/python-2.7.17.msi"
+$PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi"
-$PYTHON27_X86_SHA256 = "a4e3a321517c6b0c2693d6f712a0d18c82600b3d0c759c299b3d14384a17f863"
+$PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9"

 $PYTHON35_x86_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe"
 $PYTHON35_x86_SHA256 = "F27C2D67FD9688E4970F3BFF799BB9D722A0D6C2C13B04848E1F7D620B524B0E"
@@ -37,15 +37,15 @@
 $PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe"
 $PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC"

-$PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.5/python-3.7.5.exe"
+$PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7.exe"
-$PYTHON37_x86_SHA256 = "3c2ae8f72b48e6e0c2b482206e322bf5d0344ff91abc3b3c200cec9e275c7168"
+$PYTHON37_x86_SHA256 = "27fbffcd342d5055acc64050db4c35d0025661521e642b59c381dcba2e162c6a"
-$PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.5/python-3.7.5-amd64.exe"
+$PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.7/python-3.7.7-amd64.exe"
-$PYTHON37_x64_SHA256 = "f3d60c127e7a92ed547efa3321bf70cd96b75c53bf4b903147015257c1314981"
+$PYTHON37_x64_SHA256 = "1a0368663ceff999d865de955992b6ea3cb0c8cb15a1a296a8eb7df19cc59e69"

-$PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0.exe"
+$PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2.exe"
-$PYTHON38_x86_SHA256 = "b471908de5e10d8fb5c3351a5affb1172da7790c533e0c9ffbaeec9c11611b15"
+$PYTHON38_x86_SHA256 = "03ac5754a69c9c11c08d1f4d694c14625a4d27348ad4dd2d1253e2547819db2c"
-$PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0-amd64.exe"
+$PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.2/python-3.8.2-amd64.exe"
-$PYTHON38_x64_SHA256 = "a9bbc6088a3e4c7112826e21bfee6277f7b6d93259f7c57176139231bb7071e4"
+$PYTHON38_x64_SHA256 = "8e400e3f32cdcb746e62e0db4d3ae4cba1f927141ebc4d0d5a4006b0daee8921"

 # PIP 19.2.3.
 $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py"
@@ -4,21 +4,29 @@
 #
 # pip-compile --generate-hashes --output-file=contrib/packaging/requirements.txt contrib/packaging/requirements.txt.in
 #
-jinja2==2.10.3 \
+docutils==0.16 \
---hash=sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f \
+--hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
---hash=sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de
+--hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
+# via -r contrib/packaging/requirements.txt.in
+jinja2==2.11.2 \
+--hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
+--hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
+# via -r contrib/packaging/requirements.txt.in
 markupsafe==1.1.1 \
 --hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
 --hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
 --hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
 --hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
+--hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
 --hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
 --hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b \
 --hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
 --hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
 --hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
 --hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
+--hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
 --hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
+--hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
 --hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
 --hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
 --hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
@@ -35,5 +43,7 @@ markupsafe==1.1.1 \
 --hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
 --hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
 --hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
+--hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
 --hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
+--hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
 # via jinja2
@@ -1,1 +1,2 @@
+docutils
 jinja2
@@ -67,6 +67,10 @@ def findcommonincoming(repo, remote, hea
 ancestorsof=ancestorsof,
 )
 common, anyinc, srvheads = res
+if heads and not anyinc:
+# server could be lying on the advertised heads
+has_node = repo.changelog.hasnode
+anyinc = any(not has_node(n) for n in heads)
 return (list(common), anyinc, heads or list(srvheads))


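Note: the hunk above guards against a server that advertises heads but then reports nothing incoming for them. A hedged sketch of the same check, outside Mercurial's API:

def anything_incoming(local_nodes, requested_heads, discovery_said_incoming):
    """Return True when a pull is still needed for explicitly requested heads."""
    if discovery_said_incoming:
        return True
    # The server could be lying about the advertised heads; trust local
    # knowledge: any requested head we do not have locally means incoming work.
    return any(head not in local_nodes for head in requested_heads)

assert anything_incoming({"n1", "n2"}, ["n3"], False) is True
assert anything_incoming({"n1", "n2"}, ["n2"], False) is False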
@@ -112,6 +112,11 @@ def update_persistent_nodemap(revlog):
 To be used for updating the nodemap on disk outside of a normal transaction
 setup (eg, `debugupdatecache`).
 """
+if revlog._inline:
+return # inlined revlog are too small for this to be relevant
+if revlog.nodemap_file is None:
+return # we do not use persistent_nodemap on this revlog
+
 notr = _NoTransaction()
 _persist_nodemap(notr, revlog)
 for k in sorted(notr._postclose):
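Note: the two early returns added above simply skip persisting a nodemap where it cannot help. A small illustration of the same guard-clause shape (attribute names mirror the hunk; the objects here are stand-ins, not real revlogs):

def maybe_persist_nodemap(revlog, persist):
    if getattr(revlog, "_inline", False):
        return False  # inline revlogs are too small for this to be relevant
    if getattr(revlog, "nodemap_file", None) is None:
        return False  # persistent nodemap is not enabled for this revlog
    persist(revlog)
    return True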
@@ -628,14 +628,12 @@ def determineactions(repo, deficiencies,
 """
 newactions = []

-knownreqs = supporteddestrequirements(repo)
-
 for d in deficiencies:
-name = d.name
+name = d._requirement

 # If the action is a requirement that doesn't show up in the
 # destination requirements, prune the action.
-if name in knownreqs and name not in destreqs:
+if name is not None and name not in destreqs:
 continue

 newactions.append(d)
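Note: after this change the pruning rule keys off the deficiency's own requirement rather than the table of supported destination requirements. A hedged sketch of that rule (assuming a `_requirement` attribute as named in the hunk, with None meaning the action is not tied to any requirement):

def prune_actions(deficiencies, destreqs):
    kept = []
    for d in deficiencies:
        name = getattr(d, "_requirement", None)
        if name is not None and name not in destreqs:
            continue  # requirement will not exist in the destination; drop the action
        kept.append(d)
    return kept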
@@ -307,7 +307,6 @@ pub struct StatusOptions {
 /// Dispatch a single entry (file, folder, symlink...) found during `traverse`.
 /// If the entry is a folder that needs to be traversed, it will be handled
 /// in a separate thread.
-
 fn handle_traversed_entry<'a>(
 scope: &rayon::Scope<'a>,
 files_sender: &'a crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
@@ -324,6 +323,11 @@ fn handle_traversed_entry<'a>(
 let file_type = dir_entry.file_type()?;
 let entry_option = dmap.get(&filename);

+if filename.as_bytes() == b".hg" {
+// Could be a directory or a symlink
+return Ok(());
+}
+
 if file_type.is_dir() {
 handle_traversed_dir(
 scope,
@@ -447,9 +451,7 @@ fn traverse_dir<'a>(
 options: StatusOptions,
 ) -> IoResult<()> {
 let directory = directory.as_ref();
-if directory.as_bytes() == b".hg" {
-return Ok(());
-}
+
 let visit_entries = match matcher.visit_children_set(directory) {
 VisitChildrenSet::Empty => return Ok(()),
 VisitChildrenSet::This | VisitChildrenSet::Recursive => None,
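Note: these Rust hunks move the `.hg` check from the directory-traversal entry point onto each traversed entry, so an entry named `.hg` that is a symlink (exercised by the new test further down) is skipped as well. A Python illustration of the intended behaviour, not the actual implementation:

import os

def iter_working_copy(root):
    for dirpath, dirnames, filenames in os.walk(root):
        # Prune .hg wherever it appears; symlinks to directories show up here too.
        dirnames[:] = [d for d in dirnames if d != ".hg"]
        for name in filenames:
            if name == ".hg":  # e.g. a symlink named .hg pointing at another repo
                continue
            yield os.path.join(dirpath, name)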
@@ -890,7 +890,8 @@ class hgbuilddoc(Command):
 )
 if res:
 raise SystemExit(
-'error running gendoc.py: %s' % '\n'.join([out, err])
+'error running gendoc.py: %s'
+% '\n'.join([sysstr(out), sysstr(err)])
 )

 with open(txt, 'wb') as fh:
@@ -907,7 +908,8 @@ class hgbuilddoc(Command):
 )
 if res:
 raise SystemExit(
-'error running gendoc: %s' % '\n'.join([out, err])
+'error running gendoc: %s'
+% '\n'.join([sysstr(out), sysstr(err)])
 )

 with open(gendoc, 'wb') as fh:
@@ -932,7 +934,8 @@ class hgbuilddoc(Command):
 )
 if res:
 raise SystemExit(
-'error running runrst: %s' % '\n'.join([out, err])
+'error running runrst: %s'
+% '\n'.join([sysstr(out), sysstr(err)])
 )

 normalizecrlf('doc/%s' % root)
@@ -957,7 +960,8 @@ class hgbuilddoc(Command):
 )
 if res:
 raise SystemExit(
-'error running runrst: %s' % '\n'.join([out, err])
+'error running runrst: %s'
+% '\n'.join([sysstr(out), sysstr(err)])
 )

 normalizecrlf('doc/%s.html' % root)
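Note on why the four hunks above wrap `out` and `err`: on Python 3 the captured subprocess output is bytes, and `'\n'.join()` refuses to mix str and bytes, so the error-reporting path itself would raise a TypeError. `sysstr` is the helper named in the hunks; the stand-in below only shows the idea:

def sysstr(s, encoding="utf-8"):
    # Stand-in for the conversion helper referenced above (assumed behaviour).
    return s.decode(encoding, "replace") if isinstance(s, bytes) else s

out, err = b"stdout text", b"stderr text"
try:
    "\n".join([out, err])  # TypeError on Python 3: cannot join bytes into a str
except TypeError:
    pass
message = "\n".join([sysstr(out), sysstr(err)])  # works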
@@ -1313,6 +1317,9 @@ for plat, macro, code in [
 if sys.platform == 'darwin':
 osutil_ldflags += ['-framework', 'ApplicationServices']

+if sys.platform == 'sunos5':
+osutil_ldflags += ['-lsocket']
+
 xdiff_srcs = [
 'mercurial/thirdparty/xdiff/xdiffi.c',
 'mercurial/thirdparty/xdiff/xprepare.c',
@@ -81,7 +81,7 @@ Make and delete some tags
 Different hash because no x bit
 $ hg bookmarks
 premerge1 3:973ef48a98a4
-premerge2 8:3537b15eaaca
+premerge2 8:1cc21e701444
 #endif

 Test that redoing a convert results in an identical graph
@@ -249,12 +249,12 @@ XXX: we should show better message when
 $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef
 pulling from ssh://user@dummy/repo
 searching for changes
-no changes found
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 4 changes to 4 files
 new changesets eaba929e866c:b4e4bce66051
+(run 'hg update' to get a working copy)
 $ hg glog
 o 5:b4e4bce66051 added e
 | public
@@ -133,12 +133,6 @@ non-lfs content, and the extension enabl
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: * (glob)
@@ -2334,10 +2334,10 @@ New namespace is registered per repo ins
 is global. So we shouldn't expect the namespace always exists. Using
 ssh:// makes sure a bundle repository is created from scratch. (issue6301)

-$ hg clone -e "'$PYTHON' '$TESTDIR/dummyssh'" \
+$ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" \
 > -qr0 "ssh://user@dummy/`pwd`/a" a-clone
 $ hg incoming --config extensions.names=names.py -R a-clone \
-> -e "'$PYTHON' '$TESTDIR/dummyssh'" -T '{bars}\n' -l1
+> -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" -T '{bars}\n' -l1
 comparing with ssh://user@dummy/$TESTTMP/a
 searching for changes

@@ -672,3 +672,11 @@ using log status template, the copy info


 $ cd ..
+
+Make sure .hg doesn't show up even as a symlink
+
+$ hg init repo0
+$ mkdir symlink-repo0
+$ cd symlink-repo0
+$ ln -s ../repo0/.hg
+$ hg status
@@ -159,12 +159,6 @@ An upgrade of a repository created with
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 additional optimizations are available by specifying "--optimize <name>":

 re-delta-parent
@@ -189,12 +183,6 @@ An upgrade of a repository created with
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -219,12 +207,6 @@ modern form of the option
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -332,12 +314,6 @@ Various sub-optimal detections work
 sparserevlog
 Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 additional optimizations are available by specifying "--optimize <name>":

 re-delta-parent
@@ -386,12 +362,6 @@ Various sub-optimal detections work
 sparserevlog
 Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 additional optimizations are available by specifying "--optimize <name>":

 re-delta-parent
@@ -418,12 +388,6 @@ Upgrading a repository that is already m
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/modern/.hg/upgrade.* (glob)
@@ -475,12 +439,6 @@ make sure we have a .d file
 generaldelta
 repository storage will be able to create optimal deltas; new repository data will be smaller and read times should decrease; interacting with other repositories using this storage model should require less network and CPU resources, making "hg push" and "hg pull" faster

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
@@ -577,12 +535,6 @@ unless --no-backup is passed
 sparserevlog
 Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
@@ -617,12 +569,6 @@ We can restrict optimization to some rev
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -697,12 +643,6 @@ Check we can select negatively
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -749,12 +689,6 @@ Check that we can select changelog only
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -801,12 +735,6 @@ Check that we can select filelog only
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -858,12 +786,6 @@ Check you can't skip revlog clone during
 preserved: dotencode, fncache, generaldelta, revlogv1, store
 removed: sparserevlog

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -916,12 +838,6 @@ Check you can't skip revlog clone during
 sparserevlog
 Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-parent
 deltas within internal storage will choose a new base revision if needed

@@ -976,12 +892,6 @@ store files with special filenames aren'
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/store-filenames/.hg/upgrade.* (glob)
@@ -1013,12 +923,6 @@ store files with special filenames aren'
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-fulladd
 each revision will be added as new content to the internal storage; this will likely drastically slow down execution time, but some extensions might need it

@@ -1083,12 +987,6 @@ Check upgrading a large file repository
 requirements
 preserved: dotencode, fncache, generaldelta, largefiles, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
@@ -1142,12 +1040,6 @@ Check upgrading a large file repository
 requirements
 preserved: dotencode, fncache, generaldelta, largefiles, lfs, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 beginning upgrade...
 repository locked and read-only
 creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
@@ -1243,12 +1135,6 @@ repository config is taken in account
 requirements
 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store

-sidedata
-Allows storage of extra data alongside a revision.
-
-copies-sdc
-Allows to use more efficient algorithm to deal with copy tracing.
-
 re-delta-all
 deltas within internal storage will be fully recomputed; this will likely drastically slow down execution time

@@ -13,7 +13,7 @@ timer="$1"

 # if the test timeout have been extended, explicitly extend the provided timer
 if [ "$HGTEST_TIMEOUT_DEFAULT" -lt "$HGTEST_TIMEOUT" ]; then
-timer=$(( ($timer * $HGTEST_TIMEOUT) / $HGTEST_TIMEOUT_DEFAULT ))
+timer=$(( ( 100 * $timer * $HGTEST_TIMEOUT) / $HGTEST_TIMEOUT_DEFAULT ))
 fi

 wait_on="$2"