##// END OF EJS Templates
merge with stable
Matt Mackall -
r23602:a4679a74 merge default
parent child Browse files
Show More
@@ -1,100 +1,101 b''
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
101 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM=
@@ -1,113 +1,114 b''
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
114 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3
@@ -1,1288 +1,1295 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10
10
11 import os
11 import os
12 import copy
12 import copy
13
13
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
14 from mercurial import hg, util, cmdutil, scmutil, match as match_, \
15 archival, pathutil, revset
15 archival, pathutil, revset
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial.node import hex
17 from mercurial.node import hex
18
18
19 import lfutil
19 import lfutil
20 import lfcommands
20 import lfcommands
21 import basestore
21 import basestore
22
22
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
23 # -- Utility functions: commonly/repeatedly needed functionality ---------------
24
24
25 def composenormalfilematcher(match, manifest):
25 def composenormalfilematcher(match, manifest):
26 m = copy.copy(match)
26 m = copy.copy(match)
27 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
27 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
28 manifest)
28 manifest)
29 m._files = filter(notlfile, m._files)
29 m._files = filter(notlfile, m._files)
30 m._fmap = set(m._files)
30 m._fmap = set(m._files)
31 m._always = False
31 m._always = False
32 origmatchfn = m.matchfn
32 origmatchfn = m.matchfn
33 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
33 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
34 return m
34 return m
35
35
36 def installnormalfilesmatchfn(manifest):
36 def installnormalfilesmatchfn(manifest):
37 '''installmatchfn with a matchfn that ignores all largefiles'''
37 '''installmatchfn with a matchfn that ignores all largefiles'''
38 def overridematch(ctx, pats=[], opts={}, globbed=False,
38 def overridematch(ctx, pats=[], opts={}, globbed=False,
39 default='relpath'):
39 default='relpath'):
40 match = oldmatch(ctx, pats, opts, globbed, default)
40 match = oldmatch(ctx, pats, opts, globbed, default)
41 return composenormalfilematcher(match, manifest)
41 return composenormalfilematcher(match, manifest)
42 oldmatch = installmatchfn(overridematch)
42 oldmatch = installmatchfn(overridematch)
43
43
44 def installmatchfn(f):
44 def installmatchfn(f):
45 '''monkey patch the scmutil module with a custom match function.
45 '''monkey patch the scmutil module with a custom match function.
46 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
46 Warning: it is monkey patching the _module_ on runtime! Not thread safe!'''
47 oldmatch = scmutil.match
47 oldmatch = scmutil.match
48 setattr(f, 'oldmatch', oldmatch)
48 setattr(f, 'oldmatch', oldmatch)
49 scmutil.match = f
49 scmutil.match = f
50 return oldmatch
50 return oldmatch
51
51
52 def restorematchfn():
52 def restorematchfn():
53 '''restores scmutil.match to what it was before installmatchfn
53 '''restores scmutil.match to what it was before installmatchfn
54 was called. no-op if scmutil.match is its original function.
54 was called. no-op if scmutil.match is its original function.
55
55
56 Note that n calls to installmatchfn will require n calls to
56 Note that n calls to installmatchfn will require n calls to
57 restore the original matchfn.'''
57 restore the original matchfn.'''
58 scmutil.match = getattr(scmutil.match, 'oldmatch')
58 scmutil.match = getattr(scmutil.match, 'oldmatch')
59
59
60 def installmatchandpatsfn(f):
60 def installmatchandpatsfn(f):
61 oldmatchandpats = scmutil.matchandpats
61 oldmatchandpats = scmutil.matchandpats
62 setattr(f, 'oldmatchandpats', oldmatchandpats)
62 setattr(f, 'oldmatchandpats', oldmatchandpats)
63 scmutil.matchandpats = f
63 scmutil.matchandpats = f
64 return oldmatchandpats
64 return oldmatchandpats
65
65
66 def restorematchandpatsfn():
66 def restorematchandpatsfn():
67 '''restores scmutil.matchandpats to what it was before
67 '''restores scmutil.matchandpats to what it was before
68 installmatchandpatsfn was called. No-op if scmutil.matchandpats
68 installmatchandpatsfn was called. No-op if scmutil.matchandpats
69 is its original function.
69 is its original function.
70
70
71 Note that n calls to installmatchandpatsfn will require n calls
71 Note that n calls to installmatchandpatsfn will require n calls
72 to restore the original matchfn.'''
72 to restore the original matchfn.'''
73 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
73 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
74 scmutil.matchandpats)
74 scmutil.matchandpats)
75
75
76 def addlargefiles(ui, repo, matcher, **opts):
76 def addlargefiles(ui, repo, matcher, **opts):
77 large = opts.pop('large', None)
77 large = opts.pop('large', None)
78 lfsize = lfutil.getminsize(
78 lfsize = lfutil.getminsize(
79 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
79 ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
80
80
81 lfmatcher = None
81 lfmatcher = None
82 if lfutil.islfilesrepo(repo):
82 if lfutil.islfilesrepo(repo):
83 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
83 lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
84 if lfpats:
84 if lfpats:
85 lfmatcher = match_.match(repo.root, '', list(lfpats))
85 lfmatcher = match_.match(repo.root, '', list(lfpats))
86
86
87 lfnames = []
87 lfnames = []
88 m = copy.copy(matcher)
88 m = copy.copy(matcher)
89 m.bad = lambda x, y: None
89 m.bad = lambda x, y: None
90 wctx = repo[None]
90 wctx = repo[None]
91 for f in repo.walk(m):
91 for f in repo.walk(m):
92 exact = m.exact(f)
92 exact = m.exact(f)
93 lfile = lfutil.standin(f) in wctx
93 lfile = lfutil.standin(f) in wctx
94 nfile = f in wctx
94 nfile = f in wctx
95 exists = lfile or nfile
95 exists = lfile or nfile
96
96
97 # Don't warn the user when they attempt to add a normal tracked file.
97 # Don't warn the user when they attempt to add a normal tracked file.
98 # The normal add code will do that for us.
98 # The normal add code will do that for us.
99 if exact and exists:
99 if exact and exists:
100 if lfile:
100 if lfile:
101 ui.warn(_('%s already a largefile\n') % f)
101 ui.warn(_('%s already a largefile\n') % f)
102 continue
102 continue
103
103
104 if (exact or not exists) and not lfutil.isstandin(f):
104 if (exact or not exists) and not lfutil.isstandin(f):
105 wfile = repo.wjoin(f)
105 wfile = repo.wjoin(f)
106
106
107 # In case the file was removed previously, but not committed
107 # In case the file was removed previously, but not committed
108 # (issue3507)
108 # (issue3507)
109 if not os.path.exists(wfile):
109 if not os.path.exists(wfile):
110 continue
110 continue
111
111
112 abovemin = (lfsize and
112 abovemin = (lfsize and
113 os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
113 os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
114 if large or abovemin or (lfmatcher and lfmatcher(f)):
114 if large or abovemin or (lfmatcher and lfmatcher(f)):
115 lfnames.append(f)
115 lfnames.append(f)
116 if ui.verbose or not exact:
116 if ui.verbose or not exact:
117 ui.status(_('adding %s as a largefile\n') % m.rel(f))
117 ui.status(_('adding %s as a largefile\n') % m.rel(f))
118
118
119 bad = []
119 bad = []
120
120
121 # Need to lock, otherwise there could be a race condition between
121 # Need to lock, otherwise there could be a race condition between
122 # when standins are created and added to the repo.
122 # when standins are created and added to the repo.
123 wlock = repo.wlock()
123 wlock = repo.wlock()
124 try:
124 try:
125 if not opts.get('dry_run'):
125 if not opts.get('dry_run'):
126 standins = []
126 standins = []
127 lfdirstate = lfutil.openlfdirstate(ui, repo)
127 lfdirstate = lfutil.openlfdirstate(ui, repo)
128 for f in lfnames:
128 for f in lfnames:
129 standinname = lfutil.standin(f)
129 standinname = lfutil.standin(f)
130 lfutil.writestandin(repo, standinname, hash='',
130 lfutil.writestandin(repo, standinname, hash='',
131 executable=lfutil.getexecutable(repo.wjoin(f)))
131 executable=lfutil.getexecutable(repo.wjoin(f)))
132 standins.append(standinname)
132 standins.append(standinname)
133 if lfdirstate[f] == 'r':
133 if lfdirstate[f] == 'r':
134 lfdirstate.normallookup(f)
134 lfdirstate.normallookup(f)
135 else:
135 else:
136 lfdirstate.add(f)
136 lfdirstate.add(f)
137 lfdirstate.write()
137 lfdirstate.write()
138 bad += [lfutil.splitstandin(f)
138 bad += [lfutil.splitstandin(f)
139 for f in repo[None].add(standins)
139 for f in repo[None].add(standins)
140 if f in m.files()]
140 if f in m.files()]
141 finally:
141 finally:
142 wlock.release()
142 wlock.release()
143 return bad
143 return bad
144
144
145 def removelargefiles(ui, repo, isaddremove, *pats, **opts):
145 def removelargefiles(ui, repo, isaddremove, *pats, **opts):
146 after = opts.get('after')
146 after = opts.get('after')
147 if not pats and not after:
147 if not pats and not after:
148 raise util.Abort(_('no files specified'))
148 raise util.Abort(_('no files specified'))
149 m = scmutil.match(repo[None], pats, opts)
149 m = scmutil.match(repo[None], pats, opts)
150 try:
150 try:
151 repo.lfstatus = True
151 repo.lfstatus = True
152 s = repo.status(match=m, clean=True)
152 s = repo.status(match=m, clean=True)
153 finally:
153 finally:
154 repo.lfstatus = False
154 repo.lfstatus = False
155 manifest = repo[None].manifest()
155 manifest = repo[None].manifest()
156 modified, added, deleted, clean = [[f for f in list
156 modified, added, deleted, clean = [[f for f in list
157 if lfutil.standin(f) in manifest]
157 if lfutil.standin(f) in manifest]
158 for list in (s.modified, s.added,
158 for list in (s.modified, s.added,
159 s.deleted, s.clean)]
159 s.deleted, s.clean)]
160
160
161 def warn(files, msg):
161 def warn(files, msg):
162 for f in files:
162 for f in files:
163 ui.warn(msg % m.rel(f))
163 ui.warn(msg % m.rel(f))
164 return int(len(files) > 0)
164 return int(len(files) > 0)
165
165
166 result = 0
166 result = 0
167
167
168 if after:
168 if after:
169 remove = deleted
169 remove = deleted
170 result = warn(modified + added + clean,
170 result = warn(modified + added + clean,
171 _('not removing %s: file still exists\n'))
171 _('not removing %s: file still exists\n'))
172 else:
172 else:
173 remove = deleted + clean
173 remove = deleted + clean
174 result = warn(modified, _('not removing %s: file is modified (use -f'
174 result = warn(modified, _('not removing %s: file is modified (use -f'
175 ' to force removal)\n'))
175 ' to force removal)\n'))
176 result = warn(added, _('not removing %s: file has been marked for add'
176 result = warn(added, _('not removing %s: file has been marked for add'
177 ' (use forget to undo)\n')) or result
177 ' (use forget to undo)\n')) or result
178
178
179 for f in sorted(remove):
179 for f in sorted(remove):
180 if ui.verbose or not m.exact(f):
180 if ui.verbose or not m.exact(f):
181 ui.status(_('removing %s\n') % m.rel(f))
181 ui.status(_('removing %s\n') % m.rel(f))
182
182
183 # Need to lock because standin files are deleted then removed from the
183 # Need to lock because standin files are deleted then removed from the
184 # repository and we could race in-between.
184 # repository and we could race in-between.
185 wlock = repo.wlock()
185 wlock = repo.wlock()
186 try:
186 try:
187 lfdirstate = lfutil.openlfdirstate(ui, repo)
187 lfdirstate = lfutil.openlfdirstate(ui, repo)
188 for f in remove:
188 for f in remove:
189 if not after:
189 if not after:
190 # If this is being called by addremove, notify the user that we
190 # If this is being called by addremove, notify the user that we
191 # are removing the file.
191 # are removing the file.
192 if isaddremove:
192 if isaddremove:
193 ui.status(_('removing %s\n') % f)
193 ui.status(_('removing %s\n') % f)
194 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
194
195 lfdirstate.remove(f)
195 if not opts.get('dry_run'):
196 if not after:
197 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
198 lfdirstate.remove(f)
199
200 if opts.get('dry_run'):
201 return result
202
196 lfdirstate.write()
203 lfdirstate.write()
197 remove = [lfutil.standin(f) for f in remove]
204 remove = [lfutil.standin(f) for f in remove]
198 # If this is being called by addremove, let the original addremove
205 # If this is being called by addremove, let the original addremove
199 # function handle this.
206 # function handle this.
200 if not isaddremove:
207 if not isaddremove:
201 for f in remove:
208 for f in remove:
202 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
209 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
203 repo[None].forget(remove)
210 repo[None].forget(remove)
204 finally:
211 finally:
205 wlock.release()
212 wlock.release()
206
213
207 return result
214 return result
208
215
209 # For overriding mercurial.hgweb.webcommands so that largefiles will
216 # For overriding mercurial.hgweb.webcommands so that largefiles will
210 # appear at their right place in the manifests.
217 # appear at their right place in the manifests.
211 def decodepath(orig, path):
218 def decodepath(orig, path):
212 return lfutil.splitstandin(path) or path
219 return lfutil.splitstandin(path) or path
213
220
214 # -- Wrappers: modify existing commands --------------------------------
221 # -- Wrappers: modify existing commands --------------------------------
215
222
216 # Add works by going through the files that the user wanted to add and
223 # Add works by going through the files that the user wanted to add and
217 # checking if they should be added as largefiles. Then it makes a new
224 # checking if they should be added as largefiles. Then it makes a new
218 # matcher which matches only the normal files and runs the original
225 # matcher which matches only the normal files and runs the original
219 # version of add.
226 # version of add.
220 def overrideadd(orig, ui, repo, *pats, **opts):
227 def overrideadd(orig, ui, repo, *pats, **opts):
221 normal = opts.pop('normal')
228 normal = opts.pop('normal')
222 if normal:
229 if normal:
223 if opts.get('large'):
230 if opts.get('large'):
224 raise util.Abort(_('--normal cannot be used with --large'))
231 raise util.Abort(_('--normal cannot be used with --large'))
225 return orig(ui, repo, *pats, **opts)
232 return orig(ui, repo, *pats, **opts)
226 matcher = scmutil.match(repo[None], pats, opts)
233 matcher = scmutil.match(repo[None], pats, opts)
227 bad = addlargefiles(ui, repo, matcher, **opts)
234 bad = addlargefiles(ui, repo, matcher, **opts)
228 installnormalfilesmatchfn(repo[None].manifest())
235 installnormalfilesmatchfn(repo[None].manifest())
229 result = orig(ui, repo, *pats, **opts)
236 result = orig(ui, repo, *pats, **opts)
230 restorematchfn()
237 restorematchfn()
231
238
232 return (result == 1 or bad) and 1 or 0
239 return (result == 1 or bad) and 1 or 0
233
240
234 def overrideremove(orig, ui, repo, *pats, **opts):
241 def overrideremove(orig, ui, repo, *pats, **opts):
235 installnormalfilesmatchfn(repo[None].manifest())
242 installnormalfilesmatchfn(repo[None].manifest())
236 result = orig(ui, repo, *pats, **opts)
243 result = orig(ui, repo, *pats, **opts)
237 restorematchfn()
244 restorematchfn()
238 return removelargefiles(ui, repo, False, *pats, **opts) or result
245 return removelargefiles(ui, repo, False, *pats, **opts) or result
239
246
240 def overridestatusfn(orig, repo, rev2, **opts):
247 def overridestatusfn(orig, repo, rev2, **opts):
241 try:
248 try:
242 repo._repo.lfstatus = True
249 repo._repo.lfstatus = True
243 return orig(repo, rev2, **opts)
250 return orig(repo, rev2, **opts)
244 finally:
251 finally:
245 repo._repo.lfstatus = False
252 repo._repo.lfstatus = False
246
253
247 def overridestatus(orig, ui, repo, *pats, **opts):
254 def overridestatus(orig, ui, repo, *pats, **opts):
248 try:
255 try:
249 repo.lfstatus = True
256 repo.lfstatus = True
250 return orig(ui, repo, *pats, **opts)
257 return orig(ui, repo, *pats, **opts)
251 finally:
258 finally:
252 repo.lfstatus = False
259 repo.lfstatus = False
253
260
254 def overridedirty(orig, repo, ignoreupdate=False):
261 def overridedirty(orig, repo, ignoreupdate=False):
255 try:
262 try:
256 repo._repo.lfstatus = True
263 repo._repo.lfstatus = True
257 return orig(repo, ignoreupdate)
264 return orig(repo, ignoreupdate)
258 finally:
265 finally:
259 repo._repo.lfstatus = False
266 repo._repo.lfstatus = False
260
267
261 def overridelog(orig, ui, repo, *pats, **opts):
268 def overridelog(orig, ui, repo, *pats, **opts):
262 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
269 def overridematchandpats(ctx, pats=[], opts={}, globbed=False,
263 default='relpath'):
270 default='relpath'):
264 """Matcher that merges root directory with .hglf, suitable for log.
271 """Matcher that merges root directory with .hglf, suitable for log.
265 It is still possible to match .hglf directly.
272 It is still possible to match .hglf directly.
266 For any listed files run log on the standin too.
273 For any listed files run log on the standin too.
267 matchfn tries both the given filename and with .hglf stripped.
274 matchfn tries both the given filename and with .hglf stripped.
268 """
275 """
269 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
276 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default)
270 m, p = copy.copy(matchandpats)
277 m, p = copy.copy(matchandpats)
271
278
272 if m.always():
279 if m.always():
273 # We want to match everything anyway, so there's no benefit trying
280 # We want to match everything anyway, so there's no benefit trying
274 # to add standins.
281 # to add standins.
275 return matchandpats
282 return matchandpats
276
283
277 pats = set(p)
284 pats = set(p)
278 # TODO: handling of patterns in both cases below
285 # TODO: handling of patterns in both cases below
279 if m._cwd:
286 if m._cwd:
280 if os.path.isabs(m._cwd):
287 if os.path.isabs(m._cwd):
281 # TODO: handle largefile magic when invoked from other cwd
288 # TODO: handle largefile magic when invoked from other cwd
282 return matchandpats
289 return matchandpats
283 back = (m._cwd.count('/') + 1) * '../'
290 back = (m._cwd.count('/') + 1) * '../'
284 pats.update(back + lfutil.standin(m._cwd + '/' + f) for f in p)
291 pats.update(back + lfutil.standin(m._cwd + '/' + f) for f in p)
285 else:
292 else:
286 pats.update(lfutil.standin(f) for f in p)
293 pats.update(lfutil.standin(f) for f in p)
287
294
288 for i in range(0, len(m._files)):
295 for i in range(0, len(m._files)):
289 standin = lfutil.standin(m._files[i])
296 standin = lfutil.standin(m._files[i])
290 if standin in repo[ctx.node()]:
297 if standin in repo[ctx.node()]:
291 m._files[i] = standin
298 m._files[i] = standin
292 elif m._files[i] not in repo[ctx.node()]:
299 elif m._files[i] not in repo[ctx.node()]:
293 m._files.append(standin)
300 m._files.append(standin)
294 pats.add(standin)
301 pats.add(standin)
295
302
296 m._fmap = set(m._files)
303 m._fmap = set(m._files)
297 m._always = False
304 m._always = False
298 origmatchfn = m.matchfn
305 origmatchfn = m.matchfn
299 def lfmatchfn(f):
306 def lfmatchfn(f):
300 lf = lfutil.splitstandin(f)
307 lf = lfutil.splitstandin(f)
301 if lf is not None and origmatchfn(lf):
308 if lf is not None and origmatchfn(lf):
302 return True
309 return True
303 r = origmatchfn(f)
310 r = origmatchfn(f)
304 return r
311 return r
305 m.matchfn = lfmatchfn
312 m.matchfn = lfmatchfn
306
313
307 return m, pats
314 return m, pats
308
315
309 # For hg log --patch, the match object is used in two different senses:
316 # For hg log --patch, the match object is used in two different senses:
310 # (1) to determine what revisions should be printed out, and
317 # (1) to determine what revisions should be printed out, and
311 # (2) to determine what files to print out diffs for.
318 # (2) to determine what files to print out diffs for.
312 # The magic matchandpats override should be used for case (1) but not for
319 # The magic matchandpats override should be used for case (1) but not for
313 # case (2).
320 # case (2).
314 def overridemakelogfilematcher(repo, pats, opts):
321 def overridemakelogfilematcher(repo, pats, opts):
315 pctx = repo[None]
322 pctx = repo[None]
316 match, pats = oldmatchandpats(pctx, pats, opts)
323 match, pats = oldmatchandpats(pctx, pats, opts)
317 return lambda rev: match
324 return lambda rev: match
318
325
319 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
326 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
320 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
327 oldmakelogfilematcher = cmdutil._makenofollowlogfilematcher
321 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
328 setattr(cmdutil, '_makenofollowlogfilematcher', overridemakelogfilematcher)
322
329
323 try:
330 try:
324 return orig(ui, repo, *pats, **opts)
331 return orig(ui, repo, *pats, **opts)
325 finally:
332 finally:
326 restorematchandpatsfn()
333 restorematchandpatsfn()
327 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
334 setattr(cmdutil, '_makenofollowlogfilematcher', oldmakelogfilematcher)
328
335
329 def overrideverify(orig, ui, repo, *pats, **opts):
336 def overrideverify(orig, ui, repo, *pats, **opts):
330 large = opts.pop('large', False)
337 large = opts.pop('large', False)
331 all = opts.pop('lfa', False)
338 all = opts.pop('lfa', False)
332 contents = opts.pop('lfc', False)
339 contents = opts.pop('lfc', False)
333
340
334 result = orig(ui, repo, *pats, **opts)
341 result = orig(ui, repo, *pats, **opts)
335 if large or all or contents:
342 if large or all or contents:
336 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
343 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
337 return result
344 return result
338
345
339 def overridedebugstate(orig, ui, repo, *pats, **opts):
346 def overridedebugstate(orig, ui, repo, *pats, **opts):
340 large = opts.pop('large', False)
347 large = opts.pop('large', False)
341 if large:
348 if large:
342 class fakerepo(object):
349 class fakerepo(object):
343 dirstate = lfutil.openlfdirstate(ui, repo)
350 dirstate = lfutil.openlfdirstate(ui, repo)
344 orig(ui, fakerepo, *pats, **opts)
351 orig(ui, fakerepo, *pats, **opts)
345 else:
352 else:
346 orig(ui, repo, *pats, **opts)
353 orig(ui, repo, *pats, **opts)
347
354
348 # Override needs to refresh standins so that update's normal merge
355 # Override needs to refresh standins so that update's normal merge
349 # will go through properly. Then the other update hook (overriding repo.update)
356 # will go through properly. Then the other update hook (overriding repo.update)
350 # will get the new files. Filemerge is also overridden so that the merge
357 # will get the new files. Filemerge is also overridden so that the merge
351 # will merge standins correctly.
358 # will merge standins correctly.
352 def overrideupdate(orig, ui, repo, *pats, **opts):
359 def overrideupdate(orig, ui, repo, *pats, **opts):
353 # Need to lock between the standins getting updated and their
360 # Need to lock between the standins getting updated and their
354 # largefiles getting updated
361 # largefiles getting updated
355 wlock = repo.wlock()
362 wlock = repo.wlock()
356 try:
363 try:
357 if opts['check']:
364 if opts['check']:
358 lfdirstate = lfutil.openlfdirstate(ui, repo)
365 lfdirstate = lfutil.openlfdirstate(ui, repo)
359 unsure, s = lfdirstate.status(
366 unsure, s = lfdirstate.status(
360 match_.always(repo.root, repo.getcwd()),
367 match_.always(repo.root, repo.getcwd()),
361 [], False, False, False)
368 [], False, False, False)
362
369
363 mod = len(s.modified) > 0
370 mod = len(s.modified) > 0
364 for lfile in unsure:
371 for lfile in unsure:
365 standin = lfutil.standin(lfile)
372 standin = lfutil.standin(lfile)
366 if repo['.'][standin].data().strip() != \
373 if repo['.'][standin].data().strip() != \
367 lfutil.hashfile(repo.wjoin(lfile)):
374 lfutil.hashfile(repo.wjoin(lfile)):
368 mod = True
375 mod = True
369 else:
376 else:
370 lfdirstate.normal(lfile)
377 lfdirstate.normal(lfile)
371 lfdirstate.write()
378 lfdirstate.write()
372 if mod:
379 if mod:
373 raise util.Abort(_('uncommitted changes'))
380 raise util.Abort(_('uncommitted changes'))
374 return orig(ui, repo, *pats, **opts)
381 return orig(ui, repo, *pats, **opts)
375 finally:
382 finally:
376 wlock.release()
383 wlock.release()
377
384
378 # Before starting the manifest merge, merge.updates will call
385 # Before starting the manifest merge, merge.updates will call
379 # _checkunknownfile to check if there are any files in the merged-in
386 # _checkunknownfile to check if there are any files in the merged-in
380 # changeset that collide with unknown files in the working copy.
387 # changeset that collide with unknown files in the working copy.
381 #
388 #
382 # The largefiles are seen as unknown, so this prevents us from merging
389 # The largefiles are seen as unknown, so this prevents us from merging
383 # in a file 'foo' if we already have a largefile with the same name.
390 # in a file 'foo' if we already have a largefile with the same name.
384 #
391 #
385 # The overridden function filters the unknown files by removing any
392 # The overridden function filters the unknown files by removing any
386 # largefiles. This makes the merge proceed and we can then handle this
393 # largefiles. This makes the merge proceed and we can then handle this
387 # case further in the overridden calculateupdates function below.
394 # case further in the overridden calculateupdates function below.
388 def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
395 def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
389 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
396 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
390 return False
397 return False
391 return origfn(repo, wctx, mctx, f)
398 return origfn(repo, wctx, mctx, f)
392
399
393 # The manifest merge handles conflicts on the manifest level. We want
400 # The manifest merge handles conflicts on the manifest level. We want
394 # to handle changes in largefile-ness of files at this level too.
401 # to handle changes in largefile-ness of files at this level too.
395 #
402 #
396 # The strategy is to run the original calculateupdates and then process
403 # The strategy is to run the original calculateupdates and then process
397 # the action list it outputs. There are two cases we need to deal with:
404 # the action list it outputs. There are two cases we need to deal with:
398 #
405 #
399 # 1. Normal file in p1, largefile in p2. Here the largefile is
406 # 1. Normal file in p1, largefile in p2. Here the largefile is
400 # detected via its standin file, which will enter the working copy
407 # detected via its standin file, which will enter the working copy
401 # with a "get" action. It is not "merge" since the standin is all
408 # with a "get" action. It is not "merge" since the standin is all
402 # Mercurial is concerned with at this level -- the link to the
409 # Mercurial is concerned with at this level -- the link to the
403 # existing normal file is not relevant here.
410 # existing normal file is not relevant here.
404 #
411 #
405 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
412 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
406 # since the largefile will be present in the working copy and
413 # since the largefile will be present in the working copy and
407 # different from the normal file in p2. Mercurial therefore
414 # different from the normal file in p2. Mercurial therefore
408 # triggers a merge action.
415 # triggers a merge action.
409 #
416 #
410 # In both cases, we prompt the user and emit new actions to either
417 # In both cases, we prompt the user and emit new actions to either
411 # remove the standin (if the normal file was kept) or to remove the
418 # remove the standin (if the normal file was kept) or to remove the
412 # normal file and get the standin (if the largefile was kept). The
419 # normal file and get the standin (if the largefile was kept). The
413 # default prompt answer is to use the largefile version since it was
420 # default prompt answer is to use the largefile version since it was
414 # presumably changed on purpose.
421 # presumably changed on purpose.
415 #
422 #
416 # Finally, the merge.applyupdates function will then take care of
423 # Finally, the merge.applyupdates function will then take care of
417 # writing the files into the working copy and lfcommands.updatelfiles
424 # writing the files into the working copy and lfcommands.updatelfiles
418 # will update the largefiles.
425 # will update the largefiles.
419 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
426 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
420 partial, acceptremote, followcopies):
427 partial, acceptremote, followcopies):
421 overwrite = force and not branchmerge
428 overwrite = force and not branchmerge
422 actions, diverge, renamedelete = origfn(
429 actions, diverge, renamedelete = origfn(
423 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
430 repo, p1, p2, pas, branchmerge, force, partial, acceptremote,
424 followcopies)
431 followcopies)
425
432
426 if overwrite:
433 if overwrite:
427 return actions, diverge, renamedelete
434 return actions, diverge, renamedelete
428
435
429 # Convert to dictionary with filename as key and action as value.
436 # Convert to dictionary with filename as key and action as value.
430 lfiles = set()
437 lfiles = set()
431 actionbyfile = {}
438 actionbyfile = {}
432 for m, l in actions.iteritems():
439 for m, l in actions.iteritems():
433 for f, args, msg in l:
440 for f, args, msg in l:
434 actionbyfile[f] = m, args, msg
441 actionbyfile[f] = m, args, msg
435 splitstandin = f and lfutil.splitstandin(f)
442 splitstandin = f and lfutil.splitstandin(f)
436 if splitstandin in p1:
443 if splitstandin in p1:
437 lfiles.add(splitstandin)
444 lfiles.add(splitstandin)
438 elif lfutil.standin(f) in p1:
445 elif lfutil.standin(f) in p1:
439 lfiles.add(f)
446 lfiles.add(f)
440
447
441 for lfile in lfiles:
448 for lfile in lfiles:
442 standin = lfutil.standin(lfile)
449 standin = lfutil.standin(lfile)
443 (lm, largs, lmsg) = actionbyfile.get(lfile, (None, None, None))
450 (lm, largs, lmsg) = actionbyfile.get(lfile, (None, None, None))
444 (sm, sargs, smsg) = actionbyfile.get(standin, (None, None, None))
451 (sm, sargs, smsg) = actionbyfile.get(standin, (None, None, None))
445 if sm in ('g', 'dc') and lm != 'r':
452 if sm in ('g', 'dc') and lm != 'r':
446 # Case 1: normal file in the working copy, largefile in
453 # Case 1: normal file in the working copy, largefile in
447 # the second parent
454 # the second parent
448 usermsg = _('remote turned local normal file %s into a largefile\n'
455 usermsg = _('remote turned local normal file %s into a largefile\n'
449 'use (l)argefile or keep (n)ormal file?'
456 'use (l)argefile or keep (n)ormal file?'
450 '$$ &Largefile $$ &Normal file') % lfile
457 '$$ &Largefile $$ &Normal file') % lfile
451 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
458 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
452 actionbyfile[lfile] = ('r', None, 'replaced by standin')
459 actionbyfile[lfile] = ('r', None, 'replaced by standin')
453 actionbyfile[standin] = ('g', sargs, 'replaces standin')
460 actionbyfile[standin] = ('g', sargs, 'replaces standin')
454 else: # keep local normal file
461 else: # keep local normal file
455 actionbyfile[lfile] = ('k', None, 'replaces standin')
462 actionbyfile[lfile] = ('k', None, 'replaces standin')
456 if branchmerge:
463 if branchmerge:
457 actionbyfile[standin] = ('k', None,
464 actionbyfile[standin] = ('k', None,
458 'replaced by non-standin')
465 'replaced by non-standin')
459 else:
466 else:
460 actionbyfile[standin] = ('r', None,
467 actionbyfile[standin] = ('r', None,
461 'replaced by non-standin')
468 'replaced by non-standin')
462 elif lm in ('g', 'dc') and sm != 'r':
469 elif lm in ('g', 'dc') and sm != 'r':
463 # Case 2: largefile in the working copy, normal file in
470 # Case 2: largefile in the working copy, normal file in
464 # the second parent
471 # the second parent
465 usermsg = _('remote turned local largefile %s into a normal file\n'
472 usermsg = _('remote turned local largefile %s into a normal file\n'
466 'keep (l)argefile or use (n)ormal file?'
473 'keep (l)argefile or use (n)ormal file?'
467 '$$ &Largefile $$ &Normal file') % lfile
474 '$$ &Largefile $$ &Normal file') % lfile
468 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
475 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
469 if branchmerge:
476 if branchmerge:
470 # largefile can be restored from standin safely
477 # largefile can be restored from standin safely
471 actionbyfile[lfile] = ('k', None, 'replaced by standin')
478 actionbyfile[lfile] = ('k', None, 'replaced by standin')
472 actionbyfile[standin] = ('k', None, 'replaces standin')
479 actionbyfile[standin] = ('k', None, 'replaces standin')
473 else:
480 else:
474 # "lfile" should be marked as "removed" without
481 # "lfile" should be marked as "removed" without
475 # removal of itself
482 # removal of itself
476 actionbyfile[lfile] = ('lfmr', None,
483 actionbyfile[lfile] = ('lfmr', None,
477 'forget non-standin largefile')
484 'forget non-standin largefile')
478
485
479 # linear-merge should treat this largefile as 're-added'
486 # linear-merge should treat this largefile as 're-added'
480 actionbyfile[standin] = ('a', None, 'keep standin')
487 actionbyfile[standin] = ('a', None, 'keep standin')
481 else: # pick remote normal file
488 else: # pick remote normal file
482 actionbyfile[lfile] = ('g', largs, 'replaces standin')
489 actionbyfile[lfile] = ('g', largs, 'replaces standin')
483 actionbyfile[standin] = ('r', None, 'replaced by non-standin')
490 actionbyfile[standin] = ('r', None, 'replaced by non-standin')
484
491
485 # Convert back to dictionary-of-lists format
492 # Convert back to dictionary-of-lists format
486 for l in actions.itervalues():
493 for l in actions.itervalues():
487 l[:] = []
494 l[:] = []
488 actions['lfmr'] = []
495 actions['lfmr'] = []
489 for f, (m, args, msg) in actionbyfile.iteritems():
496 for f, (m, args, msg) in actionbyfile.iteritems():
490 actions[m].append((f, args, msg))
497 actions[m].append((f, args, msg))
491
498
492 return actions, diverge, renamedelete
499 return actions, diverge, renamedelete
493
500
494 def mergerecordupdates(orig, repo, actions, branchmerge):
501 def mergerecordupdates(orig, repo, actions, branchmerge):
495 if 'lfmr' in actions:
502 if 'lfmr' in actions:
496 # this should be executed before 'orig', to execute 'remove'
503 # this should be executed before 'orig', to execute 'remove'
497 # before all other actions
504 # before all other actions
498 for lfile, args, msg in actions['lfmr']:
505 for lfile, args, msg in actions['lfmr']:
499 repo.dirstate.remove(lfile)
506 repo.dirstate.remove(lfile)
500
507
501 return orig(repo, actions, branchmerge)
508 return orig(repo, actions, branchmerge)
502
509
503
510
504 # Override filemerge to prompt the user about how they wish to merge
511 # Override filemerge to prompt the user about how they wish to merge
505 # largefiles. This will handle identical edits without prompting the user.
512 # largefiles. This will handle identical edits without prompting the user.
506 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
513 def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca, labels=None):
507 if not lfutil.isstandin(orig):
514 if not lfutil.isstandin(orig):
508 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
515 return origfn(repo, mynode, orig, fcd, fco, fca, labels=labels)
509
516
510 ahash = fca.data().strip().lower()
517 ahash = fca.data().strip().lower()
511 dhash = fcd.data().strip().lower()
518 dhash = fcd.data().strip().lower()
512 ohash = fco.data().strip().lower()
519 ohash = fco.data().strip().lower()
513 if (ohash != ahash and
520 if (ohash != ahash and
514 ohash != dhash and
521 ohash != dhash and
515 (dhash == ahash or
522 (dhash == ahash or
516 repo.ui.promptchoice(
523 repo.ui.promptchoice(
517 _('largefile %s has a merge conflict\nancestor was %s\n'
524 _('largefile %s has a merge conflict\nancestor was %s\n'
518 'keep (l)ocal %s or\ntake (o)ther %s?'
525 'keep (l)ocal %s or\ntake (o)ther %s?'
519 '$$ &Local $$ &Other') %
526 '$$ &Local $$ &Other') %
520 (lfutil.splitstandin(orig), ahash, dhash, ohash),
527 (lfutil.splitstandin(orig), ahash, dhash, ohash),
521 0) == 1)):
528 0) == 1)):
522 repo.wwrite(fcd.path(), fco.data(), fco.flags())
529 repo.wwrite(fcd.path(), fco.data(), fco.flags())
523 return 0
530 return 0
524
531
525 # Copy first changes the matchers to match standins instead of
532 # Copy first changes the matchers to match standins instead of
526 # largefiles. Then it overrides util.copyfile in that function it
533 # largefiles. Then it overrides util.copyfile in that function it
527 # checks if the destination largefile already exists. It also keeps a
534 # checks if the destination largefile already exists. It also keeps a
528 # list of copied files so that the largefiles can be copied and the
535 # list of copied files so that the largefiles can be copied and the
529 # dirstate updated.
536 # dirstate updated.
530 def overridecopy(orig, ui, repo, pats, opts, rename=False):
537 def overridecopy(orig, ui, repo, pats, opts, rename=False):
531 # doesn't remove largefile on rename
538 # doesn't remove largefile on rename
532 if len(pats) < 2:
539 if len(pats) < 2:
533 # this isn't legal, let the original function deal with it
540 # this isn't legal, let the original function deal with it
534 return orig(ui, repo, pats, opts, rename)
541 return orig(ui, repo, pats, opts, rename)
535
542
536 def makestandin(relpath):
543 def makestandin(relpath):
537 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
544 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
538 return os.path.join(repo.wjoin(lfutil.standin(path)))
545 return os.path.join(repo.wjoin(lfutil.standin(path)))
539
546
540 fullpats = scmutil.expandpats(pats)
547 fullpats = scmutil.expandpats(pats)
541 dest = fullpats[-1]
548 dest = fullpats[-1]
542
549
543 if os.path.isdir(dest):
550 if os.path.isdir(dest):
544 if not os.path.isdir(makestandin(dest)):
551 if not os.path.isdir(makestandin(dest)):
545 os.makedirs(makestandin(dest))
552 os.makedirs(makestandin(dest))
546 # This could copy both lfiles and normal files in one command,
553 # This could copy both lfiles and normal files in one command,
547 # but we don't want to do that. First replace their matcher to
554 # but we don't want to do that. First replace their matcher to
548 # only match normal files and run it, then replace it to just
555 # only match normal files and run it, then replace it to just
549 # match largefiles and run it again.
556 # match largefiles and run it again.
550 nonormalfiles = False
557 nonormalfiles = False
551 nolfiles = False
558 nolfiles = False
552 installnormalfilesmatchfn(repo[None].manifest())
559 installnormalfilesmatchfn(repo[None].manifest())
553 try:
560 try:
554 try:
561 try:
555 result = orig(ui, repo, pats, opts, rename)
562 result = orig(ui, repo, pats, opts, rename)
556 except util.Abort, e:
563 except util.Abort, e:
557 if str(e) != _('no files to copy'):
564 if str(e) != _('no files to copy'):
558 raise e
565 raise e
559 else:
566 else:
560 nonormalfiles = True
567 nonormalfiles = True
561 result = 0
568 result = 0
562 finally:
569 finally:
563 restorematchfn()
570 restorematchfn()
564
571
565 # The first rename can cause our current working directory to be removed.
572 # The first rename can cause our current working directory to be removed.
566 # In that case there is nothing left to copy/rename so just quit.
573 # In that case there is nothing left to copy/rename so just quit.
567 try:
574 try:
568 repo.getcwd()
575 repo.getcwd()
569 except OSError:
576 except OSError:
570 return result
577 return result
571
578
572 try:
579 try:
573 try:
580 try:
574 # When we call orig below it creates the standins but we don't add
581 # When we call orig below it creates the standins but we don't add
575 # them to the dir state until later so lock during that time.
582 # them to the dir state until later so lock during that time.
576 wlock = repo.wlock()
583 wlock = repo.wlock()
577
584
578 manifest = repo[None].manifest()
585 manifest = repo[None].manifest()
579 def overridematch(ctx, pats=[], opts={}, globbed=False,
586 def overridematch(ctx, pats=[], opts={}, globbed=False,
580 default='relpath'):
587 default='relpath'):
581 newpats = []
588 newpats = []
582 # The patterns were previously mangled to add the standin
589 # The patterns were previously mangled to add the standin
583 # directory; we need to remove that now
590 # directory; we need to remove that now
584 for pat in pats:
591 for pat in pats:
585 if match_.patkind(pat) is None and lfutil.shortname in pat:
592 if match_.patkind(pat) is None and lfutil.shortname in pat:
586 newpats.append(pat.replace(lfutil.shortname, ''))
593 newpats.append(pat.replace(lfutil.shortname, ''))
587 else:
594 else:
588 newpats.append(pat)
595 newpats.append(pat)
589 match = oldmatch(ctx, newpats, opts, globbed, default)
596 match = oldmatch(ctx, newpats, opts, globbed, default)
590 m = copy.copy(match)
597 m = copy.copy(match)
591 lfile = lambda f: lfutil.standin(f) in manifest
598 lfile = lambda f: lfutil.standin(f) in manifest
592 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
599 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
593 m._fmap = set(m._files)
600 m._fmap = set(m._files)
594 origmatchfn = m.matchfn
601 origmatchfn = m.matchfn
595 m.matchfn = lambda f: (lfutil.isstandin(f) and
602 m.matchfn = lambda f: (lfutil.isstandin(f) and
596 (f in manifest) and
603 (f in manifest) and
597 origmatchfn(lfutil.splitstandin(f)) or
604 origmatchfn(lfutil.splitstandin(f)) or
598 None)
605 None)
599 return m
606 return m
600 oldmatch = installmatchfn(overridematch)
607 oldmatch = installmatchfn(overridematch)
601 listpats = []
608 listpats = []
602 for pat in pats:
609 for pat in pats:
603 if match_.patkind(pat) is not None:
610 if match_.patkind(pat) is not None:
604 listpats.append(pat)
611 listpats.append(pat)
605 else:
612 else:
606 listpats.append(makestandin(pat))
613 listpats.append(makestandin(pat))
607
614
608 try:
615 try:
609 origcopyfile = util.copyfile
616 origcopyfile = util.copyfile
610 copiedfiles = []
617 copiedfiles = []
611 def overridecopyfile(src, dest):
618 def overridecopyfile(src, dest):
612 if (lfutil.shortname in src and
619 if (lfutil.shortname in src and
613 dest.startswith(repo.wjoin(lfutil.shortname))):
620 dest.startswith(repo.wjoin(lfutil.shortname))):
614 destlfile = dest.replace(lfutil.shortname, '')
621 destlfile = dest.replace(lfutil.shortname, '')
615 if not opts['force'] and os.path.exists(destlfile):
622 if not opts['force'] and os.path.exists(destlfile):
616 raise IOError('',
623 raise IOError('',
617 _('destination largefile already exists'))
624 _('destination largefile already exists'))
618 copiedfiles.append((src, dest))
625 copiedfiles.append((src, dest))
619 origcopyfile(src, dest)
626 origcopyfile(src, dest)
620
627
621 util.copyfile = overridecopyfile
628 util.copyfile = overridecopyfile
622 result += orig(ui, repo, listpats, opts, rename)
629 result += orig(ui, repo, listpats, opts, rename)
623 finally:
630 finally:
624 util.copyfile = origcopyfile
631 util.copyfile = origcopyfile
625
632
626 lfdirstate = lfutil.openlfdirstate(ui, repo)
633 lfdirstate = lfutil.openlfdirstate(ui, repo)
627 for (src, dest) in copiedfiles:
634 for (src, dest) in copiedfiles:
628 if (lfutil.shortname in src and
635 if (lfutil.shortname in src and
629 dest.startswith(repo.wjoin(lfutil.shortname))):
636 dest.startswith(repo.wjoin(lfutil.shortname))):
630 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
637 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
631 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
638 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
632 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
639 destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
633 if not os.path.isdir(destlfiledir):
640 if not os.path.isdir(destlfiledir):
634 os.makedirs(destlfiledir)
641 os.makedirs(destlfiledir)
635 if rename:
642 if rename:
636 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
643 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
637
644
638 # The file is gone, but this deletes any empty parent
645 # The file is gone, but this deletes any empty parent
639 # directories as a side-effect.
646 # directories as a side-effect.
640 util.unlinkpath(repo.wjoin(srclfile), True)
647 util.unlinkpath(repo.wjoin(srclfile), True)
641 lfdirstate.remove(srclfile)
648 lfdirstate.remove(srclfile)
642 else:
649 else:
643 util.copyfile(repo.wjoin(srclfile),
650 util.copyfile(repo.wjoin(srclfile),
644 repo.wjoin(destlfile))
651 repo.wjoin(destlfile))
645
652
646 lfdirstate.add(destlfile)
653 lfdirstate.add(destlfile)
647 lfdirstate.write()
654 lfdirstate.write()
648 except util.Abort, e:
655 except util.Abort, e:
649 if str(e) != _('no files to copy'):
656 if str(e) != _('no files to copy'):
650 raise e
657 raise e
651 else:
658 else:
652 nolfiles = True
659 nolfiles = True
653 finally:
660 finally:
654 restorematchfn()
661 restorematchfn()
655 wlock.release()
662 wlock.release()
656
663
657 if nolfiles and nonormalfiles:
664 if nolfiles and nonormalfiles:
658 raise util.Abort(_('no files to copy'))
665 raise util.Abort(_('no files to copy'))
659
666
660 return result
667 return result
661
668
662 # When the user calls revert, we have to be careful to not revert any
669 # When the user calls revert, we have to be careful to not revert any
663 # changes to other largefiles accidentally. This means we have to keep
670 # changes to other largefiles accidentally. This means we have to keep
664 # track of the largefiles that are being reverted so we only pull down
671 # track of the largefiles that are being reverted so we only pull down
665 # the necessary largefiles.
672 # the necessary largefiles.
666 #
673 #
667 # Standins are only updated (to match the hash of largefiles) before
674 # Standins are only updated (to match the hash of largefiles) before
668 # commits. Update the standins then run the original revert, changing
675 # commits. Update the standins then run the original revert, changing
669 # the matcher to hit standins instead of largefiles. Based on the
676 # the matcher to hit standins instead of largefiles. Based on the
670 # resulting standins update the largefiles.
677 # resulting standins update the largefiles.
671 def overriderevert(orig, ui, repo, *pats, **opts):
678 def overriderevert(orig, ui, repo, *pats, **opts):
672 # Because we put the standins in a bad state (by updating them)
679 # Because we put the standins in a bad state (by updating them)
673 # and then return them to a correct state we need to lock to
680 # and then return them to a correct state we need to lock to
674 # prevent others from changing them in their incorrect state.
681 # prevent others from changing them in their incorrect state.
675 wlock = repo.wlock()
682 wlock = repo.wlock()
676 try:
683 try:
677 lfdirstate = lfutil.openlfdirstate(ui, repo)
684 lfdirstate = lfutil.openlfdirstate(ui, repo)
678 s = lfutil.lfdirstatestatus(lfdirstate, repo)
685 s = lfutil.lfdirstatestatus(lfdirstate, repo)
679 lfdirstate.write()
686 lfdirstate.write()
680 for lfile in s.modified:
687 for lfile in s.modified:
681 lfutil.updatestandin(repo, lfutil.standin(lfile))
688 lfutil.updatestandin(repo, lfutil.standin(lfile))
682 for lfile in s.deleted:
689 for lfile in s.deleted:
683 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
690 if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
684 os.unlink(repo.wjoin(lfutil.standin(lfile)))
691 os.unlink(repo.wjoin(lfutil.standin(lfile)))
685
692
686 oldstandins = lfutil.getstandinsstate(repo)
693 oldstandins = lfutil.getstandinsstate(repo)
687
694
688 def overridematch(ctx, pats=[], opts={}, globbed=False,
695 def overridematch(ctx, pats=[], opts={}, globbed=False,
689 default='relpath'):
696 default='relpath'):
690 match = oldmatch(ctx, pats, opts, globbed, default)
697 match = oldmatch(ctx, pats, opts, globbed, default)
691 m = copy.copy(match)
698 m = copy.copy(match)
692 def tostandin(f):
699 def tostandin(f):
693 if lfutil.standin(f) in ctx:
700 if lfutil.standin(f) in ctx:
694 return lfutil.standin(f)
701 return lfutil.standin(f)
695 elif lfutil.standin(f) in repo[None]:
702 elif lfutil.standin(f) in repo[None]:
696 return None
703 return None
697 return f
704 return f
698 m._files = [tostandin(f) for f in m._files]
705 m._files = [tostandin(f) for f in m._files]
699 m._files = [f for f in m._files if f is not None]
706 m._files = [f for f in m._files if f is not None]
700 m._fmap = set(m._files)
707 m._fmap = set(m._files)
701 origmatchfn = m.matchfn
708 origmatchfn = m.matchfn
702 def matchfn(f):
709 def matchfn(f):
703 if lfutil.isstandin(f):
710 if lfutil.isstandin(f):
704 return (origmatchfn(lfutil.splitstandin(f)) and
711 return (origmatchfn(lfutil.splitstandin(f)) and
705 (f in repo[None] or f in ctx))
712 (f in repo[None] or f in ctx))
706 return origmatchfn(f)
713 return origmatchfn(f)
707 m.matchfn = matchfn
714 m.matchfn = matchfn
708 return m
715 return m
709 oldmatch = installmatchfn(overridematch)
716 oldmatch = installmatchfn(overridematch)
710 try:
717 try:
711 orig(ui, repo, *pats, **opts)
718 orig(ui, repo, *pats, **opts)
712 finally:
719 finally:
713 restorematchfn()
720 restorematchfn()
714
721
715 newstandins = lfutil.getstandinsstate(repo)
722 newstandins = lfutil.getstandinsstate(repo)
716 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
723 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
717 # lfdirstate should be 'normallookup'-ed for updated files,
724 # lfdirstate should be 'normallookup'-ed for updated files,
718 # because reverting doesn't touch dirstate for 'normal' files
725 # because reverting doesn't touch dirstate for 'normal' files
719 # when target revision is explicitly specified: in such case,
726 # when target revision is explicitly specified: in such case,
720 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
727 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
721 # of target (standin) file.
728 # of target (standin) file.
722 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
729 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
723 normallookup=True)
730 normallookup=True)
724
731
725 finally:
732 finally:
726 wlock.release()
733 wlock.release()
727
734
728 # after pulling changesets, we need to take some extra care to get
735 # after pulling changesets, we need to take some extra care to get
729 # largefiles updated remotely
736 # largefiles updated remotely
730 def overridepull(orig, ui, repo, source=None, **opts):
737 def overridepull(orig, ui, repo, source=None, **opts):
731 revsprepull = len(repo)
738 revsprepull = len(repo)
732 if not source:
739 if not source:
733 source = 'default'
740 source = 'default'
734 repo.lfpullsource = source
741 repo.lfpullsource = source
735 result = orig(ui, repo, source, **opts)
742 result = orig(ui, repo, source, **opts)
736 revspostpull = len(repo)
743 revspostpull = len(repo)
737 lfrevs = opts.get('lfrev', [])
744 lfrevs = opts.get('lfrev', [])
738 if opts.get('all_largefiles'):
745 if opts.get('all_largefiles'):
739 lfrevs.append('pulled()')
746 lfrevs.append('pulled()')
740 if lfrevs and revspostpull > revsprepull:
747 if lfrevs and revspostpull > revsprepull:
741 numcached = 0
748 numcached = 0
742 repo.firstpulled = revsprepull # for pulled() revset expression
749 repo.firstpulled = revsprepull # for pulled() revset expression
743 try:
750 try:
744 for rev in scmutil.revrange(repo, lfrevs):
751 for rev in scmutil.revrange(repo, lfrevs):
745 ui.note(_('pulling largefiles for revision %s\n') % rev)
752 ui.note(_('pulling largefiles for revision %s\n') % rev)
746 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
753 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
747 numcached += len(cached)
754 numcached += len(cached)
748 finally:
755 finally:
749 del repo.firstpulled
756 del repo.firstpulled
750 ui.status(_("%d largefiles cached\n") % numcached)
757 ui.status(_("%d largefiles cached\n") % numcached)
751 return result
758 return result
752
759
753 def pulledrevsetsymbol(repo, subset, x):
760 def pulledrevsetsymbol(repo, subset, x):
754 """``pulled()``
761 """``pulled()``
755 Changesets that just has been pulled.
762 Changesets that just has been pulled.
756
763
757 Only available with largefiles from pull --lfrev expressions.
764 Only available with largefiles from pull --lfrev expressions.
758
765
759 .. container:: verbose
766 .. container:: verbose
760
767
761 Some examples:
768 Some examples:
762
769
763 - pull largefiles for all new changesets::
770 - pull largefiles for all new changesets::
764
771
765 hg pull -lfrev "pulled()"
772 hg pull -lfrev "pulled()"
766
773
767 - pull largefiles for all new branch heads::
774 - pull largefiles for all new branch heads::
768
775
769 hg pull -lfrev "head(pulled()) and not closed()"
776 hg pull -lfrev "head(pulled()) and not closed()"
770
777
771 """
778 """
772
779
773 try:
780 try:
774 firstpulled = repo.firstpulled
781 firstpulled = repo.firstpulled
775 except AttributeError:
782 except AttributeError:
776 raise util.Abort(_("pulled() only available in --lfrev"))
783 raise util.Abort(_("pulled() only available in --lfrev"))
777 return revset.baseset([r for r in subset if r >= firstpulled])
784 return revset.baseset([r for r in subset if r >= firstpulled])
778
785
779 def overrideclone(orig, ui, source, dest=None, **opts):
786 def overrideclone(orig, ui, source, dest=None, **opts):
780 d = dest
787 d = dest
781 if d is None:
788 if d is None:
782 d = hg.defaultdest(source)
789 d = hg.defaultdest(source)
783 if opts.get('all_largefiles') and not hg.islocal(d):
790 if opts.get('all_largefiles') and not hg.islocal(d):
784 raise util.Abort(_(
791 raise util.Abort(_(
785 '--all-largefiles is incompatible with non-local destination %s') %
792 '--all-largefiles is incompatible with non-local destination %s') %
786 d)
793 d)
787
794
788 return orig(ui, source, dest, **opts)
795 return orig(ui, source, dest, **opts)
789
796
790 def hgclone(orig, ui, opts, *args, **kwargs):
797 def hgclone(orig, ui, opts, *args, **kwargs):
791 result = orig(ui, opts, *args, **kwargs)
798 result = orig(ui, opts, *args, **kwargs)
792
799
793 if result is not None:
800 if result is not None:
794 sourcerepo, destrepo = result
801 sourcerepo, destrepo = result
795 repo = destrepo.local()
802 repo = destrepo.local()
796
803
797 # Caching is implicitly limited to 'rev' option, since the dest repo was
804 # Caching is implicitly limited to 'rev' option, since the dest repo was
798 # truncated at that point. The user may expect a download count with
805 # truncated at that point. The user may expect a download count with
799 # this option, so attempt whether or not this is a largefile repo.
806 # this option, so attempt whether or not this is a largefile repo.
800 if opts.get('all_largefiles'):
807 if opts.get('all_largefiles'):
801 success, missing = lfcommands.downloadlfiles(ui, repo, None)
808 success, missing = lfcommands.downloadlfiles(ui, repo, None)
802
809
803 if missing != 0:
810 if missing != 0:
804 return None
811 return None
805
812
806 return result
813 return result
807
814
808 def overriderebase(orig, ui, repo, **opts):
815 def overriderebase(orig, ui, repo, **opts):
809 resuming = opts.get('continue')
816 resuming = opts.get('continue')
810 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
817 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
811 repo._lfstatuswriters.append(lambda *msg, **opts: None)
818 repo._lfstatuswriters.append(lambda *msg, **opts: None)
812 try:
819 try:
813 return orig(ui, repo, **opts)
820 return orig(ui, repo, **opts)
814 finally:
821 finally:
815 repo._lfstatuswriters.pop()
822 repo._lfstatuswriters.pop()
816 repo._lfcommithooks.pop()
823 repo._lfcommithooks.pop()
817
824
818 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
825 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
819 prefix=None, mtime=None, subrepos=None):
826 prefix=None, mtime=None, subrepos=None):
820 # No need to lock because we are only reading history and
827 # No need to lock because we are only reading history and
821 # largefile caches, neither of which are modified.
828 # largefile caches, neither of which are modified.
822 lfcommands.cachelfiles(repo.ui, repo, node)
829 lfcommands.cachelfiles(repo.ui, repo, node)
823
830
824 if kind not in archival.archivers:
831 if kind not in archival.archivers:
825 raise util.Abort(_("unknown archive type '%s'") % kind)
832 raise util.Abort(_("unknown archive type '%s'") % kind)
826
833
827 ctx = repo[node]
834 ctx = repo[node]
828
835
829 if kind == 'files':
836 if kind == 'files':
830 if prefix:
837 if prefix:
831 raise util.Abort(
838 raise util.Abort(
832 _('cannot give prefix when archiving to files'))
839 _('cannot give prefix when archiving to files'))
833 else:
840 else:
834 prefix = archival.tidyprefix(dest, kind, prefix)
841 prefix = archival.tidyprefix(dest, kind, prefix)
835
842
836 def write(name, mode, islink, getdata):
843 def write(name, mode, islink, getdata):
837 if matchfn and not matchfn(name):
844 if matchfn and not matchfn(name):
838 return
845 return
839 data = getdata()
846 data = getdata()
840 if decode:
847 if decode:
841 data = repo.wwritedata(name, data)
848 data = repo.wwritedata(name, data)
842 archiver.addfile(prefix + name, mode, islink, data)
849 archiver.addfile(prefix + name, mode, islink, data)
843
850
844 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
851 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
845
852
846 if repo.ui.configbool("ui", "archivemeta", True):
853 if repo.ui.configbool("ui", "archivemeta", True):
847 def metadata():
854 def metadata():
848 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
855 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
849 hex(repo.changelog.node(0)), hex(node), ctx.branch())
856 hex(repo.changelog.node(0)), hex(node), ctx.branch())
850
857
851 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
858 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
852 if repo.tagtype(t) == 'global')
859 if repo.tagtype(t) == 'global')
853 if not tags:
860 if not tags:
854 repo.ui.pushbuffer()
861 repo.ui.pushbuffer()
855 opts = {'template': '{latesttag}\n{latesttagdistance}',
862 opts = {'template': '{latesttag}\n{latesttagdistance}',
856 'style': '', 'patch': None, 'git': None}
863 'style': '', 'patch': None, 'git': None}
857 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
864 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
858 ltags, dist = repo.ui.popbuffer().split('\n')
865 ltags, dist = repo.ui.popbuffer().split('\n')
859 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
866 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
860 tags += 'latesttagdistance: %s\n' % dist
867 tags += 'latesttagdistance: %s\n' % dist
861
868
862 return base + tags
869 return base + tags
863
870
864 write('.hg_archival.txt', 0644, False, metadata)
871 write('.hg_archival.txt', 0644, False, metadata)
865
872
866 for f in ctx:
873 for f in ctx:
867 ff = ctx.flags(f)
874 ff = ctx.flags(f)
868 getdata = ctx[f].data
875 getdata = ctx[f].data
869 if lfutil.isstandin(f):
876 if lfutil.isstandin(f):
870 path = lfutil.findfile(repo, getdata().strip())
877 path = lfutil.findfile(repo, getdata().strip())
871 if path is None:
878 if path is None:
872 raise util.Abort(
879 raise util.Abort(
873 _('largefile %s not found in repo store or system cache')
880 _('largefile %s not found in repo store or system cache')
874 % lfutil.splitstandin(f))
881 % lfutil.splitstandin(f))
875 f = lfutil.splitstandin(f)
882 f = lfutil.splitstandin(f)
876
883
877 def getdatafn():
884 def getdatafn():
878 fd = None
885 fd = None
879 try:
886 try:
880 fd = open(path, 'rb')
887 fd = open(path, 'rb')
881 return fd.read()
888 return fd.read()
882 finally:
889 finally:
883 if fd:
890 if fd:
884 fd.close()
891 fd.close()
885
892
886 getdata = getdatafn
893 getdata = getdatafn
887 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
894 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
888
895
889 if subrepos:
896 if subrepos:
890 for subpath in sorted(ctx.substate):
897 for subpath in sorted(ctx.substate):
891 sub = ctx.sub(subpath)
898 sub = ctx.sub(subpath)
892 submatch = match_.narrowmatcher(subpath, matchfn)
899 submatch = match_.narrowmatcher(subpath, matchfn)
893 sub.archive(archiver, prefix, submatch)
900 sub.archive(archiver, prefix, submatch)
894
901
895 archiver.done()
902 archiver.done()
896
903
897 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
904 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None):
898 repo._get(repo._state + ('hg',))
905 repo._get(repo._state + ('hg',))
899 rev = repo._state[1]
906 rev = repo._state[1]
900 ctx = repo._repo[rev]
907 ctx = repo._repo[rev]
901
908
902 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
909 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
903
910
904 def write(name, mode, islink, getdata):
911 def write(name, mode, islink, getdata):
905 # At this point, the standin has been replaced with the largefile name,
912 # At this point, the standin has been replaced with the largefile name,
906 # so the normal matcher works here without the lfutil variants.
913 # so the normal matcher works here without the lfutil variants.
907 if match and not match(f):
914 if match and not match(f):
908 return
915 return
909 data = getdata()
916 data = getdata()
910
917
911 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
918 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
912
919
913 for f in ctx:
920 for f in ctx:
914 ff = ctx.flags(f)
921 ff = ctx.flags(f)
915 getdata = ctx[f].data
922 getdata = ctx[f].data
916 if lfutil.isstandin(f):
923 if lfutil.isstandin(f):
917 path = lfutil.findfile(repo._repo, getdata().strip())
924 path = lfutil.findfile(repo._repo, getdata().strip())
918 if path is None:
925 if path is None:
919 raise util.Abort(
926 raise util.Abort(
920 _('largefile %s not found in repo store or system cache')
927 _('largefile %s not found in repo store or system cache')
921 % lfutil.splitstandin(f))
928 % lfutil.splitstandin(f))
922 f = lfutil.splitstandin(f)
929 f = lfutil.splitstandin(f)
923
930
924 def getdatafn():
931 def getdatafn():
925 fd = None
932 fd = None
926 try:
933 try:
927 fd = open(os.path.join(prefix, path), 'rb')
934 fd = open(os.path.join(prefix, path), 'rb')
928 return fd.read()
935 return fd.read()
929 finally:
936 finally:
930 if fd:
937 if fd:
931 fd.close()
938 fd.close()
932
939
933 getdata = getdatafn
940 getdata = getdatafn
934
941
935 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
942 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
936
943
937 for subpath in sorted(ctx.substate):
944 for subpath in sorted(ctx.substate):
938 sub = ctx.sub(subpath)
945 sub = ctx.sub(subpath)
939 submatch = match_.narrowmatcher(subpath, match)
946 submatch = match_.narrowmatcher(subpath, match)
940 sub.archive(archiver, os.path.join(prefix, repo._path) + '/', submatch)
947 sub.archive(archiver, os.path.join(prefix, repo._path) + '/', submatch)
941
948
942 # If a largefile is modified, the change is not reflected in its
949 # If a largefile is modified, the change is not reflected in its
943 # standin until a commit. cmdutil.bailifchanged() raises an exception
950 # standin until a commit. cmdutil.bailifchanged() raises an exception
944 # if the repo has uncommitted changes. Wrap it to also check if
951 # if the repo has uncommitted changes. Wrap it to also check if
945 # largefiles were changed. This is used by bisect, backout and fetch.
952 # largefiles were changed. This is used by bisect, backout and fetch.
946 def overridebailifchanged(orig, repo):
953 def overridebailifchanged(orig, repo):
947 orig(repo)
954 orig(repo)
948 repo.lfstatus = True
955 repo.lfstatus = True
949 s = repo.status()
956 s = repo.status()
950 repo.lfstatus = False
957 repo.lfstatus = False
951 if s.modified or s.added or s.removed or s.deleted:
958 if s.modified or s.added or s.removed or s.deleted:
952 raise util.Abort(_('uncommitted changes'))
959 raise util.Abort(_('uncommitted changes'))
953
960
954 def overrideforget(orig, ui, repo, *pats, **opts):
961 def overrideforget(orig, ui, repo, *pats, **opts):
955 installnormalfilesmatchfn(repo[None].manifest())
962 installnormalfilesmatchfn(repo[None].manifest())
956 result = orig(ui, repo, *pats, **opts)
963 result = orig(ui, repo, *pats, **opts)
957 restorematchfn()
964 restorematchfn()
958 m = scmutil.match(repo[None], pats, opts)
965 m = scmutil.match(repo[None], pats, opts)
959
966
960 try:
967 try:
961 repo.lfstatus = True
968 repo.lfstatus = True
962 s = repo.status(match=m, clean=True)
969 s = repo.status(match=m, clean=True)
963 finally:
970 finally:
964 repo.lfstatus = False
971 repo.lfstatus = False
965 forget = sorted(s.modified + s.added + s.deleted + s.clean)
972 forget = sorted(s.modified + s.added + s.deleted + s.clean)
966 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
973 forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
967
974
968 for f in forget:
975 for f in forget:
969 if lfutil.standin(f) not in repo.dirstate and not \
976 if lfutil.standin(f) not in repo.dirstate and not \
970 os.path.isdir(m.rel(lfutil.standin(f))):
977 os.path.isdir(m.rel(lfutil.standin(f))):
971 ui.warn(_('not removing %s: file is already untracked\n')
978 ui.warn(_('not removing %s: file is already untracked\n')
972 % m.rel(f))
979 % m.rel(f))
973 result = 1
980 result = 1
974
981
975 for f in forget:
982 for f in forget:
976 if ui.verbose or not m.exact(f):
983 if ui.verbose or not m.exact(f):
977 ui.status(_('removing %s\n') % m.rel(f))
984 ui.status(_('removing %s\n') % m.rel(f))
978
985
979 # Need to lock because standin files are deleted then removed from the
986 # Need to lock because standin files are deleted then removed from the
980 # repository and we could race in-between.
987 # repository and we could race in-between.
981 wlock = repo.wlock()
988 wlock = repo.wlock()
982 try:
989 try:
983 lfdirstate = lfutil.openlfdirstate(ui, repo)
990 lfdirstate = lfutil.openlfdirstate(ui, repo)
984 for f in forget:
991 for f in forget:
985 if lfdirstate[f] == 'a':
992 if lfdirstate[f] == 'a':
986 lfdirstate.drop(f)
993 lfdirstate.drop(f)
987 else:
994 else:
988 lfdirstate.remove(f)
995 lfdirstate.remove(f)
989 lfdirstate.write()
996 lfdirstate.write()
990 standins = [lfutil.standin(f) for f in forget]
997 standins = [lfutil.standin(f) for f in forget]
991 for f in standins:
998 for f in standins:
992 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
999 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
993 repo[None].forget(standins)
1000 repo[None].forget(standins)
994 finally:
1001 finally:
995 wlock.release()
1002 wlock.release()
996
1003
997 return result
1004 return result
998
1005
999 def _getoutgoings(repo, other, missing, addfunc):
1006 def _getoutgoings(repo, other, missing, addfunc):
1000 """get pairs of filename and largefile hash in outgoing revisions
1007 """get pairs of filename and largefile hash in outgoing revisions
1001 in 'missing'.
1008 in 'missing'.
1002
1009
1003 largefiles already existing on 'other' repository are ignored.
1010 largefiles already existing on 'other' repository are ignored.
1004
1011
1005 'addfunc' is invoked with each unique pairs of filename and
1012 'addfunc' is invoked with each unique pairs of filename and
1006 largefile hash value.
1013 largefile hash value.
1007 """
1014 """
1008 knowns = set()
1015 knowns = set()
1009 lfhashes = set()
1016 lfhashes = set()
1010 def dedup(fn, lfhash):
1017 def dedup(fn, lfhash):
1011 k = (fn, lfhash)
1018 k = (fn, lfhash)
1012 if k not in knowns:
1019 if k not in knowns:
1013 knowns.add(k)
1020 knowns.add(k)
1014 lfhashes.add(lfhash)
1021 lfhashes.add(lfhash)
1015 lfutil.getlfilestoupload(repo, missing, dedup)
1022 lfutil.getlfilestoupload(repo, missing, dedup)
1016 if lfhashes:
1023 if lfhashes:
1017 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1024 lfexists = basestore._openstore(repo, other).exists(lfhashes)
1018 for fn, lfhash in knowns:
1025 for fn, lfhash in knowns:
1019 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1026 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1020 addfunc(fn, lfhash)
1027 addfunc(fn, lfhash)
1021
1028
1022 def outgoinghook(ui, repo, other, opts, missing):
1029 def outgoinghook(ui, repo, other, opts, missing):
1023 if opts.pop('large', None):
1030 if opts.pop('large', None):
1024 lfhashes = set()
1031 lfhashes = set()
1025 if ui.debugflag:
1032 if ui.debugflag:
1026 toupload = {}
1033 toupload = {}
1027 def addfunc(fn, lfhash):
1034 def addfunc(fn, lfhash):
1028 if fn not in toupload:
1035 if fn not in toupload:
1029 toupload[fn] = []
1036 toupload[fn] = []
1030 toupload[fn].append(lfhash)
1037 toupload[fn].append(lfhash)
1031 lfhashes.add(lfhash)
1038 lfhashes.add(lfhash)
1032 def showhashes(fn):
1039 def showhashes(fn):
1033 for lfhash in sorted(toupload[fn]):
1040 for lfhash in sorted(toupload[fn]):
1034 ui.debug(' %s\n' % (lfhash))
1041 ui.debug(' %s\n' % (lfhash))
1035 else:
1042 else:
1036 toupload = set()
1043 toupload = set()
1037 def addfunc(fn, lfhash):
1044 def addfunc(fn, lfhash):
1038 toupload.add(fn)
1045 toupload.add(fn)
1039 lfhashes.add(lfhash)
1046 lfhashes.add(lfhash)
1040 def showhashes(fn):
1047 def showhashes(fn):
1041 pass
1048 pass
1042 _getoutgoings(repo, other, missing, addfunc)
1049 _getoutgoings(repo, other, missing, addfunc)
1043
1050
1044 if not toupload:
1051 if not toupload:
1045 ui.status(_('largefiles: no files to upload\n'))
1052 ui.status(_('largefiles: no files to upload\n'))
1046 else:
1053 else:
1047 ui.status(_('largefiles to upload (%d entities):\n')
1054 ui.status(_('largefiles to upload (%d entities):\n')
1048 % (len(lfhashes)))
1055 % (len(lfhashes)))
1049 for file in sorted(toupload):
1056 for file in sorted(toupload):
1050 ui.status(lfutil.splitstandin(file) + '\n')
1057 ui.status(lfutil.splitstandin(file) + '\n')
1051 showhashes(file)
1058 showhashes(file)
1052 ui.status('\n')
1059 ui.status('\n')
1053
1060
1054 def summaryremotehook(ui, repo, opts, changes):
1061 def summaryremotehook(ui, repo, opts, changes):
1055 largeopt = opts.get('large', False)
1062 largeopt = opts.get('large', False)
1056 if changes is None:
1063 if changes is None:
1057 if largeopt:
1064 if largeopt:
1058 return (False, True) # only outgoing check is needed
1065 return (False, True) # only outgoing check is needed
1059 else:
1066 else:
1060 return (False, False)
1067 return (False, False)
1061 elif largeopt:
1068 elif largeopt:
1062 url, branch, peer, outgoing = changes[1]
1069 url, branch, peer, outgoing = changes[1]
1063 if peer is None:
1070 if peer is None:
1064 # i18n: column positioning for "hg summary"
1071 # i18n: column positioning for "hg summary"
1065 ui.status(_('largefiles: (no remote repo)\n'))
1072 ui.status(_('largefiles: (no remote repo)\n'))
1066 return
1073 return
1067
1074
1068 toupload = set()
1075 toupload = set()
1069 lfhashes = set()
1076 lfhashes = set()
1070 def addfunc(fn, lfhash):
1077 def addfunc(fn, lfhash):
1071 toupload.add(fn)
1078 toupload.add(fn)
1072 lfhashes.add(lfhash)
1079 lfhashes.add(lfhash)
1073 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1080 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1074
1081
1075 if not toupload:
1082 if not toupload:
1076 # i18n: column positioning for "hg summary"
1083 # i18n: column positioning for "hg summary"
1077 ui.status(_('largefiles: (no files to upload)\n'))
1084 ui.status(_('largefiles: (no files to upload)\n'))
1078 else:
1085 else:
1079 # i18n: column positioning for "hg summary"
1086 # i18n: column positioning for "hg summary"
1080 ui.status(_('largefiles: %d entities for %d files to upload\n')
1087 ui.status(_('largefiles: %d entities for %d files to upload\n')
1081 % (len(lfhashes), len(toupload)))
1088 % (len(lfhashes), len(toupload)))
1082
1089
1083 def overridesummary(orig, ui, repo, *pats, **opts):
1090 def overridesummary(orig, ui, repo, *pats, **opts):
1084 try:
1091 try:
1085 repo.lfstatus = True
1092 repo.lfstatus = True
1086 orig(ui, repo, *pats, **opts)
1093 orig(ui, repo, *pats, **opts)
1087 finally:
1094 finally:
1088 repo.lfstatus = False
1095 repo.lfstatus = False
1089
1096
1090 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1097 def scmutiladdremove(orig, repo, matcher, prefix, opts={}, dry_run=None,
1091 similarity=None):
1098 similarity=None):
1092 if not lfutil.islfilesrepo(repo):
1099 if not lfutil.islfilesrepo(repo):
1093 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1100 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1094 # Get the list of missing largefiles so we can remove them
1101 # Get the list of missing largefiles so we can remove them
1095 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1102 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1096 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1103 unsure, s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [],
1097 False, False, False)
1104 False, False, False)
1098
1105
1099 # Call into the normal remove code, but the removing of the standin, we want
1106 # Call into the normal remove code, but the removing of the standin, we want
1100 # to have handled by original addremove. Monkey patching here makes sure
1107 # to have handled by original addremove. Monkey patching here makes sure
1101 # we don't remove the standin in the largefiles code, preventing a very
1108 # we don't remove the standin in the largefiles code, preventing a very
1102 # confused state later.
1109 # confused state later.
1103 if s.deleted:
1110 if s.deleted:
1104 m = [repo.wjoin(f) for f in s.deleted]
1111 m = [repo.wjoin(f) for f in s.deleted]
1105 removelargefiles(repo.ui, repo, True, *m, **opts)
1112 removelargefiles(repo.ui, repo, True, *m, **opts)
1106 # Call into the normal add code, and any files that *should* be added as
1113 # Call into the normal add code, and any files that *should* be added as
1107 # largefiles will be
1114 # largefiles will be
1108 addlargefiles(repo.ui, repo, matcher, **opts)
1115 addlargefiles(repo.ui, repo, matcher, **opts)
1109 # Now that we've handled largefiles, hand off to the original addremove
1116 # Now that we've handled largefiles, hand off to the original addremove
1110 # function to take care of the rest. Make sure it doesn't do anything with
1117 # function to take care of the rest. Make sure it doesn't do anything with
1111 # largefiles by passing a matcher that will ignore them.
1118 # largefiles by passing a matcher that will ignore them.
1112 matcher = composenormalfilematcher(matcher, repo[None].manifest())
1119 matcher = composenormalfilematcher(matcher, repo[None].manifest())
1113 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1120 return orig(repo, matcher, prefix, opts, dry_run, similarity)
1114
1121
1115 # Calling purge with --all will cause the largefiles to be deleted.
1122 # Calling purge with --all will cause the largefiles to be deleted.
1116 # Override repo.status to prevent this from happening.
1123 # Override repo.status to prevent this from happening.
1117 def overridepurge(orig, ui, repo, *dirs, **opts):
1124 def overridepurge(orig, ui, repo, *dirs, **opts):
1118 oldstatus = repo.status
1125 oldstatus = repo.status
1119 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1126 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1120 clean=False, unknown=False, listsubrepos=False):
1127 clean=False, unknown=False, listsubrepos=False):
1121 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1128 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1122 listsubrepos)
1129 listsubrepos)
1123 lfdirstate = lfutil.openlfdirstate(ui, repo)
1130 lfdirstate = lfutil.openlfdirstate(ui, repo)
1124 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1131 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1125 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1132 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1126 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1133 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1127 unknown, ignored, r.clean)
1134 unknown, ignored, r.clean)
1128 repo.status = overridestatus
1135 repo.status = overridestatus
1129 orig(ui, repo, *dirs, **opts)
1136 orig(ui, repo, *dirs, **opts)
1130 repo.status = oldstatus
1137 repo.status = oldstatus
1131 def overriderollback(orig, ui, repo, **opts):
1138 def overriderollback(orig, ui, repo, **opts):
1132 wlock = repo.wlock()
1139 wlock = repo.wlock()
1133 try:
1140 try:
1134 before = repo.dirstate.parents()
1141 before = repo.dirstate.parents()
1135 orphans = set(f for f in repo.dirstate
1142 orphans = set(f for f in repo.dirstate
1136 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1143 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1137 result = orig(ui, repo, **opts)
1144 result = orig(ui, repo, **opts)
1138 after = repo.dirstate.parents()
1145 after = repo.dirstate.parents()
1139 if before == after:
1146 if before == after:
1140 return result # no need to restore standins
1147 return result # no need to restore standins
1141
1148
1142 pctx = repo['.']
1149 pctx = repo['.']
1143 for f in repo.dirstate:
1150 for f in repo.dirstate:
1144 if lfutil.isstandin(f):
1151 if lfutil.isstandin(f):
1145 orphans.discard(f)
1152 orphans.discard(f)
1146 if repo.dirstate[f] == 'r':
1153 if repo.dirstate[f] == 'r':
1147 repo.wvfs.unlinkpath(f, ignoremissing=True)
1154 repo.wvfs.unlinkpath(f, ignoremissing=True)
1148 elif f in pctx:
1155 elif f in pctx:
1149 fctx = pctx[f]
1156 fctx = pctx[f]
1150 repo.wwrite(f, fctx.data(), fctx.flags())
1157 repo.wwrite(f, fctx.data(), fctx.flags())
1151 else:
1158 else:
1152 # content of standin is not so important in 'a',
1159 # content of standin is not so important in 'a',
1153 # 'm' or 'n' (coming from the 2nd parent) cases
1160 # 'm' or 'n' (coming from the 2nd parent) cases
1154 lfutil.writestandin(repo, f, '', False)
1161 lfutil.writestandin(repo, f, '', False)
1155 for standin in orphans:
1162 for standin in orphans:
1156 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1163 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1157
1164
1158 lfdirstate = lfutil.openlfdirstate(ui, repo)
1165 lfdirstate = lfutil.openlfdirstate(ui, repo)
1159 orphans = set(lfdirstate)
1166 orphans = set(lfdirstate)
1160 lfiles = lfutil.listlfiles(repo)
1167 lfiles = lfutil.listlfiles(repo)
1161 for file in lfiles:
1168 for file in lfiles:
1162 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1169 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1163 orphans.discard(file)
1170 orphans.discard(file)
1164 for lfile in orphans:
1171 for lfile in orphans:
1165 lfdirstate.drop(lfile)
1172 lfdirstate.drop(lfile)
1166 lfdirstate.write()
1173 lfdirstate.write()
1167 finally:
1174 finally:
1168 wlock.release()
1175 wlock.release()
1169 return result
1176 return result
1170
1177
1171 def overridetransplant(orig, ui, repo, *revs, **opts):
1178 def overridetransplant(orig, ui, repo, *revs, **opts):
1172 resuming = opts.get('continue')
1179 resuming = opts.get('continue')
1173 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1180 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1174 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1181 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1175 try:
1182 try:
1176 result = orig(ui, repo, *revs, **opts)
1183 result = orig(ui, repo, *revs, **opts)
1177 finally:
1184 finally:
1178 repo._lfstatuswriters.pop()
1185 repo._lfstatuswriters.pop()
1179 repo._lfcommithooks.pop()
1186 repo._lfcommithooks.pop()
1180 return result
1187 return result
1181
1188
1182 def overridecat(orig, ui, repo, file1, *pats, **opts):
1189 def overridecat(orig, ui, repo, file1, *pats, **opts):
1183 ctx = scmutil.revsingle(repo, opts.get('rev'))
1190 ctx = scmutil.revsingle(repo, opts.get('rev'))
1184 err = 1
1191 err = 1
1185 notbad = set()
1192 notbad = set()
1186 m = scmutil.match(ctx, (file1,) + pats, opts)
1193 m = scmutil.match(ctx, (file1,) + pats, opts)
1187 origmatchfn = m.matchfn
1194 origmatchfn = m.matchfn
1188 def lfmatchfn(f):
1195 def lfmatchfn(f):
1189 if origmatchfn(f):
1196 if origmatchfn(f):
1190 return True
1197 return True
1191 lf = lfutil.splitstandin(f)
1198 lf = lfutil.splitstandin(f)
1192 if lf is None:
1199 if lf is None:
1193 return False
1200 return False
1194 notbad.add(lf)
1201 notbad.add(lf)
1195 return origmatchfn(lf)
1202 return origmatchfn(lf)
1196 m.matchfn = lfmatchfn
1203 m.matchfn = lfmatchfn
1197 origbadfn = m.bad
1204 origbadfn = m.bad
1198 def lfbadfn(f, msg):
1205 def lfbadfn(f, msg):
1199 if not f in notbad:
1206 if not f in notbad:
1200 origbadfn(f, msg)
1207 origbadfn(f, msg)
1201 m.bad = lfbadfn
1208 m.bad = lfbadfn
1202 for f in ctx.walk(m):
1209 for f in ctx.walk(m):
1203 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1210 fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
1204 pathname=f)
1211 pathname=f)
1205 lf = lfutil.splitstandin(f)
1212 lf = lfutil.splitstandin(f)
1206 if lf is None or origmatchfn(f):
1213 if lf is None or origmatchfn(f):
1207 # duplicating unreachable code from commands.cat
1214 # duplicating unreachable code from commands.cat
1208 data = ctx[f].data()
1215 data = ctx[f].data()
1209 if opts.get('decode'):
1216 if opts.get('decode'):
1210 data = repo.wwritedata(f, data)
1217 data = repo.wwritedata(f, data)
1211 fp.write(data)
1218 fp.write(data)
1212 else:
1219 else:
1213 hash = lfutil.readstandin(repo, lf, ctx.rev())
1220 hash = lfutil.readstandin(repo, lf, ctx.rev())
1214 if not lfutil.inusercache(repo.ui, hash):
1221 if not lfutil.inusercache(repo.ui, hash):
1215 store = basestore._openstore(repo)
1222 store = basestore._openstore(repo)
1216 success, missing = store.get([(lf, hash)])
1223 success, missing = store.get([(lf, hash)])
1217 if len(success) != 1:
1224 if len(success) != 1:
1218 raise util.Abort(
1225 raise util.Abort(
1219 _('largefile %s is not in cache and could not be '
1226 _('largefile %s is not in cache and could not be '
1220 'downloaded') % lf)
1227 'downloaded') % lf)
1221 path = lfutil.usercachepath(repo.ui, hash)
1228 path = lfutil.usercachepath(repo.ui, hash)
1222 fpin = open(path, "rb")
1229 fpin = open(path, "rb")
1223 for chunk in util.filechunkiter(fpin, 128 * 1024):
1230 for chunk in util.filechunkiter(fpin, 128 * 1024):
1224 fp.write(chunk)
1231 fp.write(chunk)
1225 fpin.close()
1232 fpin.close()
1226 fp.close()
1233 fp.close()
1227 err = 0
1234 err = 0
1228 return err
1235 return err
1229
1236
1230 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1237 def mergeupdate(orig, repo, node, branchmerge, force, partial,
1231 *args, **kwargs):
1238 *args, **kwargs):
1232 wlock = repo.wlock()
1239 wlock = repo.wlock()
1233 try:
1240 try:
1234 # branch | | |
1241 # branch | | |
1235 # merge | force | partial | action
1242 # merge | force | partial | action
1236 # -------+-------+---------+--------------
1243 # -------+-------+---------+--------------
1237 # x | x | x | linear-merge
1244 # x | x | x | linear-merge
1238 # o | x | x | branch-merge
1245 # o | x | x | branch-merge
1239 # x | o | x | overwrite (as clean update)
1246 # x | o | x | overwrite (as clean update)
1240 # o | o | x | force-branch-merge (*1)
1247 # o | o | x | force-branch-merge (*1)
1241 # x | x | o | (*)
1248 # x | x | o | (*)
1242 # o | x | o | (*)
1249 # o | x | o | (*)
1243 # x | o | o | overwrite (as revert)
1250 # x | o | o | overwrite (as revert)
1244 # o | o | o | (*)
1251 # o | o | o | (*)
1245 #
1252 #
1246 # (*) don't care
1253 # (*) don't care
1247 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1254 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1248
1255
1249 linearmerge = not branchmerge and not force and not partial
1256 linearmerge = not branchmerge and not force and not partial
1250
1257
1251 if linearmerge or (branchmerge and force and not partial):
1258 if linearmerge or (branchmerge and force and not partial):
1252 # update standins for linear-merge or force-branch-merge,
1259 # update standins for linear-merge or force-branch-merge,
1253 # because largefiles in the working directory may be modified
1260 # because largefiles in the working directory may be modified
1254 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1261 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1255 unsure, s = lfdirstate.status(match_.always(repo.root,
1262 unsure, s = lfdirstate.status(match_.always(repo.root,
1256 repo.getcwd()),
1263 repo.getcwd()),
1257 [], False, False, False)
1264 [], False, False, False)
1258 for lfile in unsure + s.modified + s.added:
1265 for lfile in unsure + s.modified + s.added:
1259 lfutil.updatestandin(repo, lfutil.standin(lfile))
1266 lfutil.updatestandin(repo, lfutil.standin(lfile))
1260
1267
1261 if linearmerge:
1268 if linearmerge:
1262 # Only call updatelfiles on the standins that have changed
1269 # Only call updatelfiles on the standins that have changed
1263 # to save time
1270 # to save time
1264 oldstandins = lfutil.getstandinsstate(repo)
1271 oldstandins = lfutil.getstandinsstate(repo)
1265
1272
1266 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1273 result = orig(repo, node, branchmerge, force, partial, *args, **kwargs)
1267
1274
1268 filelist = None
1275 filelist = None
1269 if linearmerge:
1276 if linearmerge:
1270 newstandins = lfutil.getstandinsstate(repo)
1277 newstandins = lfutil.getstandinsstate(repo)
1271 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1278 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1272
1279
1273 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1280 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1274 normallookup=partial)
1281 normallookup=partial)
1275
1282
1276 return result
1283 return result
1277 finally:
1284 finally:
1278 wlock.release()
1285 wlock.release()
1279
1286
1280 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1287 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1281 result = orig(repo, files, *args, **kwargs)
1288 result = orig(repo, files, *args, **kwargs)
1282
1289
1283 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1290 filelist = [lfutil.splitstandin(f) for f in files if lfutil.isstandin(f)]
1284 if filelist:
1291 if filelist:
1285 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1292 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1286 printmessage=False, normallookup=True)
1293 printmessage=False, normallookup=True)
1287
1294
1288 return result
1295 return result
@@ -1,1721 +1,1726 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, stat
12 import os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16 import revlog
16 import revlog
17
17
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 # Phony node value to stand-in for new files in some uses of
21 # manifests. Manifests support 21-byte hashes for nodes which are
22 # dirty in the working copy.
23 _newnode = '!' * 21
24
20 class basectx(object):
25 class basectx(object):
21 """A basectx object represents the common logic for its children:
26 """A basectx object represents the common logic for its children:
22 changectx: read-only context that is already present in the repo,
27 changectx: read-only context that is already present in the repo,
23 workingctx: a context that represents the working directory and can
28 workingctx: a context that represents the working directory and can
24 be committed,
29 be committed,
25 memctx: a context that represents changes in-memory and can also
30 memctx: a context that represents changes in-memory and can also
26 be committed."""
31 be committed."""
27 def __new__(cls, repo, changeid='', *args, **kwargs):
32 def __new__(cls, repo, changeid='', *args, **kwargs):
28 if isinstance(changeid, basectx):
33 if isinstance(changeid, basectx):
29 return changeid
34 return changeid
30
35
31 o = super(basectx, cls).__new__(cls)
36 o = super(basectx, cls).__new__(cls)
32
37
33 o._repo = repo
38 o._repo = repo
34 o._rev = nullrev
39 o._rev = nullrev
35 o._node = nullid
40 o._node = nullid
36
41
37 return o
42 return o
38
43
39 def __str__(self):
44 def __str__(self):
40 return short(self.node())
45 return short(self.node())
41
46
42 def __int__(self):
47 def __int__(self):
43 return self.rev()
48 return self.rev()
44
49
45 def __repr__(self):
50 def __repr__(self):
46 return "<%s %s>" % (type(self).__name__, str(self))
51 return "<%s %s>" % (type(self).__name__, str(self))
47
52
48 def __eq__(self, other):
53 def __eq__(self, other):
49 try:
54 try:
50 return type(self) == type(other) and self._rev == other._rev
55 return type(self) == type(other) and self._rev == other._rev
51 except AttributeError:
56 except AttributeError:
52 return False
57 return False
53
58
54 def __ne__(self, other):
59 def __ne__(self, other):
55 return not (self == other)
60 return not (self == other)
56
61
57 def __contains__(self, key):
62 def __contains__(self, key):
58 return key in self._manifest
63 return key in self._manifest
59
64
60 def __getitem__(self, key):
65 def __getitem__(self, key):
61 return self.filectx(key)
66 return self.filectx(key)
62
67
63 def __iter__(self):
68 def __iter__(self):
64 for f in sorted(self._manifest):
69 for f in sorted(self._manifest):
65 yield f
70 yield f
66
71
67 def _manifestmatches(self, match, s):
72 def _manifestmatches(self, match, s):
68 """generate a new manifest filtered by the match argument
73 """generate a new manifest filtered by the match argument
69
74
70 This method is for internal use only and mainly exists to provide an
75 This method is for internal use only and mainly exists to provide an
71 object oriented way for other contexts to customize the manifest
76 object oriented way for other contexts to customize the manifest
72 generation.
77 generation.
73 """
78 """
74 return self.manifest().matches(match)
79 return self.manifest().matches(match)
75
80
76 def _matchstatus(self, other, match):
81 def _matchstatus(self, other, match):
77 """return match.always if match is none
82 """return match.always if match is none
78
83
79 This internal method provides a way for child objects to override the
84 This internal method provides a way for child objects to override the
80 match operator.
85 match operator.
81 """
86 """
82 return match or matchmod.always(self._repo.root, self._repo.getcwd())
87 return match or matchmod.always(self._repo.root, self._repo.getcwd())
83
88
84 def _buildstatus(self, other, s, match, listignored, listclean,
89 def _buildstatus(self, other, s, match, listignored, listclean,
85 listunknown):
90 listunknown):
86 """build a status with respect to another context"""
91 """build a status with respect to another context"""
87 # Load earliest manifest first for caching reasons. More specifically,
92 # Load earliest manifest first for caching reasons. More specifically,
88 # if you have revisions 1000 and 1001, 1001 is probably stored as a
93 # if you have revisions 1000 and 1001, 1001 is probably stored as a
89 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
94 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
90 # 1000 and cache it so that when you read 1001, we just need to apply a
95 # 1000 and cache it so that when you read 1001, we just need to apply a
91 # delta to what's in the cache. So that's one full reconstruction + one
96 # delta to what's in the cache. So that's one full reconstruction + one
92 # delta application.
97 # delta application.
93 if self.rev() is not None and self.rev() < other.rev():
98 if self.rev() is not None and self.rev() < other.rev():
94 self.manifest()
99 self.manifest()
95 mf1 = other._manifestmatches(match, s)
100 mf1 = other._manifestmatches(match, s)
96 mf2 = self._manifestmatches(match, s)
101 mf2 = self._manifestmatches(match, s)
97
102
98 modified, added, clean = [], [], []
103 modified, added, clean = [], [], []
99 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
104 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
100 deletedset = set(deleted)
105 deletedset = set(deleted)
101 withflags = mf1.withflags() | mf2.withflags()
106 withflags = mf1.withflags() | mf2.withflags()
102 for fn, mf2node in mf2.iteritems():
107 for fn, mf2node in mf2.iteritems():
103 if fn in mf1:
108 if fn in mf1:
104 if (fn not in deletedset and
109 if (fn not in deletedset and
105 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
110 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
106 (mf1[fn] != mf2node and
111 (mf1[fn] != mf2node and
107 (mf2node or self[fn].cmp(other[fn]))))):
112 (mf2node != _newnode or self[fn].cmp(other[fn]))))):
108 modified.append(fn)
113 modified.append(fn)
109 elif listclean:
114 elif listclean:
110 clean.append(fn)
115 clean.append(fn)
111 del mf1[fn]
116 del mf1[fn]
112 elif fn not in deletedset:
117 elif fn not in deletedset:
113 added.append(fn)
118 added.append(fn)
114 removed = mf1.keys()
119 removed = mf1.keys()
115 if removed:
120 if removed:
116 # need to filter files if they are already reported as removed
121 # need to filter files if they are already reported as removed
117 unknown = [fn for fn in unknown if fn not in mf1]
122 unknown = [fn for fn in unknown if fn not in mf1]
118 ignored = [fn for fn in ignored if fn not in mf1]
123 ignored = [fn for fn in ignored if fn not in mf1]
119
124
120 return scmutil.status(modified, added, removed, deleted, unknown,
125 return scmutil.status(modified, added, removed, deleted, unknown,
121 ignored, clean)
126 ignored, clean)
122
127
123 @propertycache
128 @propertycache
124 def substate(self):
129 def substate(self):
125 return subrepo.state(self, self._repo.ui)
130 return subrepo.state(self, self._repo.ui)
126
131
127 def subrev(self, subpath):
132 def subrev(self, subpath):
128 return self.substate[subpath][1]
133 return self.substate[subpath][1]
129
134
130 def rev(self):
135 def rev(self):
131 return self._rev
136 return self._rev
132 def node(self):
137 def node(self):
133 return self._node
138 return self._node
134 def hex(self):
139 def hex(self):
135 return hex(self.node())
140 return hex(self.node())
136 def manifest(self):
141 def manifest(self):
137 return self._manifest
142 return self._manifest
138 def phasestr(self):
143 def phasestr(self):
139 return phases.phasenames[self.phase()]
144 return phases.phasenames[self.phase()]
140 def mutable(self):
145 def mutable(self):
141 return self.phase() > phases.public
146 return self.phase() > phases.public
142
147
143 def getfileset(self, expr):
148 def getfileset(self, expr):
144 return fileset.getfileset(self, expr)
149 return fileset.getfileset(self, expr)
145
150
146 def obsolete(self):
151 def obsolete(self):
147 """True if the changeset is obsolete"""
152 """True if the changeset is obsolete"""
148 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
153 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
149
154
150 def extinct(self):
155 def extinct(self):
151 """True if the changeset is extinct"""
156 """True if the changeset is extinct"""
152 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
157 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
153
158
154 def unstable(self):
159 def unstable(self):
155 """True if the changeset is not obsolete but it's ancestor are"""
160 """True if the changeset is not obsolete but it's ancestor are"""
156 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
161 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
157
162
158 def bumped(self):
163 def bumped(self):
159 """True if the changeset try to be a successor of a public changeset
164 """True if the changeset try to be a successor of a public changeset
160
165
161 Only non-public and non-obsolete changesets may be bumped.
166 Only non-public and non-obsolete changesets may be bumped.
162 """
167 """
163 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
168 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
164
169
165 def divergent(self):
170 def divergent(self):
166 """Is a successors of a changeset with multiple possible successors set
171 """Is a successors of a changeset with multiple possible successors set
167
172
168 Only non-public and non-obsolete changesets may be divergent.
173 Only non-public and non-obsolete changesets may be divergent.
169 """
174 """
170 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
175 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
171
176
172 def troubled(self):
177 def troubled(self):
173 """True if the changeset is either unstable, bumped or divergent"""
178 """True if the changeset is either unstable, bumped or divergent"""
174 return self.unstable() or self.bumped() or self.divergent()
179 return self.unstable() or self.bumped() or self.divergent()
175
180
176 def troubles(self):
181 def troubles(self):
177 """return the list of troubles affecting this changesets.
182 """return the list of troubles affecting this changesets.
178
183
179 Troubles are returned as strings. possible values are:
184 Troubles are returned as strings. possible values are:
180 - unstable,
185 - unstable,
181 - bumped,
186 - bumped,
182 - divergent.
187 - divergent.
183 """
188 """
184 troubles = []
189 troubles = []
185 if self.unstable():
190 if self.unstable():
186 troubles.append('unstable')
191 troubles.append('unstable')
187 if self.bumped():
192 if self.bumped():
188 troubles.append('bumped')
193 troubles.append('bumped')
189 if self.divergent():
194 if self.divergent():
190 troubles.append('divergent')
195 troubles.append('divergent')
191 return troubles
196 return troubles
192
197
193 def parents(self):
198 def parents(self):
194 """return contexts for each parent changeset"""
199 """return contexts for each parent changeset"""
195 return self._parents
200 return self._parents
196
201
197 def p1(self):
202 def p1(self):
198 return self._parents[0]
203 return self._parents[0]
199
204
200 def p2(self):
205 def p2(self):
201 if len(self._parents) == 2:
206 if len(self._parents) == 2:
202 return self._parents[1]
207 return self._parents[1]
203 return changectx(self._repo, -1)
208 return changectx(self._repo, -1)
204
209
205 def _fileinfo(self, path):
210 def _fileinfo(self, path):
206 if '_manifest' in self.__dict__:
211 if '_manifest' in self.__dict__:
207 try:
212 try:
208 return self._manifest[path], self._manifest.flags(path)
213 return self._manifest[path], self._manifest.flags(path)
209 except KeyError:
214 except KeyError:
210 raise error.ManifestLookupError(self._node, path,
215 raise error.ManifestLookupError(self._node, path,
211 _('not found in manifest'))
216 _('not found in manifest'))
212 if '_manifestdelta' in self.__dict__ or path in self.files():
217 if '_manifestdelta' in self.__dict__ or path in self.files():
213 if path in self._manifestdelta:
218 if path in self._manifestdelta:
214 return (self._manifestdelta[path],
219 return (self._manifestdelta[path],
215 self._manifestdelta.flags(path))
220 self._manifestdelta.flags(path))
216 node, flag = self._repo.manifest.find(self._changeset[0], path)
221 node, flag = self._repo.manifest.find(self._changeset[0], path)
217 if not node:
222 if not node:
218 raise error.ManifestLookupError(self._node, path,
223 raise error.ManifestLookupError(self._node, path,
219 _('not found in manifest'))
224 _('not found in manifest'))
220
225
221 return node, flag
226 return node, flag
222
227
223 def filenode(self, path):
228 def filenode(self, path):
224 return self._fileinfo(path)[0]
229 return self._fileinfo(path)[0]
225
230
226 def flags(self, path):
231 def flags(self, path):
227 try:
232 try:
228 return self._fileinfo(path)[1]
233 return self._fileinfo(path)[1]
229 except error.LookupError:
234 except error.LookupError:
230 return ''
235 return ''
231
236
232 def sub(self, path):
237 def sub(self, path):
233 return subrepo.subrepo(self, path)
238 return subrepo.subrepo(self, path)
234
239
235 def match(self, pats=[], include=None, exclude=None, default='glob'):
240 def match(self, pats=[], include=None, exclude=None, default='glob'):
236 r = self._repo
241 r = self._repo
237 return matchmod.match(r.root, r.getcwd(), pats,
242 return matchmod.match(r.root, r.getcwd(), pats,
238 include, exclude, default,
243 include, exclude, default,
239 auditor=r.auditor, ctx=self)
244 auditor=r.auditor, ctx=self)
240
245
241 def diff(self, ctx2=None, match=None, **opts):
246 def diff(self, ctx2=None, match=None, **opts):
242 """Returns a diff generator for the given contexts and matcher"""
247 """Returns a diff generator for the given contexts and matcher"""
243 if ctx2 is None:
248 if ctx2 is None:
244 ctx2 = self.p1()
249 ctx2 = self.p1()
245 if ctx2 is not None:
250 if ctx2 is not None:
246 ctx2 = self._repo[ctx2]
251 ctx2 = self._repo[ctx2]
247 diffopts = patch.diffopts(self._repo.ui, opts)
252 diffopts = patch.diffopts(self._repo.ui, opts)
248 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
253 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
249
254
250 @propertycache
255 @propertycache
251 def _dirs(self):
256 def _dirs(self):
252 return scmutil.dirs(self._manifest)
257 return scmutil.dirs(self._manifest)
253
258
254 def dirs(self):
259 def dirs(self):
255 return self._dirs
260 return self._dirs
256
261
257 def dirty(self, missing=False, merge=True, branch=True):
262 def dirty(self, missing=False, merge=True, branch=True):
258 return False
263 return False
259
264
260 def status(self, other=None, match=None, listignored=False,
265 def status(self, other=None, match=None, listignored=False,
261 listclean=False, listunknown=False, listsubrepos=False):
266 listclean=False, listunknown=False, listsubrepos=False):
262 """return status of files between two nodes or node and working
267 """return status of files between two nodes or node and working
263 directory.
268 directory.
264
269
265 If other is None, compare this node with working directory.
270 If other is None, compare this node with working directory.
266
271
267 returns (modified, added, removed, deleted, unknown, ignored, clean)
272 returns (modified, added, removed, deleted, unknown, ignored, clean)
268 """
273 """
269
274
270 ctx1 = self
275 ctx1 = self
271 ctx2 = self._repo[other]
276 ctx2 = self._repo[other]
272
277
273 # This next code block is, admittedly, fragile logic that tests for
278 # This next code block is, admittedly, fragile logic that tests for
274 # reversing the contexts and wouldn't need to exist if it weren't for
279 # reversing the contexts and wouldn't need to exist if it weren't for
275 # the fast (and common) code path of comparing the working directory
280 # the fast (and common) code path of comparing the working directory
276 # with its first parent.
281 # with its first parent.
277 #
282 #
278 # What we're aiming for here is the ability to call:
283 # What we're aiming for here is the ability to call:
279 #
284 #
280 # workingctx.status(parentctx)
285 # workingctx.status(parentctx)
281 #
286 #
282 # If we always built the manifest for each context and compared those,
287 # If we always built the manifest for each context and compared those,
283 # then we'd be done. But the special case of the above call means we
288 # then we'd be done. But the special case of the above call means we
284 # just copy the manifest of the parent.
289 # just copy the manifest of the parent.
285 reversed = False
290 reversed = False
286 if (not isinstance(ctx1, changectx)
291 if (not isinstance(ctx1, changectx)
287 and isinstance(ctx2, changectx)):
292 and isinstance(ctx2, changectx)):
288 reversed = True
293 reversed = True
289 ctx1, ctx2 = ctx2, ctx1
294 ctx1, ctx2 = ctx2, ctx1
290
295
291 match = ctx2._matchstatus(ctx1, match)
296 match = ctx2._matchstatus(ctx1, match)
292 r = scmutil.status([], [], [], [], [], [], [])
297 r = scmutil.status([], [], [], [], [], [], [])
293 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
298 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
294 listunknown)
299 listunknown)
295
300
296 if reversed:
301 if reversed:
297 # Reverse added and removed. Clear deleted, unknown and ignored as
302 # Reverse added and removed. Clear deleted, unknown and ignored as
298 # these make no sense to reverse.
303 # these make no sense to reverse.
299 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
304 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
300 r.clean)
305 r.clean)
301
306
302 if listsubrepos:
307 if listsubrepos:
303 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
308 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
304 rev2 = ctx2.subrev(subpath)
309 rev2 = ctx2.subrev(subpath)
305 try:
310 try:
306 submatch = matchmod.narrowmatcher(subpath, match)
311 submatch = matchmod.narrowmatcher(subpath, match)
307 s = sub.status(rev2, match=submatch, ignored=listignored,
312 s = sub.status(rev2, match=submatch, ignored=listignored,
308 clean=listclean, unknown=listunknown,
313 clean=listclean, unknown=listunknown,
309 listsubrepos=True)
314 listsubrepos=True)
310 for rfiles, sfiles in zip(r, s):
315 for rfiles, sfiles in zip(r, s):
311 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
316 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
312 except error.LookupError:
317 except error.LookupError:
313 self._repo.ui.status(_("skipping missing "
318 self._repo.ui.status(_("skipping missing "
314 "subrepository: %s\n") % subpath)
319 "subrepository: %s\n") % subpath)
315
320
316 for l in r:
321 for l in r:
317 l.sort()
322 l.sort()
318
323
319 return r
324 return r
320
325
321
326
322 def makememctx(repo, parents, text, user, date, branch, files, store,
327 def makememctx(repo, parents, text, user, date, branch, files, store,
323 editor=None):
328 editor=None):
324 def getfilectx(repo, memctx, path):
329 def getfilectx(repo, memctx, path):
325 data, mode, copied = store.getfile(path)
330 data, mode, copied = store.getfile(path)
326 if data is None:
331 if data is None:
327 return None
332 return None
328 islink, isexec = mode
333 islink, isexec = mode
329 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
334 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
330 copied=copied, memctx=memctx)
335 copied=copied, memctx=memctx)
331 extra = {}
336 extra = {}
332 if branch:
337 if branch:
333 extra['branch'] = encoding.fromlocal(branch)
338 extra['branch'] = encoding.fromlocal(branch)
334 ctx = memctx(repo, parents, text, files, getfilectx, user,
339 ctx = memctx(repo, parents, text, files, getfilectx, user,
335 date, extra, editor)
340 date, extra, editor)
336 return ctx
341 return ctx
337
342
338 class changectx(basectx):
343 class changectx(basectx):
339 """A changecontext object makes access to data related to a particular
344 """A changecontext object makes access to data related to a particular
340 changeset convenient. It represents a read-only context already present in
345 changeset convenient. It represents a read-only context already present in
341 the repo."""
346 the repo."""
342 def __init__(self, repo, changeid=''):
347 def __init__(self, repo, changeid=''):
343 """changeid is a revision number, node, or tag"""
348 """changeid is a revision number, node, or tag"""
344
349
345 # since basectx.__new__ already took care of copying the object, we
350 # since basectx.__new__ already took care of copying the object, we
346 # don't need to do anything in __init__, so we just exit here
351 # don't need to do anything in __init__, so we just exit here
347 if isinstance(changeid, basectx):
352 if isinstance(changeid, basectx):
348 return
353 return
349
354
350 if changeid == '':
355 if changeid == '':
351 changeid = '.'
356 changeid = '.'
352 self._repo = repo
357 self._repo = repo
353
358
354 try:
359 try:
355 if isinstance(changeid, int):
360 if isinstance(changeid, int):
356 self._node = repo.changelog.node(changeid)
361 self._node = repo.changelog.node(changeid)
357 self._rev = changeid
362 self._rev = changeid
358 return
363 return
359 if isinstance(changeid, long):
364 if isinstance(changeid, long):
360 changeid = str(changeid)
365 changeid = str(changeid)
361 if changeid == '.':
366 if changeid == '.':
362 self._node = repo.dirstate.p1()
367 self._node = repo.dirstate.p1()
363 self._rev = repo.changelog.rev(self._node)
368 self._rev = repo.changelog.rev(self._node)
364 return
369 return
365 if changeid == 'null':
370 if changeid == 'null':
366 self._node = nullid
371 self._node = nullid
367 self._rev = nullrev
372 self._rev = nullrev
368 return
373 return
369 if changeid == 'tip':
374 if changeid == 'tip':
370 self._node = repo.changelog.tip()
375 self._node = repo.changelog.tip()
371 self._rev = repo.changelog.rev(self._node)
376 self._rev = repo.changelog.rev(self._node)
372 return
377 return
373 if len(changeid) == 20:
378 if len(changeid) == 20:
374 try:
379 try:
375 self._node = changeid
380 self._node = changeid
376 self._rev = repo.changelog.rev(changeid)
381 self._rev = repo.changelog.rev(changeid)
377 return
382 return
378 except error.FilteredRepoLookupError:
383 except error.FilteredRepoLookupError:
379 raise
384 raise
380 except LookupError:
385 except LookupError:
381 pass
386 pass
382
387
383 try:
388 try:
384 r = int(changeid)
389 r = int(changeid)
385 if str(r) != changeid:
390 if str(r) != changeid:
386 raise ValueError
391 raise ValueError
387 l = len(repo.changelog)
392 l = len(repo.changelog)
388 if r < 0:
393 if r < 0:
389 r += l
394 r += l
390 if r < 0 or r >= l:
395 if r < 0 or r >= l:
391 raise ValueError
396 raise ValueError
392 self._rev = r
397 self._rev = r
393 self._node = repo.changelog.node(r)
398 self._node = repo.changelog.node(r)
394 return
399 return
395 except error.FilteredIndexError:
400 except error.FilteredIndexError:
396 raise
401 raise
397 except (ValueError, OverflowError, IndexError):
402 except (ValueError, OverflowError, IndexError):
398 pass
403 pass
399
404
400 if len(changeid) == 40:
405 if len(changeid) == 40:
401 try:
406 try:
402 self._node = bin(changeid)
407 self._node = bin(changeid)
403 self._rev = repo.changelog.rev(self._node)
408 self._rev = repo.changelog.rev(self._node)
404 return
409 return
405 except error.FilteredLookupError:
410 except error.FilteredLookupError:
406 raise
411 raise
407 except (TypeError, LookupError):
412 except (TypeError, LookupError):
408 pass
413 pass
409
414
410 # lookup bookmarks through the name interface
415 # lookup bookmarks through the name interface
411 try:
416 try:
412 self._node = repo.names.singlenode(repo, changeid)
417 self._node = repo.names.singlenode(repo, changeid)
413 self._rev = repo.changelog.rev(self._node)
418 self._rev = repo.changelog.rev(self._node)
414 return
419 return
415 except KeyError:
420 except KeyError:
416 pass
421 pass
417 except error.FilteredRepoLookupError:
422 except error.FilteredRepoLookupError:
418 raise
423 raise
419 except error.RepoLookupError:
424 except error.RepoLookupError:
420 pass
425 pass
421
426
422 self._node = repo.unfiltered().changelog._partialmatch(changeid)
427 self._node = repo.unfiltered().changelog._partialmatch(changeid)
423 if self._node is not None:
428 if self._node is not None:
424 self._rev = repo.changelog.rev(self._node)
429 self._rev = repo.changelog.rev(self._node)
425 return
430 return
426
431
427 # lookup failed
432 # lookup failed
428 # check if it might have come from damaged dirstate
433 # check if it might have come from damaged dirstate
429 #
434 #
430 # XXX we could avoid the unfiltered if we had a recognizable
435 # XXX we could avoid the unfiltered if we had a recognizable
431 # exception for filtered changeset access
436 # exception for filtered changeset access
432 if changeid in repo.unfiltered().dirstate.parents():
437 if changeid in repo.unfiltered().dirstate.parents():
433 msg = _("working directory has unknown parent '%s'!")
438 msg = _("working directory has unknown parent '%s'!")
434 raise error.Abort(msg % short(changeid))
439 raise error.Abort(msg % short(changeid))
435 try:
440 try:
436 if len(changeid) == 20:
441 if len(changeid) == 20:
437 changeid = hex(changeid)
442 changeid = hex(changeid)
438 except TypeError:
443 except TypeError:
439 pass
444 pass
440 except (error.FilteredIndexError, error.FilteredLookupError,
445 except (error.FilteredIndexError, error.FilteredLookupError,
441 error.FilteredRepoLookupError):
446 error.FilteredRepoLookupError):
442 if repo.filtername == 'visible':
447 if repo.filtername == 'visible':
443 msg = _("hidden revision '%s'") % changeid
448 msg = _("hidden revision '%s'") % changeid
444 hint = _('use --hidden to access hidden revisions')
449 hint = _('use --hidden to access hidden revisions')
445 raise error.FilteredRepoLookupError(msg, hint=hint)
450 raise error.FilteredRepoLookupError(msg, hint=hint)
446 msg = _("filtered revision '%s' (not in '%s' subset)")
451 msg = _("filtered revision '%s' (not in '%s' subset)")
447 msg %= (changeid, repo.filtername)
452 msg %= (changeid, repo.filtername)
448 raise error.FilteredRepoLookupError(msg)
453 raise error.FilteredRepoLookupError(msg)
449 except IndexError:
454 except IndexError:
450 pass
455 pass
451 raise error.RepoLookupError(
456 raise error.RepoLookupError(
452 _("unknown revision '%s'") % changeid)
457 _("unknown revision '%s'") % changeid)
453
458
454 def __hash__(self):
459 def __hash__(self):
455 try:
460 try:
456 return hash(self._rev)
461 return hash(self._rev)
457 except AttributeError:
462 except AttributeError:
458 return id(self)
463 return id(self)
459
464
460 def __nonzero__(self):
465 def __nonzero__(self):
461 return self._rev != nullrev
466 return self._rev != nullrev
462
467
463 @propertycache
468 @propertycache
464 def _changeset(self):
469 def _changeset(self):
465 return self._repo.changelog.read(self.rev())
470 return self._repo.changelog.read(self.rev())
466
471
467 @propertycache
472 @propertycache
468 def _manifest(self):
473 def _manifest(self):
469 return self._repo.manifest.read(self._changeset[0])
474 return self._repo.manifest.read(self._changeset[0])
470
475
471 @propertycache
476 @propertycache
472 def _manifestdelta(self):
477 def _manifestdelta(self):
473 return self._repo.manifest.readdelta(self._changeset[0])
478 return self._repo.manifest.readdelta(self._changeset[0])
474
479
475 @propertycache
480 @propertycache
476 def _parents(self):
481 def _parents(self):
477 p = self._repo.changelog.parentrevs(self._rev)
482 p = self._repo.changelog.parentrevs(self._rev)
478 if p[1] == nullrev:
483 if p[1] == nullrev:
479 p = p[:-1]
484 p = p[:-1]
480 return [changectx(self._repo, x) for x in p]
485 return [changectx(self._repo, x) for x in p]
481
486
482 def changeset(self):
487 def changeset(self):
483 return self._changeset
488 return self._changeset
484 def manifestnode(self):
489 def manifestnode(self):
485 return self._changeset[0]
490 return self._changeset[0]
486
491
487 def user(self):
492 def user(self):
488 return self._changeset[1]
493 return self._changeset[1]
489 def date(self):
494 def date(self):
490 return self._changeset[2]
495 return self._changeset[2]
491 def files(self):
496 def files(self):
492 return self._changeset[3]
497 return self._changeset[3]
493 def description(self):
498 def description(self):
494 return self._changeset[4]
499 return self._changeset[4]
495 def branch(self):
500 def branch(self):
496 return encoding.tolocal(self._changeset[5].get("branch"))
501 return encoding.tolocal(self._changeset[5].get("branch"))
497 def closesbranch(self):
502 def closesbranch(self):
498 return 'close' in self._changeset[5]
503 return 'close' in self._changeset[5]
499 def extra(self):
504 def extra(self):
500 return self._changeset[5]
505 return self._changeset[5]
501 def tags(self):
506 def tags(self):
502 return self._repo.nodetags(self._node)
507 return self._repo.nodetags(self._node)
503 def bookmarks(self):
508 def bookmarks(self):
504 return self._repo.nodebookmarks(self._node)
509 return self._repo.nodebookmarks(self._node)
505 def phase(self):
510 def phase(self):
506 return self._repo._phasecache.phase(self._repo, self._rev)
511 return self._repo._phasecache.phase(self._repo, self._rev)
507 def hidden(self):
512 def hidden(self):
508 return self._rev in repoview.filterrevs(self._repo, 'visible')
513 return self._rev in repoview.filterrevs(self._repo, 'visible')
509
514
510 def children(self):
515 def children(self):
511 """return contexts for each child changeset"""
516 """return contexts for each child changeset"""
512 c = self._repo.changelog.children(self._node)
517 c = self._repo.changelog.children(self._node)
513 return [changectx(self._repo, x) for x in c]
518 return [changectx(self._repo, x) for x in c]
514
519
515 def ancestors(self):
520 def ancestors(self):
516 for a in self._repo.changelog.ancestors([self._rev]):
521 for a in self._repo.changelog.ancestors([self._rev]):
517 yield changectx(self._repo, a)
522 yield changectx(self._repo, a)
518
523
519 def descendants(self):
524 def descendants(self):
520 for d in self._repo.changelog.descendants([self._rev]):
525 for d in self._repo.changelog.descendants([self._rev]):
521 yield changectx(self._repo, d)
526 yield changectx(self._repo, d)
522
527
523 def filectx(self, path, fileid=None, filelog=None):
528 def filectx(self, path, fileid=None, filelog=None):
524 """get a file context from this changeset"""
529 """get a file context from this changeset"""
525 if fileid is None:
530 if fileid is None:
526 fileid = self.filenode(path)
531 fileid = self.filenode(path)
527 return filectx(self._repo, path, fileid=fileid,
532 return filectx(self._repo, path, fileid=fileid,
528 changectx=self, filelog=filelog)
533 changectx=self, filelog=filelog)
529
534
530 def ancestor(self, c2, warn=False):
535 def ancestor(self, c2, warn=False):
531 """return the "best" ancestor context of self and c2
536 """return the "best" ancestor context of self and c2
532
537
533 If there are multiple candidates, it will show a message and check
538 If there are multiple candidates, it will show a message and check
534 merge.preferancestor configuration before falling back to the
539 merge.preferancestor configuration before falling back to the
535 revlog ancestor."""
540 revlog ancestor."""
536 # deal with workingctxs
541 # deal with workingctxs
537 n2 = c2._node
542 n2 = c2._node
538 if n2 is None:
543 if n2 is None:
539 n2 = c2._parents[0]._node
544 n2 = c2._parents[0]._node
540 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
545 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
541 if not cahs:
546 if not cahs:
542 anc = nullid
547 anc = nullid
543 elif len(cahs) == 1:
548 elif len(cahs) == 1:
544 anc = cahs[0]
549 anc = cahs[0]
545 else:
550 else:
546 for r in self._repo.ui.configlist('merge', 'preferancestor'):
551 for r in self._repo.ui.configlist('merge', 'preferancestor'):
547 try:
552 try:
548 ctx = changectx(self._repo, r)
553 ctx = changectx(self._repo, r)
549 except error.RepoLookupError:
554 except error.RepoLookupError:
550 continue
555 continue
551 anc = ctx.node()
556 anc = ctx.node()
552 if anc in cahs:
557 if anc in cahs:
553 break
558 break
554 else:
559 else:
555 anc = self._repo.changelog.ancestor(self._node, n2)
560 anc = self._repo.changelog.ancestor(self._node, n2)
556 if warn:
561 if warn:
557 self._repo.ui.status(
562 self._repo.ui.status(
558 (_("note: using %s as ancestor of %s and %s\n") %
563 (_("note: using %s as ancestor of %s and %s\n") %
559 (short(anc), short(self._node), short(n2))) +
564 (short(anc), short(self._node), short(n2))) +
560 ''.join(_(" alternatively, use --config "
565 ''.join(_(" alternatively, use --config "
561 "merge.preferancestor=%s\n") %
566 "merge.preferancestor=%s\n") %
562 short(n) for n in sorted(cahs) if n != anc))
567 short(n) for n in sorted(cahs) if n != anc))
563 return changectx(self._repo, anc)
568 return changectx(self._repo, anc)
564
569
565 def descendant(self, other):
570 def descendant(self, other):
566 """True if other is descendant of this changeset"""
571 """True if other is descendant of this changeset"""
567 return self._repo.changelog.descendant(self._rev, other._rev)
572 return self._repo.changelog.descendant(self._rev, other._rev)
568
573
569 def walk(self, match):
574 def walk(self, match):
570 fset = set(match.files())
575 fset = set(match.files())
571 # for dirstate.walk, files=['.'] means "walk the whole tree".
576 # for dirstate.walk, files=['.'] means "walk the whole tree".
572 # follow that here, too
577 # follow that here, too
573 fset.discard('.')
578 fset.discard('.')
574
579
575 # avoid the entire walk if we're only looking for specific files
580 # avoid the entire walk if we're only looking for specific files
576 if fset and not match.anypats():
581 if fset and not match.anypats():
577 if util.all([fn in self for fn in fset]):
582 if util.all([fn in self for fn in fset]):
578 for fn in sorted(fset):
583 for fn in sorted(fset):
579 if match(fn):
584 if match(fn):
580 yield fn
585 yield fn
581 raise StopIteration
586 raise StopIteration
582
587
583 for fn in self:
588 for fn in self:
584 if fn in fset:
589 if fn in fset:
585 # specified pattern is the exact name
590 # specified pattern is the exact name
586 fset.remove(fn)
591 fset.remove(fn)
587 if match(fn):
592 if match(fn):
588 yield fn
593 yield fn
589 for fn in sorted(fset):
594 for fn in sorted(fset):
590 if fn in self._dirs:
595 if fn in self._dirs:
591 # specified pattern is a directory
596 # specified pattern is a directory
592 continue
597 continue
593 match.bad(fn, _('no such file in rev %s') % self)
598 match.bad(fn, _('no such file in rev %s') % self)
594
599
595 def matches(self, match):
600 def matches(self, match):
596 return self.walk(match)
601 return self.walk(match)
597
602
598 class basefilectx(object):
603 class basefilectx(object):
599 """A filecontext object represents the common logic for its children:
604 """A filecontext object represents the common logic for its children:
600 filectx: read-only access to a filerevision that is already present
605 filectx: read-only access to a filerevision that is already present
601 in the repo,
606 in the repo,
602 workingfilectx: a filecontext that represents files from the working
607 workingfilectx: a filecontext that represents files from the working
603 directory,
608 directory,
604 memfilectx: a filecontext that represents files in-memory."""
609 memfilectx: a filecontext that represents files in-memory."""
605 def __new__(cls, repo, path, *args, **kwargs):
610 def __new__(cls, repo, path, *args, **kwargs):
606 return super(basefilectx, cls).__new__(cls)
611 return super(basefilectx, cls).__new__(cls)
607
612
608 @propertycache
613 @propertycache
609 def _filelog(self):
614 def _filelog(self):
610 return self._repo.file(self._path)
615 return self._repo.file(self._path)
611
616
612 @propertycache
617 @propertycache
613 def _changeid(self):
618 def _changeid(self):
614 if '_changeid' in self.__dict__:
619 if '_changeid' in self.__dict__:
615 return self._changeid
620 return self._changeid
616 elif '_changectx' in self.__dict__:
621 elif '_changectx' in self.__dict__:
617 return self._changectx.rev()
622 return self._changectx.rev()
618 else:
623 else:
619 return self._filelog.linkrev(self._filerev)
624 return self._filelog.linkrev(self._filerev)
620
625
621 @propertycache
626 @propertycache
622 def _filenode(self):
627 def _filenode(self):
623 if '_fileid' in self.__dict__:
628 if '_fileid' in self.__dict__:
624 return self._filelog.lookup(self._fileid)
629 return self._filelog.lookup(self._fileid)
625 else:
630 else:
626 return self._changectx.filenode(self._path)
631 return self._changectx.filenode(self._path)
627
632
628 @propertycache
633 @propertycache
629 def _filerev(self):
634 def _filerev(self):
630 return self._filelog.rev(self._filenode)
635 return self._filelog.rev(self._filenode)
631
636
632 @propertycache
637 @propertycache
633 def _repopath(self):
638 def _repopath(self):
634 return self._path
639 return self._path
635
640
636 def __nonzero__(self):
641 def __nonzero__(self):
637 try:
642 try:
638 self._filenode
643 self._filenode
639 return True
644 return True
640 except error.LookupError:
645 except error.LookupError:
641 # file is missing
646 # file is missing
642 return False
647 return False
643
648
644 def __str__(self):
649 def __str__(self):
645 return "%s@%s" % (self.path(), self._changectx)
650 return "%s@%s" % (self.path(), self._changectx)
646
651
647 def __repr__(self):
652 def __repr__(self):
648 return "<%s %s>" % (type(self).__name__, str(self))
653 return "<%s %s>" % (type(self).__name__, str(self))
649
654
650 def __hash__(self):
655 def __hash__(self):
651 try:
656 try:
652 return hash((self._path, self._filenode))
657 return hash((self._path, self._filenode))
653 except AttributeError:
658 except AttributeError:
654 return id(self)
659 return id(self)
655
660
656 def __eq__(self, other):
661 def __eq__(self, other):
657 try:
662 try:
658 return (type(self) == type(other) and self._path == other._path
663 return (type(self) == type(other) and self._path == other._path
659 and self._filenode == other._filenode)
664 and self._filenode == other._filenode)
660 except AttributeError:
665 except AttributeError:
661 return False
666 return False
662
667
663 def __ne__(self, other):
668 def __ne__(self, other):
664 return not (self == other)
669 return not (self == other)
665
670
666 def filerev(self):
671 def filerev(self):
667 return self._filerev
672 return self._filerev
668 def filenode(self):
673 def filenode(self):
669 return self._filenode
674 return self._filenode
670 def flags(self):
675 def flags(self):
671 return self._changectx.flags(self._path)
676 return self._changectx.flags(self._path)
672 def filelog(self):
677 def filelog(self):
673 return self._filelog
678 return self._filelog
674 def rev(self):
679 def rev(self):
675 return self._changeid
680 return self._changeid
676 def linkrev(self):
681 def linkrev(self):
677 return self._filelog.linkrev(self._filerev)
682 return self._filelog.linkrev(self._filerev)
678 def node(self):
683 def node(self):
679 return self._changectx.node()
684 return self._changectx.node()
680 def hex(self):
685 def hex(self):
681 return self._changectx.hex()
686 return self._changectx.hex()
682 def user(self):
687 def user(self):
683 return self._changectx.user()
688 return self._changectx.user()
684 def date(self):
689 def date(self):
685 return self._changectx.date()
690 return self._changectx.date()
686 def files(self):
691 def files(self):
687 return self._changectx.files()
692 return self._changectx.files()
688 def description(self):
693 def description(self):
689 return self._changectx.description()
694 return self._changectx.description()
690 def branch(self):
695 def branch(self):
691 return self._changectx.branch()
696 return self._changectx.branch()
692 def extra(self):
697 def extra(self):
693 return self._changectx.extra()
698 return self._changectx.extra()
694 def phase(self):
699 def phase(self):
695 return self._changectx.phase()
700 return self._changectx.phase()
696 def phasestr(self):
701 def phasestr(self):
697 return self._changectx.phasestr()
702 return self._changectx.phasestr()
698 def manifest(self):
703 def manifest(self):
699 return self._changectx.manifest()
704 return self._changectx.manifest()
700 def changectx(self):
705 def changectx(self):
701 return self._changectx
706 return self._changectx
702
707
703 def path(self):
708 def path(self):
704 return self._path
709 return self._path
705
710
706 def isbinary(self):
711 def isbinary(self):
707 try:
712 try:
708 return util.binary(self.data())
713 return util.binary(self.data())
709 except IOError:
714 except IOError:
710 return False
715 return False
711 def isexec(self):
716 def isexec(self):
712 return 'x' in self.flags()
717 return 'x' in self.flags()
713 def islink(self):
718 def islink(self):
714 return 'l' in self.flags()
719 return 'l' in self.flags()
715
720
716 def cmp(self, fctx):
721 def cmp(self, fctx):
717 """compare with other file context
722 """compare with other file context
718
723
719 returns True if different than fctx.
724 returns True if different than fctx.
720 """
725 """
721 if (fctx._filerev is None
726 if (fctx._filerev is None
722 and (self._repo._encodefilterpats
727 and (self._repo._encodefilterpats
723 # if file data starts with '\1\n', empty metadata block is
728 # if file data starts with '\1\n', empty metadata block is
724 # prepended, which adds 4 bytes to filelog.size().
729 # prepended, which adds 4 bytes to filelog.size().
725 or self.size() - 4 == fctx.size())
730 or self.size() - 4 == fctx.size())
726 or self.size() == fctx.size()):
731 or self.size() == fctx.size()):
727 return self._filelog.cmp(self._filenode, fctx.data())
732 return self._filelog.cmp(self._filenode, fctx.data())
728
733
729 return True
734 return True
730
735
731 def parents(self):
736 def parents(self):
732 _path = self._path
737 _path = self._path
733 fl = self._filelog
738 fl = self._filelog
734 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
739 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
735
740
736 r = self._filelog.renamed(self._filenode)
741 r = self._filelog.renamed(self._filenode)
737 if r:
742 if r:
738 pl[0] = (r[0], r[1], None)
743 pl[0] = (r[0], r[1], None)
739
744
740 return [filectx(self._repo, p, fileid=n, filelog=l)
745 return [filectx(self._repo, p, fileid=n, filelog=l)
741 for p, n, l in pl if n != nullid]
746 for p, n, l in pl if n != nullid]
742
747
743 def p1(self):
748 def p1(self):
744 return self.parents()[0]
749 return self.parents()[0]
745
750
746 def p2(self):
751 def p2(self):
747 p = self.parents()
752 p = self.parents()
748 if len(p) == 2:
753 if len(p) == 2:
749 return p[1]
754 return p[1]
750 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
755 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
751
756
752 def annotate(self, follow=False, linenumber=None, diffopts=None):
757 def annotate(self, follow=False, linenumber=None, diffopts=None):
753 '''returns a list of tuples of (ctx, line) for each line
758 '''returns a list of tuples of (ctx, line) for each line
754 in the file, where ctx is the filectx of the node where
759 in the file, where ctx is the filectx of the node where
755 that line was last changed.
760 that line was last changed.
756 This returns tuples of ((ctx, linenumber), line) for each line,
761 This returns tuples of ((ctx, linenumber), line) for each line,
757 if "linenumber" parameter is NOT "None".
762 if "linenumber" parameter is NOT "None".
758 In such tuples, linenumber means one at the first appearance
763 In such tuples, linenumber means one at the first appearance
759 in the managed file.
764 in the managed file.
760 To reduce annotation cost,
765 To reduce annotation cost,
761 this returns fixed value(False is used) as linenumber,
766 this returns fixed value(False is used) as linenumber,
762 if "linenumber" parameter is "False".'''
767 if "linenumber" parameter is "False".'''
763
768
764 if linenumber is None:
769 if linenumber is None:
765 def decorate(text, rev):
770 def decorate(text, rev):
766 return ([rev] * len(text.splitlines()), text)
771 return ([rev] * len(text.splitlines()), text)
767 elif linenumber:
772 elif linenumber:
768 def decorate(text, rev):
773 def decorate(text, rev):
769 size = len(text.splitlines())
774 size = len(text.splitlines())
770 return ([(rev, i) for i in xrange(1, size + 1)], text)
775 return ([(rev, i) for i in xrange(1, size + 1)], text)
771 else:
776 else:
772 def decorate(text, rev):
777 def decorate(text, rev):
773 return ([(rev, False)] * len(text.splitlines()), text)
778 return ([(rev, False)] * len(text.splitlines()), text)
774
779
775 def pair(parent, child):
780 def pair(parent, child):
776 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
781 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
777 refine=True)
782 refine=True)
778 for (a1, a2, b1, b2), t in blocks:
783 for (a1, a2, b1, b2), t in blocks:
779 # Changed blocks ('!') or blocks made only of blank lines ('~')
784 # Changed blocks ('!') or blocks made only of blank lines ('~')
780 # belong to the child.
785 # belong to the child.
781 if t == '=':
786 if t == '=':
782 child[0][b1:b2] = parent[0][a1:a2]
787 child[0][b1:b2] = parent[0][a1:a2]
783 return child
788 return child
784
789
785 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
790 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
786
791
787 def parents(f):
792 def parents(f):
788 pl = f.parents()
793 pl = f.parents()
789
794
790 # Don't return renamed parents if we aren't following.
795 # Don't return renamed parents if we aren't following.
791 if not follow:
796 if not follow:
792 pl = [p for p in pl if p.path() == f.path()]
797 pl = [p for p in pl if p.path() == f.path()]
793
798
794 # renamed filectx won't have a filelog yet, so set it
799 # renamed filectx won't have a filelog yet, so set it
795 # from the cache to save time
800 # from the cache to save time
796 for p in pl:
801 for p in pl:
797 if not '_filelog' in p.__dict__:
802 if not '_filelog' in p.__dict__:
798 p._filelog = getlog(p.path())
803 p._filelog = getlog(p.path())
799
804
800 return pl
805 return pl
801
806
802 # use linkrev to find the first changeset where self appeared
807 # use linkrev to find the first changeset where self appeared
803 if self.rev() != self.linkrev():
808 if self.rev() != self.linkrev():
804 base = self.filectx(self.filenode())
809 base = self.filectx(self.filenode())
805 else:
810 else:
806 base = self
811 base = self
807
812
808 # This algorithm would prefer to be recursive, but Python is a
813 # This algorithm would prefer to be recursive, but Python is a
809 # bit recursion-hostile. Instead we do an iterative
814 # bit recursion-hostile. Instead we do an iterative
810 # depth-first search.
815 # depth-first search.
811
816
812 visit = [base]
817 visit = [base]
813 hist = {}
818 hist = {}
814 pcache = {}
819 pcache = {}
815 needed = {base: 1}
820 needed = {base: 1}
816 while visit:
821 while visit:
817 f = visit[-1]
822 f = visit[-1]
818 pcached = f in pcache
823 pcached = f in pcache
819 if not pcached:
824 if not pcached:
820 pcache[f] = parents(f)
825 pcache[f] = parents(f)
821
826
822 ready = True
827 ready = True
823 pl = pcache[f]
828 pl = pcache[f]
824 for p in pl:
829 for p in pl:
825 if p not in hist:
830 if p not in hist:
826 ready = False
831 ready = False
827 visit.append(p)
832 visit.append(p)
828 if not pcached:
833 if not pcached:
829 needed[p] = needed.get(p, 0) + 1
834 needed[p] = needed.get(p, 0) + 1
830 if ready:
835 if ready:
831 visit.pop()
836 visit.pop()
832 reusable = f in hist
837 reusable = f in hist
833 if reusable:
838 if reusable:
834 curr = hist[f]
839 curr = hist[f]
835 else:
840 else:
836 curr = decorate(f.data(), f)
841 curr = decorate(f.data(), f)
837 for p in pl:
842 for p in pl:
838 if not reusable:
843 if not reusable:
839 curr = pair(hist[p], curr)
844 curr = pair(hist[p], curr)
840 if needed[p] == 1:
845 if needed[p] == 1:
841 del hist[p]
846 del hist[p]
842 del needed[p]
847 del needed[p]
843 else:
848 else:
844 needed[p] -= 1
849 needed[p] -= 1
845
850
846 hist[f] = curr
851 hist[f] = curr
847 pcache[f] = []
852 pcache[f] = []
848
853
849 return zip(hist[base][0], hist[base][1].splitlines(True))
854 return zip(hist[base][0], hist[base][1].splitlines(True))
850
855
851 def ancestors(self, followfirst=False):
856 def ancestors(self, followfirst=False):
852 visit = {}
857 visit = {}
853 c = self
858 c = self
854 cut = followfirst and 1 or None
859 cut = followfirst and 1 or None
855 while True:
860 while True:
856 for parent in c.parents()[:cut]:
861 for parent in c.parents()[:cut]:
857 visit[(parent.rev(), parent.node())] = parent
862 visit[(parent.rev(), parent.node())] = parent
858 if not visit:
863 if not visit:
859 break
864 break
860 c = visit.pop(max(visit))
865 c = visit.pop(max(visit))
861 yield c
866 yield c
862
867
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""

    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # At least one way of locating the file revision must be given.
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # Only seed the lazily-computed (propertycache) attributes that the
        # caller actually supplied; the rest resolve on first access.
        if filelog is not None:
            self._filelog = filelog
        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.RepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog)

    def data(self):
        """Return the file content, honoring the censor policy."""
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy", "abort") == "ignore":
                return ""
            raise util.Abort(_("censored node: %s") % short(self._filenode),
                             hint=_("set censor.policy to ignore errors"))

    def size(self):
        """Size of this file revision as recorded in the filelog."""
        return self._filelog.size(self._filerev)

    def renamed(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """
        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                # identical file node in a parent: not a rename here
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        kids = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=node,
                        filelog=self._filelog) for node in kids]
960
965
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""

    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # Only pin the lazily-computed attributes the caller provided;
        # the propertycache fallbacks fill in the rest on demand.
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        # first parent plus '+' marks an uncommitted context
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get

        if len(self._parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = self._parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = self._parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuse the file nodeid from parent, but we append an extra letter
        when modified. Modified files get an extra 'm' while added files get
        an extra 'a'. This is used by manifests merge to see that files
        are different and by update logic to avoid deleting newly added files.
        """
        man1 = self._parents[0].manifest()
        man = man1.copy()
        if len(self._parents) > 1:
            man2 = self.p2().manifest()
            def getman(f):
                # prefer the first parent's manifest when the file is there
                if f in man1:
                    return man1
                return man2
        else:
            def getman(f):
                return man1

        copied = self._repo.dirstate.copies()
        ff = self._flagfunc
        for i, l in (("a", self._status.added), ("m", self._status.modified)):
            for f in l:
                orig = copied.get(f, f)
                man[f] = getman(orig).get(orig, nullid) + i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in self._status.deleted + self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    def subrev(self, subpath):
        # an uncommitted context has no recorded subrepo revision
        return None

    # -- commit metadata accessors -----------------------------------------
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    # -- status-list accessors ---------------------------------------------
    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def unknown(self):
        return self._status.unknown
    def ignored(self):
        return self._status.ignored
    def clean(self):
        return self._status.clean
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        """Tags inherited from the parent changesets."""
        tagged = []
        for p in self.parents():
            tagged.extend(p.tags())
        return tagged

    def bookmarks(self):
        """Bookmarks inherited from the parent changesets."""
        marks = []
        for p in self.parents():
            marks.extend(p.bookmarks())
        return marks

    def phase(self):
        """Highest (most private) phase among the parents, at least draft."""
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        # use the synthesized manifest's flags when it has been built
        if '_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """
        self._repo.dirstate.beginparentchange()
        for f in self.modified() + self.added():
            self._repo.dirstate.normal(f)
        for f in self.removed():
            self._repo.dirstate.drop(f)
        self._repo.dirstate.setparents(node)
        self._repo.dirstate.endparentchange()

    def dirs(self):
        return self._repo.dirstate.dirs()
1187
1192
1188 class workingctx(committablectx):
1193 class workingctx(committablectx):
1189 """A workingctx object makes access to data related to
1194 """A workingctx object makes access to data related to
1190 the current working directory convenient.
1195 the current working directory convenient.
1191 date - any valid date string or (unixtime, offset), or None.
1196 date - any valid date string or (unixtime, offset), or None.
1192 user - username string, or None.
1197 user - username string, or None.
1193 extra - a dictionary of extra values, or None.
1198 extra - a dictionary of extra values, or None.
1194 changes - a list of file lists as returned by localrepo.status()
1199 changes - a list of file lists as returned by localrepo.status()
1195 or None to use the repository status.
1200 or None to use the repository status.
1196 """
1201 """
1197 def __init__(self, repo, text="", user=None, date=None, extra=None,
1202 def __init__(self, repo, text="", user=None, date=None, extra=None,
1198 changes=None):
1203 changes=None):
1199 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1204 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1200
1205
1201 def __iter__(self):
1206 def __iter__(self):
1202 d = self._repo.dirstate
1207 d = self._repo.dirstate
1203 for f in d:
1208 for f in d:
1204 if d[f] != 'r':
1209 if d[f] != 'r':
1205 yield f
1210 yield f
1206
1211
1207 def __contains__(self, key):
1212 def __contains__(self, key):
1208 return self._repo.dirstate[key] not in "?r"
1213 return self._repo.dirstate[key] not in "?r"
1209
1214
1210 @propertycache
1215 @propertycache
1211 def _parents(self):
1216 def _parents(self):
1212 p = self._repo.dirstate.parents()
1217 p = self._repo.dirstate.parents()
1213 if p[1] == nullid:
1218 if p[1] == nullid:
1214 p = p[:-1]
1219 p = p[:-1]
1215 return [changectx(self._repo, x) for x in p]
1220 return [changectx(self._repo, x) for x in p]
1216
1221
1217 def filectx(self, path, filelog=None):
1222 def filectx(self, path, filelog=None):
1218 """get a file context from the working directory"""
1223 """get a file context from the working directory"""
1219 return workingfilectx(self._repo, path, workingctx=self,
1224 return workingfilectx(self._repo, path, workingctx=self,
1220 filelog=filelog)
1225 filelog=filelog)
1221
1226
1222 def dirty(self, missing=False, merge=True, branch=True):
1227 def dirty(self, missing=False, merge=True, branch=True):
1223 "check whether a working directory is modified"
1228 "check whether a working directory is modified"
1224 # check subrepos first
1229 # check subrepos first
1225 for s in sorted(self.substate):
1230 for s in sorted(self.substate):
1226 if self.sub(s).dirty():
1231 if self.sub(s).dirty():
1227 return True
1232 return True
1228 # check current working dir
1233 # check current working dir
1229 return ((merge and self.p2()) or
1234 return ((merge and self.p2()) or
1230 (branch and self.branch() != self.p1().branch()) or
1235 (branch and self.branch() != self.p1().branch()) or
1231 self.modified() or self.added() or self.removed() or
1236 self.modified() or self.added() or self.removed() or
1232 (missing and self.deleted()))
1237 (missing and self.deleted()))
1233
1238
1234 def add(self, list, prefix=""):
1239 def add(self, list, prefix=""):
1235 join = lambda f: os.path.join(prefix, f)
1240 join = lambda f: os.path.join(prefix, f)
1236 wlock = self._repo.wlock()
1241 wlock = self._repo.wlock()
1237 ui, ds = self._repo.ui, self._repo.dirstate
1242 ui, ds = self._repo.ui, self._repo.dirstate
1238 try:
1243 try:
1239 rejected = []
1244 rejected = []
1240 lstat = self._repo.wvfs.lstat
1245 lstat = self._repo.wvfs.lstat
1241 for f in list:
1246 for f in list:
1242 scmutil.checkportable(ui, join(f))
1247 scmutil.checkportable(ui, join(f))
1243 try:
1248 try:
1244 st = lstat(f)
1249 st = lstat(f)
1245 except OSError:
1250 except OSError:
1246 ui.warn(_("%s does not exist!\n") % join(f))
1251 ui.warn(_("%s does not exist!\n") % join(f))
1247 rejected.append(f)
1252 rejected.append(f)
1248 continue
1253 continue
1249 if st.st_size > 10000000:
1254 if st.st_size > 10000000:
1250 ui.warn(_("%s: up to %d MB of RAM may be required "
1255 ui.warn(_("%s: up to %d MB of RAM may be required "
1251 "to manage this file\n"
1256 "to manage this file\n"
1252 "(use 'hg revert %s' to cancel the "
1257 "(use 'hg revert %s' to cancel the "
1253 "pending addition)\n")
1258 "pending addition)\n")
1254 % (f, 3 * st.st_size // 1000000, join(f)))
1259 % (f, 3 * st.st_size // 1000000, join(f)))
1255 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1260 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1256 ui.warn(_("%s not added: only files and symlinks "
1261 ui.warn(_("%s not added: only files and symlinks "
1257 "supported currently\n") % join(f))
1262 "supported currently\n") % join(f))
1258 rejected.append(f)
1263 rejected.append(f)
1259 elif ds[f] in 'amn':
1264 elif ds[f] in 'amn':
1260 ui.warn(_("%s already tracked!\n") % join(f))
1265 ui.warn(_("%s already tracked!\n") % join(f))
1261 elif ds[f] == 'r':
1266 elif ds[f] == 'r':
1262 ds.normallookup(f)
1267 ds.normallookup(f)
1263 else:
1268 else:
1264 ds.add(f)
1269 ds.add(f)
1265 return rejected
1270 return rejected
1266 finally:
1271 finally:
1267 wlock.release()
1272 wlock.release()
1268
1273
1269 def forget(self, files, prefix=""):
1274 def forget(self, files, prefix=""):
1270 join = lambda f: os.path.join(prefix, f)
1275 join = lambda f: os.path.join(prefix, f)
1271 wlock = self._repo.wlock()
1276 wlock = self._repo.wlock()
1272 try:
1277 try:
1273 rejected = []
1278 rejected = []
1274 for f in files:
1279 for f in files:
1275 if f not in self._repo.dirstate:
1280 if f not in self._repo.dirstate:
1276 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1281 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1277 rejected.append(f)
1282 rejected.append(f)
1278 elif self._repo.dirstate[f] != 'a':
1283 elif self._repo.dirstate[f] != 'a':
1279 self._repo.dirstate.remove(f)
1284 self._repo.dirstate.remove(f)
1280 else:
1285 else:
1281 self._repo.dirstate.drop(f)
1286 self._repo.dirstate.drop(f)
1282 return rejected
1287 return rejected
1283 finally:
1288 finally:
1284 wlock.release()
1289 wlock.release()
1285
1290
1286 def undelete(self, list):
1291 def undelete(self, list):
1287 pctxs = self.parents()
1292 pctxs = self.parents()
1288 wlock = self._repo.wlock()
1293 wlock = self._repo.wlock()
1289 try:
1294 try:
1290 for f in list:
1295 for f in list:
1291 if self._repo.dirstate[f] != 'r':
1296 if self._repo.dirstate[f] != 'r':
1292 self._repo.ui.warn(_("%s not removed!\n") % f)
1297 self._repo.ui.warn(_("%s not removed!\n") % f)
1293 else:
1298 else:
1294 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1299 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1295 t = fctx.data()
1300 t = fctx.data()
1296 self._repo.wwrite(f, t, fctx.flags())
1301 self._repo.wwrite(f, t, fctx.flags())
1297 self._repo.dirstate.normal(f)
1302 self._repo.dirstate.normal(f)
1298 finally:
1303 finally:
1299 wlock.release()
1304 wlock.release()
1300
1305
1301 def copy(self, source, dest):
1306 def copy(self, source, dest):
1302 try:
1307 try:
1303 st = self._repo.wvfs.lstat(dest)
1308 st = self._repo.wvfs.lstat(dest)
1304 except OSError, err:
1309 except OSError, err:
1305 if err.errno != errno.ENOENT:
1310 if err.errno != errno.ENOENT:
1306 raise
1311 raise
1307 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1312 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1308 return
1313 return
1309 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1314 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1310 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1315 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1311 "symbolic link\n") % dest)
1316 "symbolic link\n") % dest)
1312 else:
1317 else:
1313 wlock = self._repo.wlock()
1318 wlock = self._repo.wlock()
1314 try:
1319 try:
1315 if self._repo.dirstate[dest] in '?':
1320 if self._repo.dirstate[dest] in '?':
1316 self._repo.dirstate.add(dest)
1321 self._repo.dirstate.add(dest)
1317 elif self._repo.dirstate[dest] in 'r':
1322 elif self._repo.dirstate[dest] in 'r':
1318 self._repo.dirstate.normallookup(dest)
1323 self._repo.dirstate.normallookup(dest)
1319 self._repo.dirstate.copy(source, dest)
1324 self._repo.dirstate.copy(source, dest)
1320 finally:
1325 finally:
1321 wlock.release()
1326 wlock.release()
1322
1327
1323 def _filtersuspectsymlink(self, files):
1328 def _filtersuspectsymlink(self, files):
1324 if not files or self._repo.dirstate._checklink:
1329 if not files or self._repo.dirstate._checklink:
1325 return files
1330 return files
1326
1331
1327 # Symlink placeholders may get non-symlink-like contents
1332 # Symlink placeholders may get non-symlink-like contents
1328 # via user error or dereferencing by NFS or Samba servers,
1333 # via user error or dereferencing by NFS or Samba servers,
1329 # so we filter out any placeholders that don't look like a
1334 # so we filter out any placeholders that don't look like a
1330 # symlink
1335 # symlink
1331 sane = []
1336 sane = []
1332 for f in files:
1337 for f in files:
1333 if self.flags(f) == 'l':
1338 if self.flags(f) == 'l':
1334 d = self[f].data()
1339 d = self[f].data()
1335 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1340 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1336 self._repo.ui.debug('ignoring suspect symlink placeholder'
1341 self._repo.ui.debug('ignoring suspect symlink placeholder'
1337 ' "%s"\n' % f)
1342 ' "%s"\n' % f)
1338 continue
1343 continue
1339 sane.append(f)
1344 sane.append(f)
1340 return sane
1345 return sane
1341
1346
1342 def _checklookup(self, files):
1347 def _checklookup(self, files):
1343 # check for any possibly clean files
1348 # check for any possibly clean files
1344 if not files:
1349 if not files:
1345 return [], []
1350 return [], []
1346
1351
1347 modified = []
1352 modified = []
1348 fixup = []
1353 fixup = []
1349 pctx = self._parents[0]
1354 pctx = self._parents[0]
1350 # do a full compare of any files that might have changed
1355 # do a full compare of any files that might have changed
1351 for f in sorted(files):
1356 for f in sorted(files):
1352 if (f not in pctx or self.flags(f) != pctx.flags(f)
1357 if (f not in pctx or self.flags(f) != pctx.flags(f)
1353 or pctx[f].cmp(self[f])):
1358 or pctx[f].cmp(self[f])):
1354 modified.append(f)
1359 modified.append(f)
1355 else:
1360 else:
1356 fixup.append(f)
1361 fixup.append(f)
1357
1362
1358 # update dirstate for files that are actually clean
1363 # update dirstate for files that are actually clean
1359 if fixup:
1364 if fixup:
1360 try:
1365 try:
1361 # updating the dirstate is optional
1366 # updating the dirstate is optional
1362 # so we don't wait on the lock
1367 # so we don't wait on the lock
1363 # wlock can invalidate the dirstate, so cache normal _after_
1368 # wlock can invalidate the dirstate, so cache normal _after_
1364 # taking the lock
1369 # taking the lock
1365 wlock = self._repo.wlock(False)
1370 wlock = self._repo.wlock(False)
1366 normal = self._repo.dirstate.normal
1371 normal = self._repo.dirstate.normal
1367 try:
1372 try:
1368 for f in fixup:
1373 for f in fixup:
1369 normal(f)
1374 normal(f)
1370 finally:
1375 finally:
1371 wlock.release()
1376 wlock.release()
1372 except error.LockError:
1377 except error.LockError:
1373 pass
1378 pass
1374 return modified, fixup
1379 return modified, fixup
1375
1380
1376 def _manifestmatches(self, match, s):
1381 def _manifestmatches(self, match, s):
1377 """Slow path for workingctx
1382 """Slow path for workingctx
1378
1383
1379 The fast path is when we compare the working directory to its parent
1384 The fast path is when we compare the working directory to its parent
1380 which means this function is comparing with a non-parent; therefore we
1385 which means this function is comparing with a non-parent; therefore we
1381 need to build a manifest and return what matches.
1386 need to build a manifest and return what matches.
1382 """
1387 """
1383 mf = self._repo['.']._manifestmatches(match, s)
1388 mf = self._repo['.']._manifestmatches(match, s)
1384 for f in s.modified + s.added:
1389 for f in s.modified + s.added:
1385 mf[f] = None
1390 mf[f] = _newnode
1386 mf.setflag(f, self.flags(f))
1391 mf.setflag(f, self.flags(f))
1387 for f in s.removed:
1392 for f in s.removed:
1388 if f in mf:
1393 if f in mf:
1389 del mf[f]
1394 del mf[f]
1390 return mf
1395 return mf
1391
1396
1392 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1397 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1393 unknown=False):
1398 unknown=False):
1394 '''Gets the status from the dirstate -- internal use only.'''
1399 '''Gets the status from the dirstate -- internal use only.'''
1395 listignored, listclean, listunknown = ignored, clean, unknown
1400 listignored, listclean, listunknown = ignored, clean, unknown
1396 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1401 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1397 subrepos = []
1402 subrepos = []
1398 if '.hgsub' in self:
1403 if '.hgsub' in self:
1399 subrepos = sorted(self.substate)
1404 subrepos = sorted(self.substate)
1400 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1405 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1401 listclean, listunknown)
1406 listclean, listunknown)
1402
1407
1403 # check for any possibly clean files
1408 # check for any possibly clean files
1404 if cmp:
1409 if cmp:
1405 modified2, fixup = self._checklookup(cmp)
1410 modified2, fixup = self._checklookup(cmp)
1406 s.modified.extend(modified2)
1411 s.modified.extend(modified2)
1407
1412
1408 # update dirstate for files that are actually clean
1413 # update dirstate for files that are actually clean
1409 if fixup and listclean:
1414 if fixup and listclean:
1410 s.clean.extend(fixup)
1415 s.clean.extend(fixup)
1411
1416
1412 return s
1417 return s
1413
1418
1414 def _buildstatus(self, other, s, match, listignored, listclean,
1419 def _buildstatus(self, other, s, match, listignored, listclean,
1415 listunknown):
1420 listunknown):
1416 """build a status with respect to another context
1421 """build a status with respect to another context
1417
1422
1418 This includes logic for maintaining the fast path of status when
1423 This includes logic for maintaining the fast path of status when
1419 comparing the working directory against its parent, which is to skip
1424 comparing the working directory against its parent, which is to skip
1420 building a new manifest if self (working directory) is not comparing
1425 building a new manifest if self (working directory) is not comparing
1421 against its parent (repo['.']).
1426 against its parent (repo['.']).
1422 """
1427 """
1423 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1428 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1424 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1429 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1425 # might have accidentally ended up with the entire contents of the file
1430 # might have accidentally ended up with the entire contents of the file
1426 # they are supposed to be linking to.
1431 # they are supposed to be linking to.
1427 s.modified[:] = self._filtersuspectsymlink(s.modified)
1432 s.modified[:] = self._filtersuspectsymlink(s.modified)
1428 if other != self._repo['.']:
1433 if other != self._repo['.']:
1429 s = super(workingctx, self)._buildstatus(other, s, match,
1434 s = super(workingctx, self)._buildstatus(other, s, match,
1430 listignored, listclean,
1435 listignored, listclean,
1431 listunknown)
1436 listunknown)
1432 self._status = s
1437 self._status = s
1433 return s
1438 return s
1434
1439
1435 def _matchstatus(self, other, match):
1440 def _matchstatus(self, other, match):
1436 """override the match method with a filter for directory patterns
1441 """override the match method with a filter for directory patterns
1437
1442
1438 We use inheritance to customize the match.bad method only in cases of
1443 We use inheritance to customize the match.bad method only in cases of
1439 workingctx since it belongs only to the working directory when
1444 workingctx since it belongs only to the working directory when
1440 comparing against the parent changeset.
1445 comparing against the parent changeset.
1441
1446
1442 If we aren't comparing against the working directory's parent, then we
1447 If we aren't comparing against the working directory's parent, then we
1443 just use the default match object sent to us.
1448 just use the default match object sent to us.
1444 """
1449 """
1445 superself = super(workingctx, self)
1450 superself = super(workingctx, self)
1446 match = superself._matchstatus(other, match)
1451 match = superself._matchstatus(other, match)
1447 if other != self._repo['.']:
1452 if other != self._repo['.']:
1448 def bad(f, msg):
1453 def bad(f, msg):
1449 # 'f' may be a directory pattern from 'match.files()',
1454 # 'f' may be a directory pattern from 'match.files()',
1450 # so 'f not in ctx1' is not enough
1455 # so 'f not in ctx1' is not enough
1451 if f not in other and f not in other.dirs():
1456 if f not in other and f not in other.dirs():
1452 self._repo.ui.warn('%s: %s\n' %
1457 self._repo.ui.warn('%s: %s\n' %
1453 (self._repo.dirstate.pathto(f), msg))
1458 (self._repo.dirstate.pathto(f), msg))
1454 match.bad = bad
1459 match.bad = bad
1455 return match
1460 return match
1456
1461
1457 class committablefilectx(basefilectx):
1462 class committablefilectx(basefilectx):
1458 """A committablefilectx provides common functionality for a file context
1463 """A committablefilectx provides common functionality for a file context
1459 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1464 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1460 def __init__(self, repo, path, filelog=None, ctx=None):
1465 def __init__(self, repo, path, filelog=None, ctx=None):
1461 self._repo = repo
1466 self._repo = repo
1462 self._path = path
1467 self._path = path
1463 self._changeid = None
1468 self._changeid = None
1464 self._filerev = self._filenode = None
1469 self._filerev = self._filenode = None
1465
1470
1466 if filelog is not None:
1471 if filelog is not None:
1467 self._filelog = filelog
1472 self._filelog = filelog
1468 if ctx:
1473 if ctx:
1469 self._changectx = ctx
1474 self._changectx = ctx
1470
1475
1471 def __nonzero__(self):
1476 def __nonzero__(self):
1472 return True
1477 return True
1473
1478
1474 def parents(self):
1479 def parents(self):
1475 '''return parent filectxs, following copies if necessary'''
1480 '''return parent filectxs, following copies if necessary'''
1476 def filenode(ctx, path):
1481 def filenode(ctx, path):
1477 return ctx._manifest.get(path, nullid)
1482 return ctx._manifest.get(path, nullid)
1478
1483
1479 path = self._path
1484 path = self._path
1480 fl = self._filelog
1485 fl = self._filelog
1481 pcl = self._changectx._parents
1486 pcl = self._changectx._parents
1482 renamed = self.renamed()
1487 renamed = self.renamed()
1483
1488
1484 if renamed:
1489 if renamed:
1485 pl = [renamed + (None,)]
1490 pl = [renamed + (None,)]
1486 else:
1491 else:
1487 pl = [(path, filenode(pcl[0], path), fl)]
1492 pl = [(path, filenode(pcl[0], path), fl)]
1488
1493
1489 for pc in pcl[1:]:
1494 for pc in pcl[1:]:
1490 pl.append((path, filenode(pc, path), fl))
1495 pl.append((path, filenode(pc, path), fl))
1491
1496
1492 return [filectx(self._repo, p, fileid=n, filelog=l)
1497 return [filectx(self._repo, p, fileid=n, filelog=l)
1493 for p, n, l in pl if n != nullid]
1498 for p, n, l in pl if n != nullid]
1494
1499
1495 def children(self):
1500 def children(self):
1496 return []
1501 return []
1497
1502
1498 class workingfilectx(committablefilectx):
1503 class workingfilectx(committablefilectx):
1499 """A workingfilectx object makes access to data related to a particular
1504 """A workingfilectx object makes access to data related to a particular
1500 file in the working directory convenient."""
1505 file in the working directory convenient."""
1501 def __init__(self, repo, path, filelog=None, workingctx=None):
1506 def __init__(self, repo, path, filelog=None, workingctx=None):
1502 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1507 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1503
1508
1504 @propertycache
1509 @propertycache
1505 def _changectx(self):
1510 def _changectx(self):
1506 return workingctx(self._repo)
1511 return workingctx(self._repo)
1507
1512
1508 def data(self):
1513 def data(self):
1509 return self._repo.wread(self._path)
1514 return self._repo.wread(self._path)
1510 def renamed(self):
1515 def renamed(self):
1511 rp = self._repo.dirstate.copied(self._path)
1516 rp = self._repo.dirstate.copied(self._path)
1512 if not rp:
1517 if not rp:
1513 return None
1518 return None
1514 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1519 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1515
1520
1516 def size(self):
1521 def size(self):
1517 return self._repo.wvfs.lstat(self._path).st_size
1522 return self._repo.wvfs.lstat(self._path).st_size
1518 def date(self):
1523 def date(self):
1519 t, tz = self._changectx.date()
1524 t, tz = self._changectx.date()
1520 try:
1525 try:
1521 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1526 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1522 except OSError, err:
1527 except OSError, err:
1523 if err.errno != errno.ENOENT:
1528 if err.errno != errno.ENOENT:
1524 raise
1529 raise
1525 return (t, tz)
1530 return (t, tz)
1526
1531
1527 def cmp(self, fctx):
1532 def cmp(self, fctx):
1528 """compare with other file context
1533 """compare with other file context
1529
1534
1530 returns True if different than fctx.
1535 returns True if different than fctx.
1531 """
1536 """
1532 # fctx should be a filectx (not a workingfilectx)
1537 # fctx should be a filectx (not a workingfilectx)
1533 # invert comparison to reuse the same code path
1538 # invert comparison to reuse the same code path
1534 return fctx.cmp(self)
1539 return fctx.cmp(self)
1535
1540
1536 def remove(self, ignoremissing=False):
1541 def remove(self, ignoremissing=False):
1537 """wraps unlink for a repo's working directory"""
1542 """wraps unlink for a repo's working directory"""
1538 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1543 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1539
1544
1540 def write(self, data, flags):
1545 def write(self, data, flags):
1541 """wraps repo.wwrite"""
1546 """wraps repo.wwrite"""
1542 self._repo.wwrite(self._path, data, flags)
1547 self._repo.wwrite(self._path, data, flags)
1543
1548
1544 class memctx(committablectx):
1549 class memctx(committablectx):
1545 """Use memctx to perform in-memory commits via localrepo.commitctx().
1550 """Use memctx to perform in-memory commits via localrepo.commitctx().
1546
1551
1547 Revision information is supplied at initialization time while
1552 Revision information is supplied at initialization time while
1548 related files data and is made available through a callback
1553 related files data and is made available through a callback
1549 mechanism. 'repo' is the current localrepo, 'parents' is a
1554 mechanism. 'repo' is the current localrepo, 'parents' is a
1550 sequence of two parent revisions identifiers (pass None for every
1555 sequence of two parent revisions identifiers (pass None for every
1551 missing parent), 'text' is the commit message and 'files' lists
1556 missing parent), 'text' is the commit message and 'files' lists
1552 names of files touched by the revision (normalized and relative to
1557 names of files touched by the revision (normalized and relative to
1553 repository root).
1558 repository root).
1554
1559
1555 filectxfn(repo, memctx, path) is a callable receiving the
1560 filectxfn(repo, memctx, path) is a callable receiving the
1556 repository, the current memctx object and the normalized path of
1561 repository, the current memctx object and the normalized path of
1557 requested file, relative to repository root. It is fired by the
1562 requested file, relative to repository root. It is fired by the
1558 commit function for every file in 'files', but calls order is
1563 commit function for every file in 'files', but calls order is
1559 undefined. If the file is available in the revision being
1564 undefined. If the file is available in the revision being
1560 committed (updated or added), filectxfn returns a memfilectx
1565 committed (updated or added), filectxfn returns a memfilectx
1561 object. If the file was removed, filectxfn raises an
1566 object. If the file was removed, filectxfn raises an
1562 IOError. Moved files are represented by marking the source file
1567 IOError. Moved files are represented by marking the source file
1563 removed and the new file added with copy information (see
1568 removed and the new file added with copy information (see
1564 memfilectx).
1569 memfilectx).
1565
1570
1566 user receives the committer name and defaults to current
1571 user receives the committer name and defaults to current
1567 repository username, date is the commit date in any format
1572 repository username, date is the commit date in any format
1568 supported by util.parsedate() and defaults to current date, extra
1573 supported by util.parsedate() and defaults to current date, extra
1569 is a dictionary of metadata or is left empty.
1574 is a dictionary of metadata or is left empty.
1570 """
1575 """
1571
1576
1572 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1577 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1573 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1578 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1574 # this field to determine what to do in filectxfn.
1579 # this field to determine what to do in filectxfn.
1575 _returnnoneformissingfiles = True
1580 _returnnoneformissingfiles = True
1576
1581
1577 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1582 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1578 date=None, extra=None, editor=False):
1583 date=None, extra=None, editor=False):
1579 super(memctx, self).__init__(repo, text, user, date, extra)
1584 super(memctx, self).__init__(repo, text, user, date, extra)
1580 self._rev = None
1585 self._rev = None
1581 self._node = None
1586 self._node = None
1582 parents = [(p or nullid) for p in parents]
1587 parents = [(p or nullid) for p in parents]
1583 p1, p2 = parents
1588 p1, p2 = parents
1584 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1589 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1585 files = sorted(set(files))
1590 files = sorted(set(files))
1586 self._files = files
1591 self._files = files
1587 self.substate = {}
1592 self.substate = {}
1588
1593
1589 # if store is not callable, wrap it in a function
1594 # if store is not callable, wrap it in a function
1590 if not callable(filectxfn):
1595 if not callable(filectxfn):
1591 def getfilectx(repo, memctx, path):
1596 def getfilectx(repo, memctx, path):
1592 fctx = filectxfn[path]
1597 fctx = filectxfn[path]
1593 # this is weird but apparently we only keep track of one parent
1598 # this is weird but apparently we only keep track of one parent
1594 # (why not only store that instead of a tuple?)
1599 # (why not only store that instead of a tuple?)
1595 copied = fctx.renamed()
1600 copied = fctx.renamed()
1596 if copied:
1601 if copied:
1597 copied = copied[0]
1602 copied = copied[0]
1598 return memfilectx(repo, path, fctx.data(),
1603 return memfilectx(repo, path, fctx.data(),
1599 islink=fctx.islink(), isexec=fctx.isexec(),
1604 islink=fctx.islink(), isexec=fctx.isexec(),
1600 copied=copied, memctx=memctx)
1605 copied=copied, memctx=memctx)
1601 self._filectxfn = getfilectx
1606 self._filectxfn = getfilectx
1602 else:
1607 else:
1603 # "util.cachefunc" reduces invocation of possibly expensive
1608 # "util.cachefunc" reduces invocation of possibly expensive
1604 # "filectxfn" for performance (e.g. converting from another VCS)
1609 # "filectxfn" for performance (e.g. converting from another VCS)
1605 self._filectxfn = util.cachefunc(filectxfn)
1610 self._filectxfn = util.cachefunc(filectxfn)
1606
1611
1607 self._extra = extra and extra.copy() or {}
1612 self._extra = extra and extra.copy() or {}
1608 if self._extra.get('branch', '') == '':
1613 if self._extra.get('branch', '') == '':
1609 self._extra['branch'] = 'default'
1614 self._extra['branch'] = 'default'
1610
1615
1611 if editor:
1616 if editor:
1612 self._text = editor(self._repo, self, [])
1617 self._text = editor(self._repo, self, [])
1613 self._repo.savecommitmessage(self._text)
1618 self._repo.savecommitmessage(self._text)
1614
1619
1615 def filectx(self, path, filelog=None):
1620 def filectx(self, path, filelog=None):
1616 """get a file context from the working directory
1621 """get a file context from the working directory
1617
1622
1618 Returns None if file doesn't exist and should be removed."""
1623 Returns None if file doesn't exist and should be removed."""
1619 return self._filectxfn(self._repo, self, path)
1624 return self._filectxfn(self._repo, self, path)
1620
1625
1621 def commit(self):
1626 def commit(self):
1622 """commit context to the repo"""
1627 """commit context to the repo"""
1623 return self._repo.commitctx(self)
1628 return self._repo.commitctx(self)
1624
1629
1625 @propertycache
1630 @propertycache
1626 def _manifest(self):
1631 def _manifest(self):
1627 """generate a manifest based on the return values of filectxfn"""
1632 """generate a manifest based on the return values of filectxfn"""
1628
1633
1629 # keep this simple for now; just worry about p1
1634 # keep this simple for now; just worry about p1
1630 pctx = self._parents[0]
1635 pctx = self._parents[0]
1631 pman = pctx.manifest()
1636 pman = pctx.manifest()
1632 man = pctx.manifest().copy()
1637 man = pctx.manifest().copy()
1633
1638
1634 for f, fnode in pman.iteritems():
1639 for f, fnode in pman.iteritems():
1635 p1node = nullid
1640 p1node = nullid
1636 p2node = nullid
1641 p2node = nullid
1637 p = pctx[f].parents() # if file isn't in pctx, check p2?
1642 p = pctx[f].parents() # if file isn't in pctx, check p2?
1638 if len(p) > 0:
1643 if len(p) > 0:
1639 p1node = p[0].node()
1644 p1node = p[0].node()
1640 if len(p) > 1:
1645 if len(p) > 1:
1641 p2node = p[1].node()
1646 p2node = p[1].node()
1642 fctx = self[f]
1647 fctx = self[f]
1643 if fctx is None:
1648 if fctx is None:
1644 # removed file
1649 # removed file
1645 del man[f]
1650 del man[f]
1646 else:
1651 else:
1647 man[f] = revlog.hash(fctx.data(), p1node, p2node)
1652 man[f] = revlog.hash(fctx.data(), p1node, p2node)
1648
1653
1649 for f in self._status.added:
1654 for f in self._status.added:
1650 man[f] = revlog.hash(self[f].data(), nullid, nullid)
1655 man[f] = revlog.hash(self[f].data(), nullid, nullid)
1651
1656
1652 for f in self._status.removed:
1657 for f in self._status.removed:
1653 if f in man:
1658 if f in man:
1654 del man[f]
1659 del man[f]
1655
1660
1656 return man
1661 return man
1657
1662
1658 @propertycache
1663 @propertycache
1659 def _status(self):
1664 def _status(self):
1660 """Calculate exact status from ``files`` specified at construction
1665 """Calculate exact status from ``files`` specified at construction
1661 """
1666 """
1662 man1 = self.p1().manifest()
1667 man1 = self.p1().manifest()
1663 p2 = self._parents[1]
1668 p2 = self._parents[1]
1664 # "1 < len(self._parents)" can't be used for checking
1669 # "1 < len(self._parents)" can't be used for checking
1665 # existence of the 2nd parent, because "memctx._parents" is
1670 # existence of the 2nd parent, because "memctx._parents" is
1666 # explicitly initialized by the list, of which length is 2.
1671 # explicitly initialized by the list, of which length is 2.
1667 if p2.node() != nullid:
1672 if p2.node() != nullid:
1668 man2 = p2.manifest()
1673 man2 = p2.manifest()
1669 managing = lambda f: f in man1 or f in man2
1674 managing = lambda f: f in man1 or f in man2
1670 else:
1675 else:
1671 managing = lambda f: f in man1
1676 managing = lambda f: f in man1
1672
1677
1673 modified, added, removed = [], [], []
1678 modified, added, removed = [], [], []
1674 for f in self._files:
1679 for f in self._files:
1675 if not managing(f):
1680 if not managing(f):
1676 added.append(f)
1681 added.append(f)
1677 elif self[f]:
1682 elif self[f]:
1678 modified.append(f)
1683 modified.append(f)
1679 else:
1684 else:
1680 removed.append(f)
1685 removed.append(f)
1681
1686
1682 return scmutil.status(modified, added, removed, [], [], [], [])
1687 return scmutil.status(modified, added, removed, [], [], [], [])
1683
1688
1684 class memfilectx(committablefilectx):
1689 class memfilectx(committablefilectx):
1685 """memfilectx represents an in-memory file to commit.
1690 """memfilectx represents an in-memory file to commit.
1686
1691
1687 See memctx and committablefilectx for more details.
1692 See memctx and committablefilectx for more details.
1688 """
1693 """
1689 def __init__(self, repo, path, data, islink=False,
1694 def __init__(self, repo, path, data, islink=False,
1690 isexec=False, copied=None, memctx=None):
1695 isexec=False, copied=None, memctx=None):
1691 """
1696 """
1692 path is the normalized file path relative to repository root.
1697 path is the normalized file path relative to repository root.
1693 data is the file content as a string.
1698 data is the file content as a string.
1694 islink is True if the file is a symbolic link.
1699 islink is True if the file is a symbolic link.
1695 isexec is True if the file is executable.
1700 isexec is True if the file is executable.
1696 copied is the source file path if current file was copied in the
1701 copied is the source file path if current file was copied in the
1697 revision being committed, or None."""
1702 revision being committed, or None."""
1698 super(memfilectx, self).__init__(repo, path, None, memctx)
1703 super(memfilectx, self).__init__(repo, path, None, memctx)
1699 self._data = data
1704 self._data = data
1700 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1705 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1701 self._copied = None
1706 self._copied = None
1702 if copied:
1707 if copied:
1703 self._copied = (copied, nullid)
1708 self._copied = (copied, nullid)
1704
1709
1705 def data(self):
1710 def data(self):
1706 return self._data
1711 return self._data
1707 def size(self):
1712 def size(self):
1708 return len(self.data())
1713 return len(self.data())
1709 def flags(self):
1714 def flags(self):
1710 return self._flags
1715 return self._flags
1711 def renamed(self):
1716 def renamed(self):
1712 return self._copied
1717 return self._copied
1713
1718
1714 def remove(self, ignoremissing=False):
1719 def remove(self, ignoremissing=False):
1715 """wraps unlink for a repo's working directory"""
1720 """wraps unlink for a repo's working directory"""
1716 # need to figure out what to do here
1721 # need to figure out what to do here
1717 del self._changectx[self._path]
1722 del self._changectx[self._path]
1718
1723
1719 def write(self, data, flags):
1724 def write(self, data, flags):
1720 """wraps repo.wwrite"""
1725 """wraps repo.wwrite"""
1721 self._data = data
1726 self._data = data
@@ -1,438 +1,460 b''
1 # encoding.py - character transcoding support for Mercurial
1 # encoding.py - character transcoding support for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import error
8 import error
9 import unicodedata, locale, os
9 import unicodedata, locale, os
10
10
11 # These unicode characters are ignored by HFS+ (Apple Technote 1150,
12 # "Unicode Subtleties"), so we need to ignore them in some places for
13 # sanity.
14 _ignore = [unichr(int(x, 16)).encode("utf-8") for x in
15 "200c 200d 200e 200f 202a 202b 202c 202d 202e "
16 "206a 206b 206c 206d 206e 206f feff".split()]
17 # verify the next function will work
18 assert set([i[0] for i in _ignore]) == set(["\xe2", "\xef"])
19
20 def hfsignoreclean(s):
21 """Remove codepoints ignored by HFS+ from s.
22
23 >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
24 '.hg'
25 >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
26 '.hg'
27 """
28 if "\xe2" in s or "\xef" in s:
29 for c in _ignore:
30 s = s.replace(c, '')
31 return s
32
11 def _getpreferredencoding():
33 def _getpreferredencoding():
12 '''
34 '''
13 On darwin, getpreferredencoding ignores the locale environment and
35 On darwin, getpreferredencoding ignores the locale environment and
14 always returns mac-roman. http://bugs.python.org/issue6202 fixes this
36 always returns mac-roman. http://bugs.python.org/issue6202 fixes this
15 for Python 2.7 and up. This is the same corrected code for earlier
37 for Python 2.7 and up. This is the same corrected code for earlier
16 Python versions.
38 Python versions.
17
39
18 However, we can't use a version check for this method, as some distributions
40 However, we can't use a version check for this method, as some distributions
19 patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman
41 patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman
20 encoding, as it is unlikely that this encoding is the actually expected.
42 encoding, as it is unlikely that this encoding is the actually expected.
21 '''
43 '''
22 try:
44 try:
23 locale.CODESET
45 locale.CODESET
24 except AttributeError:
46 except AttributeError:
25 # Fall back to parsing environment variables :-(
47 # Fall back to parsing environment variables :-(
26 return locale.getdefaultlocale()[1]
48 return locale.getdefaultlocale()[1]
27
49
28 oldloc = locale.setlocale(locale.LC_CTYPE)
50 oldloc = locale.setlocale(locale.LC_CTYPE)
29 locale.setlocale(locale.LC_CTYPE, "")
51 locale.setlocale(locale.LC_CTYPE, "")
30 result = locale.nl_langinfo(locale.CODESET)
52 result = locale.nl_langinfo(locale.CODESET)
31 locale.setlocale(locale.LC_CTYPE, oldloc)
53 locale.setlocale(locale.LC_CTYPE, oldloc)
32
54
33 return result
55 return result
34
56
35 _encodingfixers = {
57 _encodingfixers = {
36 '646': lambda: 'ascii',
58 '646': lambda: 'ascii',
37 'ANSI_X3.4-1968': lambda: 'ascii',
59 'ANSI_X3.4-1968': lambda: 'ascii',
38 'mac-roman': _getpreferredencoding
60 'mac-roman': _getpreferredencoding
39 }
61 }
40
62
41 try:
63 try:
42 encoding = os.environ.get("HGENCODING")
64 encoding = os.environ.get("HGENCODING")
43 if not encoding:
65 if not encoding:
44 encoding = locale.getpreferredencoding() or 'ascii'
66 encoding = locale.getpreferredencoding() or 'ascii'
45 encoding = _encodingfixers.get(encoding, lambda: encoding)()
67 encoding = _encodingfixers.get(encoding, lambda: encoding)()
46 except locale.Error:
68 except locale.Error:
47 encoding = 'ascii'
69 encoding = 'ascii'
48 encodingmode = os.environ.get("HGENCODINGMODE", "strict")
70 encodingmode = os.environ.get("HGENCODINGMODE", "strict")
49 fallbackencoding = 'ISO-8859-1'
71 fallbackencoding = 'ISO-8859-1'
50
72
51 class localstr(str):
73 class localstr(str):
52 '''This class allows strings that are unmodified to be
74 '''This class allows strings that are unmodified to be
53 round-tripped to the local encoding and back'''
75 round-tripped to the local encoding and back'''
54 def __new__(cls, u, l):
76 def __new__(cls, u, l):
55 s = str.__new__(cls, l)
77 s = str.__new__(cls, l)
56 s._utf8 = u
78 s._utf8 = u
57 return s
79 return s
58 def __hash__(self):
80 def __hash__(self):
59 return hash(self._utf8) # avoid collisions in local string space
81 return hash(self._utf8) # avoid collisions in local string space
60
82
61 def tolocal(s):
83 def tolocal(s):
62 """
84 """
63 Convert a string from internal UTF-8 to local encoding
85 Convert a string from internal UTF-8 to local encoding
64
86
65 All internal strings should be UTF-8 but some repos before the
87 All internal strings should be UTF-8 but some repos before the
66 implementation of locale support may contain latin1 or possibly
88 implementation of locale support may contain latin1 or possibly
67 other character sets. We attempt to decode everything strictly
89 other character sets. We attempt to decode everything strictly
68 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
90 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
69 replace unknown characters.
91 replace unknown characters.
70
92
71 The localstr class is used to cache the known UTF-8 encoding of
93 The localstr class is used to cache the known UTF-8 encoding of
72 strings next to their local representation to allow lossless
94 strings next to their local representation to allow lossless
73 round-trip conversion back to UTF-8.
95 round-trip conversion back to UTF-8.
74
96
75 >>> u = 'foo: \\xc3\\xa4' # utf-8
97 >>> u = 'foo: \\xc3\\xa4' # utf-8
76 >>> l = tolocal(u)
98 >>> l = tolocal(u)
77 >>> l
99 >>> l
78 'foo: ?'
100 'foo: ?'
79 >>> fromlocal(l)
101 >>> fromlocal(l)
80 'foo: \\xc3\\xa4'
102 'foo: \\xc3\\xa4'
81 >>> u2 = 'foo: \\xc3\\xa1'
103 >>> u2 = 'foo: \\xc3\\xa1'
82 >>> d = { l: 1, tolocal(u2): 2 }
104 >>> d = { l: 1, tolocal(u2): 2 }
83 >>> len(d) # no collision
105 >>> len(d) # no collision
84 2
106 2
85 >>> 'foo: ?' in d
107 >>> 'foo: ?' in d
86 False
108 False
87 >>> l1 = 'foo: \\xe4' # historical latin1 fallback
109 >>> l1 = 'foo: \\xe4' # historical latin1 fallback
88 >>> l = tolocal(l1)
110 >>> l = tolocal(l1)
89 >>> l
111 >>> l
90 'foo: ?'
112 'foo: ?'
91 >>> fromlocal(l) # magically in utf-8
113 >>> fromlocal(l) # magically in utf-8
92 'foo: \\xc3\\xa4'
114 'foo: \\xc3\\xa4'
93 """
115 """
94
116
95 try:
117 try:
96 try:
118 try:
97 # make sure string is actually stored in UTF-8
119 # make sure string is actually stored in UTF-8
98 u = s.decode('UTF-8')
120 u = s.decode('UTF-8')
99 if encoding == 'UTF-8':
121 if encoding == 'UTF-8':
100 # fast path
122 # fast path
101 return s
123 return s
102 r = u.encode(encoding, "replace")
124 r = u.encode(encoding, "replace")
103 if u == r.decode(encoding):
125 if u == r.decode(encoding):
104 # r is a safe, non-lossy encoding of s
126 # r is a safe, non-lossy encoding of s
105 return r
127 return r
106 return localstr(s, r)
128 return localstr(s, r)
107 except UnicodeDecodeError:
129 except UnicodeDecodeError:
108 # we should only get here if we're looking at an ancient changeset
130 # we should only get here if we're looking at an ancient changeset
109 try:
131 try:
110 u = s.decode(fallbackencoding)
132 u = s.decode(fallbackencoding)
111 r = u.encode(encoding, "replace")
133 r = u.encode(encoding, "replace")
112 if u == r.decode(encoding):
134 if u == r.decode(encoding):
113 # r is a safe, non-lossy encoding of s
135 # r is a safe, non-lossy encoding of s
114 return r
136 return r
115 return localstr(u.encode('UTF-8'), r)
137 return localstr(u.encode('UTF-8'), r)
116 except UnicodeDecodeError:
138 except UnicodeDecodeError:
117 u = s.decode("utf-8", "replace") # last ditch
139 u = s.decode("utf-8", "replace") # last ditch
118 return u.encode(encoding, "replace") # can't round-trip
140 return u.encode(encoding, "replace") # can't round-trip
119 except LookupError, k:
141 except LookupError, k:
120 raise error.Abort(k, hint="please check your locale settings")
142 raise error.Abort(k, hint="please check your locale settings")
121
143
122 def fromlocal(s):
144 def fromlocal(s):
123 """
145 """
124 Convert a string from the local character encoding to UTF-8
146 Convert a string from the local character encoding to UTF-8
125
147
126 We attempt to decode strings using the encoding mode set by
148 We attempt to decode strings using the encoding mode set by
127 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
149 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
128 characters will cause an error message. Other modes include
150 characters will cause an error message. Other modes include
129 'replace', which replaces unknown characters with a special
151 'replace', which replaces unknown characters with a special
130 Unicode character, and 'ignore', which drops the character.
152 Unicode character, and 'ignore', which drops the character.
131 """
153 """
132
154
133 # can we do a lossless round-trip?
155 # can we do a lossless round-trip?
134 if isinstance(s, localstr):
156 if isinstance(s, localstr):
135 return s._utf8
157 return s._utf8
136
158
137 try:
159 try:
138 return s.decode(encoding, encodingmode).encode("utf-8")
160 return s.decode(encoding, encodingmode).encode("utf-8")
139 except UnicodeDecodeError, inst:
161 except UnicodeDecodeError, inst:
140 sub = s[max(0, inst.start - 10):inst.start + 10]
162 sub = s[max(0, inst.start - 10):inst.start + 10]
141 raise error.Abort("decoding near '%s': %s!" % (sub, inst))
163 raise error.Abort("decoding near '%s': %s!" % (sub, inst))
142 except LookupError, k:
164 except LookupError, k:
143 raise error.Abort(k, hint="please check your locale settings")
165 raise error.Abort(k, hint="please check your locale settings")
144
166
145 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
167 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
146 wide = (os.environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
168 wide = (os.environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
147 and "WFA" or "WF")
169 and "WFA" or "WF")
148
170
149 def colwidth(s):
171 def colwidth(s):
150 "Find the column width of a string for display in the local encoding"
172 "Find the column width of a string for display in the local encoding"
151 return ucolwidth(s.decode(encoding, 'replace'))
173 return ucolwidth(s.decode(encoding, 'replace'))
152
174
153 def ucolwidth(d):
175 def ucolwidth(d):
154 "Find the column width of a Unicode string for display"
176 "Find the column width of a Unicode string for display"
155 eaw = getattr(unicodedata, 'east_asian_width', None)
177 eaw = getattr(unicodedata, 'east_asian_width', None)
156 if eaw is not None:
178 if eaw is not None:
157 return sum([eaw(c) in wide and 2 or 1 for c in d])
179 return sum([eaw(c) in wide and 2 or 1 for c in d])
158 return len(d)
180 return len(d)
159
181
160 def getcols(s, start, c):
182 def getcols(s, start, c):
161 '''Use colwidth to find a c-column substring of s starting at byte
183 '''Use colwidth to find a c-column substring of s starting at byte
162 index start'''
184 index start'''
163 for x in xrange(start + c, len(s)):
185 for x in xrange(start + c, len(s)):
164 t = s[start:x]
186 t = s[start:x]
165 if colwidth(t) == c:
187 if colwidth(t) == c:
166 return t
188 return t
167
189
168 def trim(s, width, ellipsis='', leftside=False):
190 def trim(s, width, ellipsis='', leftside=False):
169 """Trim string 's' to at most 'width' columns (including 'ellipsis').
191 """Trim string 's' to at most 'width' columns (including 'ellipsis').
170
192
171 If 'leftside' is True, left side of string 's' is trimmed.
193 If 'leftside' is True, left side of string 's' is trimmed.
172 'ellipsis' is always placed at trimmed side.
194 'ellipsis' is always placed at trimmed side.
173
195
174 >>> ellipsis = '+++'
196 >>> ellipsis = '+++'
175 >>> from mercurial import encoding
197 >>> from mercurial import encoding
176 >>> encoding.encoding = 'utf-8'
198 >>> encoding.encoding = 'utf-8'
177 >>> t= '1234567890'
199 >>> t= '1234567890'
178 >>> print trim(t, 12, ellipsis=ellipsis)
200 >>> print trim(t, 12, ellipsis=ellipsis)
179 1234567890
201 1234567890
180 >>> print trim(t, 10, ellipsis=ellipsis)
202 >>> print trim(t, 10, ellipsis=ellipsis)
181 1234567890
203 1234567890
182 >>> print trim(t, 8, ellipsis=ellipsis)
204 >>> print trim(t, 8, ellipsis=ellipsis)
183 12345+++
205 12345+++
184 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
206 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
185 +++67890
207 +++67890
186 >>> print trim(t, 8)
208 >>> print trim(t, 8)
187 12345678
209 12345678
188 >>> print trim(t, 8, leftside=True)
210 >>> print trim(t, 8, leftside=True)
189 34567890
211 34567890
190 >>> print trim(t, 3, ellipsis=ellipsis)
212 >>> print trim(t, 3, ellipsis=ellipsis)
191 +++
213 +++
192 >>> print trim(t, 1, ellipsis=ellipsis)
214 >>> print trim(t, 1, ellipsis=ellipsis)
193 +
215 +
194 >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
216 >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
195 >>> t = u.encode(encoding.encoding)
217 >>> t = u.encode(encoding.encoding)
196 >>> print trim(t, 12, ellipsis=ellipsis)
218 >>> print trim(t, 12, ellipsis=ellipsis)
197 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
219 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
198 >>> print trim(t, 10, ellipsis=ellipsis)
220 >>> print trim(t, 10, ellipsis=ellipsis)
199 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
221 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
200 >>> print trim(t, 8, ellipsis=ellipsis)
222 >>> print trim(t, 8, ellipsis=ellipsis)
201 \xe3\x81\x82\xe3\x81\x84+++
223 \xe3\x81\x82\xe3\x81\x84+++
202 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
224 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
203 +++\xe3\x81\x88\xe3\x81\x8a
225 +++\xe3\x81\x88\xe3\x81\x8a
204 >>> print trim(t, 5)
226 >>> print trim(t, 5)
205 \xe3\x81\x82\xe3\x81\x84
227 \xe3\x81\x82\xe3\x81\x84
206 >>> print trim(t, 5, leftside=True)
228 >>> print trim(t, 5, leftside=True)
207 \xe3\x81\x88\xe3\x81\x8a
229 \xe3\x81\x88\xe3\x81\x8a
208 >>> print trim(t, 4, ellipsis=ellipsis)
230 >>> print trim(t, 4, ellipsis=ellipsis)
209 +++
231 +++
210 >>> print trim(t, 4, ellipsis=ellipsis, leftside=True)
232 >>> print trim(t, 4, ellipsis=ellipsis, leftside=True)
211 +++
233 +++
212 >>> t = '\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa' # invalid byte sequence
234 >>> t = '\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa' # invalid byte sequence
213 >>> print trim(t, 12, ellipsis=ellipsis)
235 >>> print trim(t, 12, ellipsis=ellipsis)
214 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
236 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
215 >>> print trim(t, 10, ellipsis=ellipsis)
237 >>> print trim(t, 10, ellipsis=ellipsis)
216 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
238 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
217 >>> print trim(t, 8, ellipsis=ellipsis)
239 >>> print trim(t, 8, ellipsis=ellipsis)
218 \x11\x22\x33\x44\x55+++
240 \x11\x22\x33\x44\x55+++
219 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
241 >>> print trim(t, 8, ellipsis=ellipsis, leftside=True)
220 +++\x66\x77\x88\x99\xaa
242 +++\x66\x77\x88\x99\xaa
221 >>> print trim(t, 8)
243 >>> print trim(t, 8)
222 \x11\x22\x33\x44\x55\x66\x77\x88
244 \x11\x22\x33\x44\x55\x66\x77\x88
223 >>> print trim(t, 8, leftside=True)
245 >>> print trim(t, 8, leftside=True)
224 \x33\x44\x55\x66\x77\x88\x99\xaa
246 \x33\x44\x55\x66\x77\x88\x99\xaa
225 >>> print trim(t, 3, ellipsis=ellipsis)
247 >>> print trim(t, 3, ellipsis=ellipsis)
226 +++
248 +++
227 >>> print trim(t, 1, ellipsis=ellipsis)
249 >>> print trim(t, 1, ellipsis=ellipsis)
228 +
250 +
229 """
251 """
230 try:
252 try:
231 u = s.decode(encoding)
253 u = s.decode(encoding)
232 except UnicodeDecodeError:
254 except UnicodeDecodeError:
233 if len(s) <= width: # trimming is not needed
255 if len(s) <= width: # trimming is not needed
234 return s
256 return s
235 width -= len(ellipsis)
257 width -= len(ellipsis)
236 if width <= 0: # no enough room even for ellipsis
258 if width <= 0: # no enough room even for ellipsis
237 return ellipsis[:width + len(ellipsis)]
259 return ellipsis[:width + len(ellipsis)]
238 if leftside:
260 if leftside:
239 return ellipsis + s[-width:]
261 return ellipsis + s[-width:]
240 return s[:width] + ellipsis
262 return s[:width] + ellipsis
241
263
242 if ucolwidth(u) <= width: # trimming is not needed
264 if ucolwidth(u) <= width: # trimming is not needed
243 return s
265 return s
244
266
245 width -= len(ellipsis)
267 width -= len(ellipsis)
246 if width <= 0: # no enough room even for ellipsis
268 if width <= 0: # no enough room even for ellipsis
247 return ellipsis[:width + len(ellipsis)]
269 return ellipsis[:width + len(ellipsis)]
248
270
249 if leftside:
271 if leftside:
250 uslice = lambda i: u[i:]
272 uslice = lambda i: u[i:]
251 concat = lambda s: ellipsis + s
273 concat = lambda s: ellipsis + s
252 else:
274 else:
253 uslice = lambda i: u[:-i]
275 uslice = lambda i: u[:-i]
254 concat = lambda s: s + ellipsis
276 concat = lambda s: s + ellipsis
255 for i in xrange(1, len(u)):
277 for i in xrange(1, len(u)):
256 usub = uslice(i)
278 usub = uslice(i)
257 if ucolwidth(usub) <= width:
279 if ucolwidth(usub) <= width:
258 return concat(usub.encode(encoding))
280 return concat(usub.encode(encoding))
259 return ellipsis # no enough room for multi-column characters
281 return ellipsis # no enough room for multi-column characters
260
282
261 def _asciilower(s):
283 def _asciilower(s):
262 '''convert a string to lowercase if ASCII
284 '''convert a string to lowercase if ASCII
263
285
264 Raises UnicodeDecodeError if non-ASCII characters are found.'''
286 Raises UnicodeDecodeError if non-ASCII characters are found.'''
265 s.decode('ascii')
287 s.decode('ascii')
266 return s.lower()
288 return s.lower()
267
289
268 def asciilower(s):
290 def asciilower(s):
269 # delay importing avoids cyclic dependency around "parsers" in
291 # delay importing avoids cyclic dependency around "parsers" in
270 # pure Python build (util => i18n => encoding => parsers => util)
292 # pure Python build (util => i18n => encoding => parsers => util)
271 import parsers
293 import parsers
272 impl = getattr(parsers, 'asciilower', _asciilower)
294 impl = getattr(parsers, 'asciilower', _asciilower)
273 global asciilower
295 global asciilower
274 asciilower = impl
296 asciilower = impl
275 return impl(s)
297 return impl(s)
276
298
277 def lower(s):
299 def lower(s):
278 "best-effort encoding-aware case-folding of local string s"
300 "best-effort encoding-aware case-folding of local string s"
279 try:
301 try:
280 return asciilower(s)
302 return asciilower(s)
281 except UnicodeDecodeError:
303 except UnicodeDecodeError:
282 pass
304 pass
283 try:
305 try:
284 if isinstance(s, localstr):
306 if isinstance(s, localstr):
285 u = s._utf8.decode("utf-8")
307 u = s._utf8.decode("utf-8")
286 else:
308 else:
287 u = s.decode(encoding, encodingmode)
309 u = s.decode(encoding, encodingmode)
288
310
289 lu = u.lower()
311 lu = u.lower()
290 if u == lu:
312 if u == lu:
291 return s # preserve localstring
313 return s # preserve localstring
292 return lu.encode(encoding)
314 return lu.encode(encoding)
293 except UnicodeError:
315 except UnicodeError:
294 return s.lower() # we don't know how to fold this except in ASCII
316 return s.lower() # we don't know how to fold this except in ASCII
295 except LookupError, k:
317 except LookupError, k:
296 raise error.Abort(k, hint="please check your locale settings")
318 raise error.Abort(k, hint="please check your locale settings")
297
319
298 def upper(s):
320 def upper(s):
299 "best-effort encoding-aware case-folding of local string s"
321 "best-effort encoding-aware case-folding of local string s"
300 try:
322 try:
301 s.decode('ascii') # throw exception for non-ASCII character
323 s.decode('ascii') # throw exception for non-ASCII character
302 return s.upper()
324 return s.upper()
303 except UnicodeDecodeError:
325 except UnicodeDecodeError:
304 pass
326 pass
305 try:
327 try:
306 if isinstance(s, localstr):
328 if isinstance(s, localstr):
307 u = s._utf8.decode("utf-8")
329 u = s._utf8.decode("utf-8")
308 else:
330 else:
309 u = s.decode(encoding, encodingmode)
331 u = s.decode(encoding, encodingmode)
310
332
311 uu = u.upper()
333 uu = u.upper()
312 if u == uu:
334 if u == uu:
313 return s # preserve localstring
335 return s # preserve localstring
314 return uu.encode(encoding)
336 return uu.encode(encoding)
315 except UnicodeError:
337 except UnicodeError:
316 return s.upper() # we don't know how to fold this except in ASCII
338 return s.upper() # we don't know how to fold this except in ASCII
317 except LookupError, k:
339 except LookupError, k:
318 raise error.Abort(k, hint="please check your locale settings")
340 raise error.Abort(k, hint="please check your locale settings")
319
341
320 _jsonmap = {}
342 _jsonmap = {}
321
343
322 def jsonescape(s):
344 def jsonescape(s):
323 '''returns a string suitable for JSON
345 '''returns a string suitable for JSON
324
346
325 JSON is problematic for us because it doesn't support non-Unicode
347 JSON is problematic for us because it doesn't support non-Unicode
326 bytes. To deal with this, we take the following approach:
348 bytes. To deal with this, we take the following approach:
327
349
328 - localstr objects are converted back to UTF-8
350 - localstr objects are converted back to UTF-8
329 - valid UTF-8/ASCII strings are passed as-is
351 - valid UTF-8/ASCII strings are passed as-is
330 - other strings are converted to UTF-8b surrogate encoding
352 - other strings are converted to UTF-8b surrogate encoding
331 - apply JSON-specified string escaping
353 - apply JSON-specified string escaping
332
354
333 (escapes are doubled in these tests)
355 (escapes are doubled in these tests)
334
356
335 >>> jsonescape('this is a test')
357 >>> jsonescape('this is a test')
336 'this is a test'
358 'this is a test'
337 >>> jsonescape('escape characters: \\0 \\x0b \\t \\n \\r \\" \\\\')
359 >>> jsonescape('escape characters: \\0 \\x0b \\t \\n \\r \\" \\\\')
338 'escape characters: \\\\u0000 \\\\u000b \\\\t \\\\n \\\\r \\\\" \\\\\\\\'
360 'escape characters: \\\\u0000 \\\\u000b \\\\t \\\\n \\\\r \\\\" \\\\\\\\'
339 >>> jsonescape('a weird byte: \\xdd')
361 >>> jsonescape('a weird byte: \\xdd')
340 'a weird byte: \\xed\\xb3\\x9d'
362 'a weird byte: \\xed\\xb3\\x9d'
341 >>> jsonescape('utf-8: caf\\xc3\\xa9')
363 >>> jsonescape('utf-8: caf\\xc3\\xa9')
342 'utf-8: caf\\xc3\\xa9'
364 'utf-8: caf\\xc3\\xa9'
343 >>> jsonescape('')
365 >>> jsonescape('')
344 ''
366 ''
345 '''
367 '''
346
368
347 if not _jsonmap:
369 if not _jsonmap:
348 for x in xrange(32):
370 for x in xrange(32):
349 _jsonmap[chr(x)] = "\u%04x" %x
371 _jsonmap[chr(x)] = "\u%04x" %x
350 for x in xrange(32, 256):
372 for x in xrange(32, 256):
351 c = chr(x)
373 c = chr(x)
352 _jsonmap[c] = c
374 _jsonmap[c] = c
353 _jsonmap['\t'] = '\\t'
375 _jsonmap['\t'] = '\\t'
354 _jsonmap['\n'] = '\\n'
376 _jsonmap['\n'] = '\\n'
355 _jsonmap['\"'] = '\\"'
377 _jsonmap['\"'] = '\\"'
356 _jsonmap['\\'] = '\\\\'
378 _jsonmap['\\'] = '\\\\'
357 _jsonmap['\b'] = '\\b'
379 _jsonmap['\b'] = '\\b'
358 _jsonmap['\f'] = '\\f'
380 _jsonmap['\f'] = '\\f'
359 _jsonmap['\r'] = '\\r'
381 _jsonmap['\r'] = '\\r'
360
382
361 return ''.join(_jsonmap[c] for c in toutf8b(s))
383 return ''.join(_jsonmap[c] for c in toutf8b(s))
362
384
363 def toutf8b(s):
385 def toutf8b(s):
364 '''convert a local, possibly-binary string into UTF-8b
386 '''convert a local, possibly-binary string into UTF-8b
365
387
366 This is intended as a generic method to preserve data when working
388 This is intended as a generic method to preserve data when working
367 with schemes like JSON and XML that have no provision for
389 with schemes like JSON and XML that have no provision for
368 arbitrary byte strings. As Mercurial often doesn't know
390 arbitrary byte strings. As Mercurial often doesn't know
369 what encoding data is in, we use so-called UTF-8b.
391 what encoding data is in, we use so-called UTF-8b.
370
392
371 If a string is already valid UTF-8 (or ASCII), it passes unmodified.
393 If a string is already valid UTF-8 (or ASCII), it passes unmodified.
372 Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
394 Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
373 uDC00-uDCFF.
395 uDC00-uDCFF.
374
396
375 Principles of operation:
397 Principles of operation:
376
398
377 - ASCII and UTF-8 data successfully round-trips and is understood
399 - ASCII and UTF-8 data successfully round-trips and is understood
378 by Unicode-oriented clients
400 by Unicode-oriented clients
379 - filenames and file contents in arbitrary other encodings can have
401 - filenames and file contents in arbitrary other encodings can have
380 be round-tripped or recovered by clueful clients
402 be round-tripped or recovered by clueful clients
381 - local strings that have a cached known UTF-8 encoding (aka
403 - local strings that have a cached known UTF-8 encoding (aka
382 localstr) get sent as UTF-8 so Unicode-oriented clients get the
404 localstr) get sent as UTF-8 so Unicode-oriented clients get the
383 Unicode data they want
405 Unicode data they want
384 - because we must preserve UTF-8 bytestring in places such as
406 - because we must preserve UTF-8 bytestring in places such as
385 filenames, metadata can't be roundtripped without help
407 filenames, metadata can't be roundtripped without help
386
408
387 (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
409 (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
388 arbitrary bytes into an internal Unicode format that can be
410 arbitrary bytes into an internal Unicode format that can be
389 re-encoded back into the original. Here we are exposing the
411 re-encoded back into the original. Here we are exposing the
390 internal surrogate encoding as a UTF-8 string.)
412 internal surrogate encoding as a UTF-8 string.)
391 '''
413 '''
392
414
393 if isinstance(s, localstr):
415 if isinstance(s, localstr):
394 return s._utf8
416 return s._utf8
395
417
396 try:
418 try:
397 s.decode('utf-8')
419 s.decode('utf-8')
398 return s
420 return s
399 except UnicodeDecodeError:
421 except UnicodeDecodeError:
400 # surrogate-encode any characters that don't round-trip
422 # surrogate-encode any characters that don't round-trip
401 s2 = s.decode('utf-8', 'ignore').encode('utf-8')
423 s2 = s.decode('utf-8', 'ignore').encode('utf-8')
402 r = ""
424 r = ""
403 pos = 0
425 pos = 0
404 for c in s:
426 for c in s:
405 if s2[pos:pos + 1] == c:
427 if s2[pos:pos + 1] == c:
406 r += c
428 r += c
407 pos += 1
429 pos += 1
408 else:
430 else:
409 r += unichr(0xdc00 + ord(c)).encode('utf-8')
431 r += unichr(0xdc00 + ord(c)).encode('utf-8')
410 return r
432 return r
411
433
412 def fromutf8b(s):
434 def fromutf8b(s):
413 '''Given a UTF-8b string, return a local, possibly-binary string.
435 '''Given a UTF-8b string, return a local, possibly-binary string.
414
436
415 return the original binary string. This
437 return the original binary string. This
416 is a round-trip process for strings like filenames, but metadata
438 is a round-trip process for strings like filenames, but metadata
417 that's was passed through tolocal will remain in UTF-8.
439 that's was passed through tolocal will remain in UTF-8.
418
440
419 >>> m = "\\xc3\\xa9\\x99abcd"
441 >>> m = "\\xc3\\xa9\\x99abcd"
420 >>> n = toutf8b(m)
442 >>> n = toutf8b(m)
421 >>> n
443 >>> n
422 '\\xc3\\xa9\\xed\\xb2\\x99abcd'
444 '\\xc3\\xa9\\xed\\xb2\\x99abcd'
423 >>> fromutf8b(n) == m
445 >>> fromutf8b(n) == m
424 True
446 True
425 '''
447 '''
426
448
427 # fast path - look for uDxxx prefixes in s
449 # fast path - look for uDxxx prefixes in s
428 if "\xed" not in s:
450 if "\xed" not in s:
429 return s
451 return s
430
452
431 u = s.decode("utf-8")
453 u = s.decode("utf-8")
432 r = ""
454 r = ""
433 for c in u:
455 for c in u:
434 if ord(c) & 0xff00 == 0xdc00:
456 if ord(c) & 0xff00 == 0xdc00:
435 r += chr(ord(c) & 0xff)
457 r += chr(ord(c) & 0xff)
436 else:
458 else:
437 r += c.encode("utf-8")
459 r += c.encode("utf-8")
438 return r
460 return r
@@ -1,284 +1,287 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import mdiff, parsers, error, revlog, util
9 import mdiff, parsers, error, revlog, util
10 import array, struct
10 import array, struct
11
11
12 class manifestdict(dict):
12 class manifestdict(dict):
13 def __init__(self, mapping=None, flags=None):
13 def __init__(self, mapping=None, flags=None):
14 if mapping is None:
14 if mapping is None:
15 mapping = {}
15 mapping = {}
16 if flags is None:
16 if flags is None:
17 flags = {}
17 flags = {}
18 dict.__init__(self, mapping)
18 dict.__init__(self, mapping)
19 self._flags = flags
19 self._flags = flags
20 def __setitem__(self, k, v):
21 assert v is not None
22 dict.__setitem__(self, k, v)
20 def flags(self, f):
23 def flags(self, f):
21 return self._flags.get(f, "")
24 return self._flags.get(f, "")
22 def withflags(self):
25 def withflags(self):
23 return set(self._flags.keys())
26 return set(self._flags.keys())
24 def setflag(self, f, flags):
27 def setflag(self, f, flags):
25 """Set the flags (symlink, executable) for path f."""
28 """Set the flags (symlink, executable) for path f."""
26 self._flags[f] = flags
29 self._flags[f] = flags
27 def copy(self):
30 def copy(self):
28 return manifestdict(self, dict.copy(self._flags))
31 return manifestdict(self, dict.copy(self._flags))
29 def intersectfiles(self, files):
32 def intersectfiles(self, files):
30 '''make a new manifestdict with the intersection of self with files
33 '''make a new manifestdict with the intersection of self with files
31
34
32 The algorithm assumes that files is much smaller than self.'''
35 The algorithm assumes that files is much smaller than self.'''
33 ret = manifestdict()
36 ret = manifestdict()
34 for fn in files:
37 for fn in files:
35 if fn in self:
38 if fn in self:
36 ret[fn] = self[fn]
39 ret[fn] = self[fn]
37 flags = self._flags.get(fn, None)
40 flags = self._flags.get(fn, None)
38 if flags:
41 if flags:
39 ret._flags[fn] = flags
42 ret._flags[fn] = flags
40 return ret
43 return ret
41
44
42 def matches(self, match):
45 def matches(self, match):
43 '''generate a new manifest filtered by the match argument'''
46 '''generate a new manifest filtered by the match argument'''
44 if match.always():
47 if match.always():
45 return self.copy()
48 return self.copy()
46
49
47 files = match.files()
50 files = match.files()
48 if (match.matchfn == match.exact or
51 if (match.matchfn == match.exact or
49 (not match.anypats() and util.all(fn in self for fn in files))):
52 (not match.anypats() and util.all(fn in self for fn in files))):
50 return self.intersectfiles(files)
53 return self.intersectfiles(files)
51
54
52 mf = self.copy()
55 mf = self.copy()
53 for fn in mf.keys():
56 for fn in mf.keys():
54 if not match(fn):
57 if not match(fn):
55 del mf[fn]
58 del mf[fn]
56 return mf
59 return mf
57
60
58 def diff(self, m2):
61 def diff(self, m2):
59 '''Finds changes between the current manifest and m2. The result is
62 '''Finds changes between the current manifest and m2. The result is
60 returned as a dict with filename as key and values of the form
63 returned as a dict with filename as key and values of the form
61 ((n1,fl1),(n2,fl2)), where n1/n2 is the nodeid in the current/other
64 ((n1,fl1),(n2,fl2)), where n1/n2 is the nodeid in the current/other
62 manifest and fl1/fl2 is the flag in the current/other manifest. Where
65 manifest and fl1/fl2 is the flag in the current/other manifest. Where
63 the file does not exist, the nodeid will be None and the flags will be
66 the file does not exist, the nodeid will be None and the flags will be
64 the empty string.'''
67 the empty string.'''
65 diff = {}
68 diff = {}
66
69
67 for fn, n1 in self.iteritems():
70 for fn, n1 in self.iteritems():
68 fl1 = self._flags.get(fn, '')
71 fl1 = self._flags.get(fn, '')
69 n2 = m2.get(fn, None)
72 n2 = m2.get(fn, None)
70 fl2 = m2._flags.get(fn, '')
73 fl2 = m2._flags.get(fn, '')
71 if n2 is None:
74 if n2 is None:
72 fl2 = ''
75 fl2 = ''
73 if n1 != n2 or fl1 != fl2:
76 if n1 != n2 or fl1 != fl2:
74 diff[fn] = ((n1, fl1), (n2, fl2))
77 diff[fn] = ((n1, fl1), (n2, fl2))
75
78
76 for fn, n2 in m2.iteritems():
79 for fn, n2 in m2.iteritems():
77 if fn not in self:
80 if fn not in self:
78 fl2 = m2._flags.get(fn, '')
81 fl2 = m2._flags.get(fn, '')
79 diff[fn] = ((None, ''), (n2, fl2))
82 diff[fn] = ((None, ''), (n2, fl2))
80
83
81 return diff
84 return diff
82
85
83 def text(self):
86 def text(self):
84 """Get the full data of this manifest as a bytestring."""
87 """Get the full data of this manifest as a bytestring."""
85 fl = sorted(self)
88 fl = sorted(self)
86 _checkforbidden(fl)
89 _checkforbidden(fl)
87
90
88 hex, flags = revlog.hex, self.flags
91 hex, flags = revlog.hex, self.flags
89 # if this is changed to support newlines in filenames,
92 # if this is changed to support newlines in filenames,
90 # be sure to check the templates/ dir again (especially *-raw.tmpl)
93 # be sure to check the templates/ dir again (especially *-raw.tmpl)
91 return ''.join("%s\0%s%s\n" % (f, hex(self[f]), flags(f)) for f in fl)
94 return ''.join("%s\0%s%s\n" % (f, hex(self[f]), flags(f)) for f in fl)
92
95
93 def fastdelta(self, base, changes):
96 def fastdelta(self, base, changes):
94 """Given a base manifest text as an array.array and a list of changes
97 """Given a base manifest text as an array.array and a list of changes
95 relative to that text, compute a delta that can be used by revlog.
98 relative to that text, compute a delta that can be used by revlog.
96 """
99 """
97 delta = []
100 delta = []
98 dstart = None
101 dstart = None
99 dend = None
102 dend = None
100 dline = [""]
103 dline = [""]
101 start = 0
104 start = 0
102 # zero copy representation of base as a buffer
105 # zero copy representation of base as a buffer
103 addbuf = util.buffer(base)
106 addbuf = util.buffer(base)
104
107
105 # start with a readonly loop that finds the offset of
108 # start with a readonly loop that finds the offset of
106 # each line and creates the deltas
109 # each line and creates the deltas
107 for f, todelete in changes:
110 for f, todelete in changes:
108 # bs will either be the index of the item or the insert point
111 # bs will either be the index of the item or the insert point
109 start, end = _msearch(addbuf, f, start)
112 start, end = _msearch(addbuf, f, start)
110 if not todelete:
113 if not todelete:
111 l = "%s\0%s%s\n" % (f, revlog.hex(self[f]), self.flags(f))
114 l = "%s\0%s%s\n" % (f, revlog.hex(self[f]), self.flags(f))
112 else:
115 else:
113 if start == end:
116 if start == end:
114 # item we want to delete was not found, error out
117 # item we want to delete was not found, error out
115 raise AssertionError(
118 raise AssertionError(
116 _("failed to remove %s from manifest") % f)
119 _("failed to remove %s from manifest") % f)
117 l = ""
120 l = ""
118 if dstart is not None and dstart <= start and dend >= start:
121 if dstart is not None and dstart <= start and dend >= start:
119 if dend < end:
122 if dend < end:
120 dend = end
123 dend = end
121 if l:
124 if l:
122 dline.append(l)
125 dline.append(l)
123 else:
126 else:
124 if dstart is not None:
127 if dstart is not None:
125 delta.append([dstart, dend, "".join(dline)])
128 delta.append([dstart, dend, "".join(dline)])
126 dstart = start
129 dstart = start
127 dend = end
130 dend = end
128 dline = [l]
131 dline = [l]
129
132
130 if dstart is not None:
133 if dstart is not None:
131 delta.append([dstart, dend, "".join(dline)])
134 delta.append([dstart, dend, "".join(dline)])
132 # apply the delta to the base, and get a delta for addrevision
135 # apply the delta to the base, and get a delta for addrevision
133 deltatext, arraytext = _addlistdelta(base, delta)
136 deltatext, arraytext = _addlistdelta(base, delta)
134 return arraytext, deltatext
137 return arraytext, deltatext
135
138
136 def _msearch(m, s, lo=0, hi=None):
139 def _msearch(m, s, lo=0, hi=None):
137 '''return a tuple (start, end) that says where to find s within m.
140 '''return a tuple (start, end) that says where to find s within m.
138
141
139 If the string is found m[start:end] are the line containing
142 If the string is found m[start:end] are the line containing
140 that string. If start == end the string was not found and
143 that string. If start == end the string was not found and
141 they indicate the proper sorted insertion point.
144 they indicate the proper sorted insertion point.
142
145
143 m should be a buffer or a string
146 m should be a buffer or a string
144 s is a string'''
147 s is a string'''
145 def advance(i, c):
148 def advance(i, c):
146 while i < lenm and m[i] != c:
149 while i < lenm and m[i] != c:
147 i += 1
150 i += 1
148 return i
151 return i
149 if not s:
152 if not s:
150 return (lo, lo)
153 return (lo, lo)
151 lenm = len(m)
154 lenm = len(m)
152 if not hi:
155 if not hi:
153 hi = lenm
156 hi = lenm
154 while lo < hi:
157 while lo < hi:
155 mid = (lo + hi) // 2
158 mid = (lo + hi) // 2
156 start = mid
159 start = mid
157 while start > 0 and m[start - 1] != '\n':
160 while start > 0 and m[start - 1] != '\n':
158 start -= 1
161 start -= 1
159 end = advance(start, '\0')
162 end = advance(start, '\0')
160 if m[start:end] < s:
163 if m[start:end] < s:
161 # we know that after the null there are 40 bytes of sha1
164 # we know that after the null there are 40 bytes of sha1
162 # this translates to the bisect lo = mid + 1
165 # this translates to the bisect lo = mid + 1
163 lo = advance(end + 40, '\n') + 1
166 lo = advance(end + 40, '\n') + 1
164 else:
167 else:
165 # this translates to the bisect hi = mid
168 # this translates to the bisect hi = mid
166 hi = start
169 hi = start
167 end = advance(lo, '\0')
170 end = advance(lo, '\0')
168 found = m[lo:end]
171 found = m[lo:end]
169 if s == found:
172 if s == found:
170 # we know that after the null there are 40 bytes of sha1
173 # we know that after the null there are 40 bytes of sha1
171 end = advance(end + 40, '\n')
174 end = advance(end + 40, '\n')
172 return (lo, end + 1)
175 return (lo, end + 1)
173 else:
176 else:
174 return (lo, lo)
177 return (lo, lo)
175
178
176 def _checkforbidden(l):
179 def _checkforbidden(l):
177 """Check filenames for illegal characters."""
180 """Check filenames for illegal characters."""
178 for f in l:
181 for f in l:
179 if '\n' in f or '\r' in f:
182 if '\n' in f or '\r' in f:
180 raise error.RevlogError(
183 raise error.RevlogError(
181 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
184 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
182
185
183
186
184 # apply the changes collected during the bisect loop to our addlist
187 # apply the changes collected during the bisect loop to our addlist
185 # return a delta suitable for addrevision
188 # return a delta suitable for addrevision
186 def _addlistdelta(addlist, x):
189 def _addlistdelta(addlist, x):
187 # for large addlist arrays, building a new array is cheaper
190 # for large addlist arrays, building a new array is cheaper
188 # than repeatedly modifying the existing one
191 # than repeatedly modifying the existing one
189 currentposition = 0
192 currentposition = 0
190 newaddlist = array.array('c')
193 newaddlist = array.array('c')
191
194
192 for start, end, content in x:
195 for start, end, content in x:
193 newaddlist += addlist[currentposition:start]
196 newaddlist += addlist[currentposition:start]
194 if content:
197 if content:
195 newaddlist += array.array('c', content)
198 newaddlist += array.array('c', content)
196
199
197 currentposition = end
200 currentposition = end
198
201
199 newaddlist += addlist[currentposition:]
202 newaddlist += addlist[currentposition:]
200
203
201 deltatext = "".join(struct.pack(">lll", start, end, len(content))
204 deltatext = "".join(struct.pack(">lll", start, end, len(content))
202 + content for start, end, content in x)
205 + content for start, end, content in x)
203 return deltatext, newaddlist
206 return deltatext, newaddlist
204
207
205 def _parse(lines):
208 def _parse(lines):
206 mfdict = manifestdict()
209 mfdict = manifestdict()
207 parsers.parse_manifest(mfdict, mfdict._flags, lines)
210 parsers.parse_manifest(mfdict, mfdict._flags, lines)
208 return mfdict
211 return mfdict
209
212
210 class manifest(revlog.revlog):
213 class manifest(revlog.revlog):
211 def __init__(self, opener):
214 def __init__(self, opener):
212 # we expect to deal with not more than four revs at a time,
215 # we expect to deal with not more than four revs at a time,
213 # during a commit --amend
216 # during a commit --amend
214 self._mancache = util.lrucachedict(4)
217 self._mancache = util.lrucachedict(4)
215 revlog.revlog.__init__(self, opener, "00manifest.i")
218 revlog.revlog.__init__(self, opener, "00manifest.i")
216
219
217 def readdelta(self, node):
220 def readdelta(self, node):
218 r = self.rev(node)
221 r = self.rev(node)
219 return _parse(mdiff.patchtext(self.revdiff(self.deltaparent(r), r)))
222 return _parse(mdiff.patchtext(self.revdiff(self.deltaparent(r), r)))
220
223
221 def readfast(self, node):
224 def readfast(self, node):
222 '''use the faster of readdelta or read'''
225 '''use the faster of readdelta or read'''
223 r = self.rev(node)
226 r = self.rev(node)
224 deltaparent = self.deltaparent(r)
227 deltaparent = self.deltaparent(r)
225 if deltaparent != revlog.nullrev and deltaparent in self.parentrevs(r):
228 if deltaparent != revlog.nullrev and deltaparent in self.parentrevs(r):
226 return self.readdelta(node)
229 return self.readdelta(node)
227 return self.read(node)
230 return self.read(node)
228
231
229 def read(self, node):
232 def read(self, node):
230 if node == revlog.nullid:
233 if node == revlog.nullid:
231 return manifestdict() # don't upset local cache
234 return manifestdict() # don't upset local cache
232 if node in self._mancache:
235 if node in self._mancache:
233 return self._mancache[node][0]
236 return self._mancache[node][0]
234 text = self.revision(node)
237 text = self.revision(node)
235 arraytext = array.array('c', text)
238 arraytext = array.array('c', text)
236 mapping = _parse(text)
239 mapping = _parse(text)
237 self._mancache[node] = (mapping, arraytext)
240 self._mancache[node] = (mapping, arraytext)
238 return mapping
241 return mapping
239
242
240 def find(self, node, f):
243 def find(self, node, f):
241 '''look up entry for a single file efficiently.
244 '''look up entry for a single file efficiently.
242 return (node, flags) pair if found, (None, None) if not.'''
245 return (node, flags) pair if found, (None, None) if not.'''
243 if node in self._mancache:
246 if node in self._mancache:
244 mapping = self._mancache[node][0]
247 mapping = self._mancache[node][0]
245 return mapping.get(f), mapping.flags(f)
248 return mapping.get(f), mapping.flags(f)
246 text = self.revision(node)
249 text = self.revision(node)
247 start, end = _msearch(text, f)
250 start, end = _msearch(text, f)
248 if start == end:
251 if start == end:
249 return None, None
252 return None, None
250 l = text[start:end]
253 l = text[start:end]
251 f, n = l.split('\0')
254 f, n = l.split('\0')
252 return revlog.bin(n[:40]), n[40:-1]
255 return revlog.bin(n[:40]), n[40:-1]
253
256
254 def add(self, map, transaction, link, p1, p2, added, removed):
257 def add(self, map, transaction, link, p1, p2, added, removed):
255 if p1 in self._mancache:
258 if p1 in self._mancache:
256 # If our first parent is in the manifest cache, we can
259 # If our first parent is in the manifest cache, we can
257 # compute a delta here using properties we know about the
260 # compute a delta here using properties we know about the
258 # manifest up-front, which may save time later for the
261 # manifest up-front, which may save time later for the
259 # revlog layer.
262 # revlog layer.
260
263
261 _checkforbidden(added)
264 _checkforbidden(added)
262 # combine the changed lists into one list for sorting
265 # combine the changed lists into one list for sorting
263 work = [(x, False) for x in added]
266 work = [(x, False) for x in added]
264 work.extend((x, True) for x in removed)
267 work.extend((x, True) for x in removed)
265 # this could use heapq.merge() (from Python 2.6+) or equivalent
268 # this could use heapq.merge() (from Python 2.6+) or equivalent
266 # since the lists are already sorted
269 # since the lists are already sorted
267 work.sort()
270 work.sort()
268
271
269 arraytext, deltatext = map.fastdelta(self._mancache[p1][1], work)
272 arraytext, deltatext = map.fastdelta(self._mancache[p1][1], work)
270 cachedelta = self.rev(p1), deltatext
273 cachedelta = self.rev(p1), deltatext
271 text = util.buffer(arraytext)
274 text = util.buffer(arraytext)
272 else:
275 else:
273 # The first parent manifest isn't already loaded, so we'll
276 # The first parent manifest isn't already loaded, so we'll
274 # just encode a fulltext of the manifest and pass that
277 # just encode a fulltext of the manifest and pass that
275 # through to the revlog layer, and let it handle the delta
278 # through to the revlog layer, and let it handle the delta
276 # process.
279 # process.
277 text = map.text()
280 text = map.text()
278 arraytext = array.array('c', text)
281 arraytext = array.array('c', text)
279 cachedelta = None
282 cachedelta = None
280
283
281 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
284 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
282 self._mancache[n] = (map, arraytext)
285 self._mancache[n] = (map, arraytext)
283
286
284 return n
287 return n
@@ -1,166 +1,177 b''
1 import os, errno, stat
1 import os, errno, stat
2
2
3 import encoding
3 import util
4 import util
4 from i18n import _
5 from i18n import _
5
6
7 def _lowerclean(s):
8 return encoding.hfsignoreclean(s.lower())
9
6 class pathauditor(object):
10 class pathauditor(object):
7 '''ensure that a filesystem path contains no banned components.
11 '''ensure that a filesystem path contains no banned components.
8 the following properties of a path are checked:
12 the following properties of a path are checked:
9
13
10 - ends with a directory separator
14 - ends with a directory separator
11 - under top-level .hg
15 - under top-level .hg
12 - starts at the root of a windows drive
16 - starts at the root of a windows drive
13 - contains ".."
17 - contains ".."
14 - traverses a symlink (e.g. a/symlink_here/b)
18 - traverses a symlink (e.g. a/symlink_here/b)
15 - inside a nested repository (a callback can be used to approve
19 - inside a nested repository (a callback can be used to approve
16 some nested repositories, e.g., subrepositories)
20 some nested repositories, e.g., subrepositories)
17 '''
21 '''
18
22
19 def __init__(self, root, callback=None):
23 def __init__(self, root, callback=None):
20 self.audited = set()
24 self.audited = set()
21 self.auditeddir = set()
25 self.auditeddir = set()
22 self.root = root
26 self.root = root
23 self.callback = callback
27 self.callback = callback
24 if os.path.lexists(root) and not util.checkcase(root):
28 if os.path.lexists(root) and not util.checkcase(root):
25 self.normcase = util.normcase
29 self.normcase = util.normcase
26 else:
30 else:
27 self.normcase = lambda x: x
31 self.normcase = lambda x: x
28
32
29 def __call__(self, path):
33 def __call__(self, path):
30 '''Check the relative path.
34 '''Check the relative path.
31 path may contain a pattern (e.g. foodir/**.txt)'''
35 path may contain a pattern (e.g. foodir/**.txt)'''
32
36
33 path = util.localpath(path)
37 path = util.localpath(path)
34 normpath = self.normcase(path)
38 normpath = self.normcase(path)
35 if normpath in self.audited:
39 if normpath in self.audited:
36 return
40 return
37 # AIX ignores "/" at end of path, others raise EISDIR.
41 # AIX ignores "/" at end of path, others raise EISDIR.
38 if util.endswithsep(path):
42 if util.endswithsep(path):
39 raise util.Abort(_("path ends in directory separator: %s") % path)
43 raise util.Abort(_("path ends in directory separator: %s") % path)
40 parts = util.splitpath(path)
44 parts = util.splitpath(path)
41 if (os.path.splitdrive(path)[0]
45 if (os.path.splitdrive(path)[0]
42 or parts[0].lower() in ('.hg', '.hg.', '')
46 or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
43 or os.pardir in parts):
47 or os.pardir in parts):
44 raise util.Abort(_("path contains illegal component: %s") % path)
48 raise util.Abort(_("path contains illegal component: %s") % path)
45 if '.hg' in path.lower():
49 # Windows shortname aliases
46 lparts = [p.lower() for p in parts]
50 for p in parts:
51 if "~" in p:
52 first, last = p.split("~", 1)
53 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
54 raise util.Abort(_("path contains illegal component: %s")
55 % path)
56 if '.hg' in _lowerclean(path):
57 lparts = [_lowerclean(p.lower()) for p in parts]
47 for p in '.hg', '.hg.':
58 for p in '.hg', '.hg.':
48 if p in lparts[1:]:
59 if p in lparts[1:]:
49 pos = lparts.index(p)
60 pos = lparts.index(p)
50 base = os.path.join(*parts[:pos])
61 base = os.path.join(*parts[:pos])
51 raise util.Abort(_("path '%s' is inside nested repo %r")
62 raise util.Abort(_("path '%s' is inside nested repo %r")
52 % (path, base))
63 % (path, base))
53
64
54 normparts = util.splitpath(normpath)
65 normparts = util.splitpath(normpath)
55 assert len(parts) == len(normparts)
66 assert len(parts) == len(normparts)
56
67
57 parts.pop()
68 parts.pop()
58 normparts.pop()
69 normparts.pop()
59 prefixes = []
70 prefixes = []
60 while parts:
71 while parts:
61 prefix = os.sep.join(parts)
72 prefix = os.sep.join(parts)
62 normprefix = os.sep.join(normparts)
73 normprefix = os.sep.join(normparts)
63 if normprefix in self.auditeddir:
74 if normprefix in self.auditeddir:
64 break
75 break
65 curpath = os.path.join(self.root, prefix)
76 curpath = os.path.join(self.root, prefix)
66 try:
77 try:
67 st = os.lstat(curpath)
78 st = os.lstat(curpath)
68 except OSError, err:
79 except OSError, err:
69 # EINVAL can be raised as invalid path syntax under win32.
80 # EINVAL can be raised as invalid path syntax under win32.
70 # They must be ignored for patterns can be checked too.
81 # They must be ignored for patterns can be checked too.
71 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
82 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
72 raise
83 raise
73 else:
84 else:
74 if stat.S_ISLNK(st.st_mode):
85 if stat.S_ISLNK(st.st_mode):
75 raise util.Abort(
86 raise util.Abort(
76 _('path %r traverses symbolic link %r')
87 _('path %r traverses symbolic link %r')
77 % (path, prefix))
88 % (path, prefix))
78 elif (stat.S_ISDIR(st.st_mode) and
89 elif (stat.S_ISDIR(st.st_mode) and
79 os.path.isdir(os.path.join(curpath, '.hg'))):
90 os.path.isdir(os.path.join(curpath, '.hg'))):
80 if not self.callback or not self.callback(curpath):
91 if not self.callback or not self.callback(curpath):
81 raise util.Abort(_("path '%s' is inside nested "
92 raise util.Abort(_("path '%s' is inside nested "
82 "repo %r")
93 "repo %r")
83 % (path, prefix))
94 % (path, prefix))
84 prefixes.append(normprefix)
95 prefixes.append(normprefix)
85 parts.pop()
96 parts.pop()
86 normparts.pop()
97 normparts.pop()
87
98
88 self.audited.add(normpath)
99 self.audited.add(normpath)
89 # only add prefixes to the cache after checking everything: we don't
100 # only add prefixes to the cache after checking everything: we don't
90 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
101 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
91 self.auditeddir.update(prefixes)
102 self.auditeddir.update(prefixes)
92
103
93 def check(self, path):
104 def check(self, path):
94 try:
105 try:
95 self(path)
106 self(path)
96 return True
107 return True
97 except (OSError, util.Abort):
108 except (OSError, util.Abort):
98 return False
109 return False
99
110
100 def canonpath(root, cwd, myname, auditor=None):
111 def canonpath(root, cwd, myname, auditor=None):
101 '''return the canonical path of myname, given cwd and root'''
112 '''return the canonical path of myname, given cwd and root'''
102 if util.endswithsep(root):
113 if util.endswithsep(root):
103 rootsep = root
114 rootsep = root
104 else:
115 else:
105 rootsep = root + os.sep
116 rootsep = root + os.sep
106 name = myname
117 name = myname
107 if not os.path.isabs(name):
118 if not os.path.isabs(name):
108 name = os.path.join(root, cwd, name)
119 name = os.path.join(root, cwd, name)
109 name = os.path.normpath(name)
120 name = os.path.normpath(name)
110 if auditor is None:
121 if auditor is None:
111 auditor = pathauditor(root)
122 auditor = pathauditor(root)
112 if name != rootsep and name.startswith(rootsep):
123 if name != rootsep and name.startswith(rootsep):
113 name = name[len(rootsep):]
124 name = name[len(rootsep):]
114 auditor(name)
125 auditor(name)
115 return util.pconvert(name)
126 return util.pconvert(name)
116 elif name == root:
127 elif name == root:
117 return ''
128 return ''
118 else:
129 else:
119 # Determine whether `name' is in the hierarchy at or beneath `root',
130 # Determine whether `name' is in the hierarchy at or beneath `root',
120 # by iterating name=dirname(name) until that causes no change (can't
131 # by iterating name=dirname(name) until that causes no change (can't
121 # check name == '/', because that doesn't work on windows). The list
132 # check name == '/', because that doesn't work on windows). The list
122 # `rel' holds the reversed list of components making up the relative
133 # `rel' holds the reversed list of components making up the relative
123 # file name we want.
134 # file name we want.
124 rel = []
135 rel = []
125 while True:
136 while True:
126 try:
137 try:
127 s = util.samefile(name, root)
138 s = util.samefile(name, root)
128 except OSError:
139 except OSError:
129 s = False
140 s = False
130 if s:
141 if s:
131 if not rel:
142 if not rel:
132 # name was actually the same as root (maybe a symlink)
143 # name was actually the same as root (maybe a symlink)
133 return ''
144 return ''
134 rel.reverse()
145 rel.reverse()
135 name = os.path.join(*rel)
146 name = os.path.join(*rel)
136 auditor(name)
147 auditor(name)
137 return util.pconvert(name)
148 return util.pconvert(name)
138 dirname, basename = util.split(name)
149 dirname, basename = util.split(name)
139 rel.append(basename)
150 rel.append(basename)
140 if dirname == name:
151 if dirname == name:
141 break
152 break
142 name = dirname
153 name = dirname
143
154
144 raise util.Abort(_("%s not under root '%s'") % (myname, root))
155 raise util.Abort(_("%s not under root '%s'") % (myname, root))
145
156
146 def normasprefix(path):
157 def normasprefix(path):
147 '''normalize the specified path as path prefix
158 '''normalize the specified path as path prefix
148
159
149 Returned value can be used safely for "p.startswith(prefix)",
160 Returned value can be used safely for "p.startswith(prefix)",
150 "p[len(prefix):]", and so on.
161 "p[len(prefix):]", and so on.
151
162
152 For efficiency, this expects "path" argument to be already
163 For efficiency, this expects "path" argument to be already
153 normalized by "os.path.normpath", "os.path.realpath", and so on.
164 normalized by "os.path.normpath", "os.path.realpath", and so on.
154
165
155 See also issue3033 for detail about need of this function.
166 See also issue3033 for detail about need of this function.
156
167
157 >>> normasprefix('/foo/bar').replace(os.sep, '/')
168 >>> normasprefix('/foo/bar').replace(os.sep, '/')
158 '/foo/bar/'
169 '/foo/bar/'
159 >>> normasprefix('/').replace(os.sep, '/')
170 >>> normasprefix('/').replace(os.sep, '/')
160 '/'
171 '/'
161 '''
172 '''
162 d, p = os.path.splitdrive(path)
173 d, p = os.path.splitdrive(path)
163 if len(p) != len(os.sep):
174 if len(p) != len(os.sep):
164 return path + os.sep
175 return path + os.sep
165 else:
176 else:
166 return path
177 return path
@@ -1,596 +1,599 b''
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import encoding
9 import encoding
10 import os, sys, errno, stat, getpass, pwd, grp, socket, tempfile, unicodedata
10 import os, sys, errno, stat, getpass, pwd, grp, socket, tempfile, unicodedata
11 import fcntl
11 import fcntl
12
12
13 posixfile = open
13 posixfile = open
14 normpath = os.path.normpath
14 normpath = os.path.normpath
15 samestat = os.path.samestat
15 samestat = os.path.samestat
16 oslink = os.link
16 oslink = os.link
17 unlink = os.unlink
17 unlink = os.unlink
18 rename = os.rename
18 rename = os.rename
19 expandglobs = False
19 expandglobs = False
20
20
21 umask = os.umask(0)
21 umask = os.umask(0)
22 os.umask(umask)
22 os.umask(umask)
23
23
24 def split(p):
24 def split(p):
25 '''Same as posixpath.split, but faster
25 '''Same as posixpath.split, but faster
26
26
27 >>> import posixpath
27 >>> import posixpath
28 >>> for f in ['/absolute/path/to/file',
28 >>> for f in ['/absolute/path/to/file',
29 ... 'relative/path/to/file',
29 ... 'relative/path/to/file',
30 ... 'file_alone',
30 ... 'file_alone',
31 ... 'path/to/directory/',
31 ... 'path/to/directory/',
32 ... '/multiple/path//separators',
32 ... '/multiple/path//separators',
33 ... '/file_at_root',
33 ... '/file_at_root',
34 ... '///multiple_leading_separators_at_root',
34 ... '///multiple_leading_separators_at_root',
35 ... '']:
35 ... '']:
36 ... assert split(f) == posixpath.split(f), f
36 ... assert split(f) == posixpath.split(f), f
37 '''
37 '''
38 ht = p.rsplit('/', 1)
38 ht = p.rsplit('/', 1)
39 if len(ht) == 1:
39 if len(ht) == 1:
40 return '', p
40 return '', p
41 nh = ht[0].rstrip('/')
41 nh = ht[0].rstrip('/')
42 if nh:
42 if nh:
43 return nh, ht[1]
43 return nh, ht[1]
44 return ht[0] + '/', ht[1]
44 return ht[0] + '/', ht[1]
45
45
46 def openhardlinks():
46 def openhardlinks():
47 '''return true if it is safe to hold open file handles to hardlinks'''
47 '''return true if it is safe to hold open file handles to hardlinks'''
48 return True
48 return True
49
49
50 def nlinks(name):
50 def nlinks(name):
51 '''return number of hardlinks for the given file'''
51 '''return number of hardlinks for the given file'''
52 return os.lstat(name).st_nlink
52 return os.lstat(name).st_nlink
53
53
54 def parsepatchoutput(output_line):
54 def parsepatchoutput(output_line):
55 """parses the output produced by patch and returns the filename"""
55 """parses the output produced by patch and returns the filename"""
56 pf = output_line[14:]
56 pf = output_line[14:]
57 if os.sys.platform == 'OpenVMS':
57 if os.sys.platform == 'OpenVMS':
58 if pf[0] == '`':
58 if pf[0] == '`':
59 pf = pf[1:-1] # Remove the quotes
59 pf = pf[1:-1] # Remove the quotes
60 else:
60 else:
61 if pf.startswith("'") and pf.endswith("'") and " " in pf:
61 if pf.startswith("'") and pf.endswith("'") and " " in pf:
62 pf = pf[1:-1] # Remove the quotes
62 pf = pf[1:-1] # Remove the quotes
63 return pf
63 return pf
64
64
65 def sshargs(sshcmd, host, user, port):
65 def sshargs(sshcmd, host, user, port):
66 '''Build argument list for ssh'''
66 '''Build argument list for ssh'''
67 args = user and ("%s@%s" % (user, host)) or host
67 args = user and ("%s@%s" % (user, host)) or host
68 return port and ("%s -p %s" % (args, port)) or args
68 return port and ("%s -p %s" % (args, port)) or args
69
69
70 def isexec(f):
70 def isexec(f):
71 """check whether a file is executable"""
71 """check whether a file is executable"""
72 return (os.lstat(f).st_mode & 0100 != 0)
72 return (os.lstat(f).st_mode & 0100 != 0)
73
73
74 def setflags(f, l, x):
74 def setflags(f, l, x):
75 s = os.lstat(f).st_mode
75 s = os.lstat(f).st_mode
76 if l:
76 if l:
77 if not stat.S_ISLNK(s):
77 if not stat.S_ISLNK(s):
78 # switch file to link
78 # switch file to link
79 fp = open(f)
79 fp = open(f)
80 data = fp.read()
80 data = fp.read()
81 fp.close()
81 fp.close()
82 os.unlink(f)
82 os.unlink(f)
83 try:
83 try:
84 os.symlink(data, f)
84 os.symlink(data, f)
85 except OSError:
85 except OSError:
86 # failed to make a link, rewrite file
86 # failed to make a link, rewrite file
87 fp = open(f, "w")
87 fp = open(f, "w")
88 fp.write(data)
88 fp.write(data)
89 fp.close()
89 fp.close()
90 # no chmod needed at this point
90 # no chmod needed at this point
91 return
91 return
92 if stat.S_ISLNK(s):
92 if stat.S_ISLNK(s):
93 # switch link to file
93 # switch link to file
94 data = os.readlink(f)
94 data = os.readlink(f)
95 os.unlink(f)
95 os.unlink(f)
96 fp = open(f, "w")
96 fp = open(f, "w")
97 fp.write(data)
97 fp.write(data)
98 fp.close()
98 fp.close()
99 s = 0666 & ~umask # avoid restatting for chmod
99 s = 0666 & ~umask # avoid restatting for chmod
100
100
101 sx = s & 0100
101 sx = s & 0100
102 if x and not sx:
102 if x and not sx:
103 # Turn on +x for every +r bit when making a file executable
103 # Turn on +x for every +r bit when making a file executable
104 # and obey umask.
104 # and obey umask.
105 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
105 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
106 elif not x and sx:
106 elif not x and sx:
107 # Turn off all +x bits
107 # Turn off all +x bits
108 os.chmod(f, s & 0666)
108 os.chmod(f, s & 0666)
109
109
110 def copymode(src, dst, mode=None):
110 def copymode(src, dst, mode=None):
111 '''Copy the file mode from the file at path src to dst.
111 '''Copy the file mode from the file at path src to dst.
112 If src doesn't exist, we're using mode instead. If mode is None, we're
112 If src doesn't exist, we're using mode instead. If mode is None, we're
113 using umask.'''
113 using umask.'''
114 try:
114 try:
115 st_mode = os.lstat(src).st_mode & 0777
115 st_mode = os.lstat(src).st_mode & 0777
116 except OSError, inst:
116 except OSError, inst:
117 if inst.errno != errno.ENOENT:
117 if inst.errno != errno.ENOENT:
118 raise
118 raise
119 st_mode = mode
119 st_mode = mode
120 if st_mode is None:
120 if st_mode is None:
121 st_mode = ~umask
121 st_mode = ~umask
122 st_mode &= 0666
122 st_mode &= 0666
123 os.chmod(dst, st_mode)
123 os.chmod(dst, st_mode)
124
124
125 def checkexec(path):
125 def checkexec(path):
126 """
126 """
127 Check whether the given path is on a filesystem with UNIX-like exec flags
127 Check whether the given path is on a filesystem with UNIX-like exec flags
128
128
129 Requires a directory (like /foo/.hg)
129 Requires a directory (like /foo/.hg)
130 """
130 """
131
131
132 # VFAT on some Linux versions can flip mode but it doesn't persist
132 # VFAT on some Linux versions can flip mode but it doesn't persist
133 # a FS remount. Frequently we can detect it if files are created
133 # a FS remount. Frequently we can detect it if files are created
134 # with exec bit on.
134 # with exec bit on.
135
135
136 try:
136 try:
137 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
137 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
138 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
138 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
139 try:
139 try:
140 os.close(fh)
140 os.close(fh)
141 m = os.stat(fn).st_mode & 0777
141 m = os.stat(fn).st_mode & 0777
142 new_file_has_exec = m & EXECFLAGS
142 new_file_has_exec = m & EXECFLAGS
143 os.chmod(fn, m ^ EXECFLAGS)
143 os.chmod(fn, m ^ EXECFLAGS)
144 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
144 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
145 finally:
145 finally:
146 os.unlink(fn)
146 os.unlink(fn)
147 except (IOError, OSError):
147 except (IOError, OSError):
148 # we don't care, the user probably won't be able to commit anyway
148 # we don't care, the user probably won't be able to commit anyway
149 return False
149 return False
150 return not (new_file_has_exec or exec_flags_cannot_flip)
150 return not (new_file_has_exec or exec_flags_cannot_flip)
151
151
152 def checklink(path):
152 def checklink(path):
153 """check whether the given path is on a symlink-capable filesystem"""
153 """check whether the given path is on a symlink-capable filesystem"""
154 # mktemp is not racy because symlink creation will fail if the
154 # mktemp is not racy because symlink creation will fail if the
155 # file already exists
155 # file already exists
156 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
156 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
157 try:
157 try:
158 fd = tempfile.NamedTemporaryFile(dir=path, prefix='hg-checklink-')
158 fd = tempfile.NamedTemporaryFile(dir=path, prefix='hg-checklink-')
159 try:
159 try:
160 os.symlink(os.path.basename(fd.name), name)
160 os.symlink(os.path.basename(fd.name), name)
161 os.unlink(name)
161 os.unlink(name)
162 return True
162 return True
163 finally:
163 finally:
164 fd.close()
164 fd.close()
165 except AttributeError:
165 except AttributeError:
166 return False
166 return False
167 except OSError, inst:
167 except OSError, inst:
168 # sshfs might report failure while successfully creating the link
168 # sshfs might report failure while successfully creating the link
169 if inst[0] == errno.EIO and os.path.exists(name):
169 if inst[0] == errno.EIO and os.path.exists(name):
170 os.unlink(name)
170 os.unlink(name)
171 return False
171 return False
172
172
173 def checkosfilename(path):
173 def checkosfilename(path):
174 '''Check that the base-relative path is a valid filename on this platform.
174 '''Check that the base-relative path is a valid filename on this platform.
175 Returns None if the path is ok, or a UI string describing the problem.'''
175 Returns None if the path is ok, or a UI string describing the problem.'''
176 pass # on posix platforms, every path is ok
176 pass # on posix platforms, every path is ok
177
177
178 def setbinary(fd):
178 def setbinary(fd):
179 pass
179 pass
180
180
181 def pconvert(path):
181 def pconvert(path):
182 return path
182 return path
183
183
184 def localpath(path):
184 def localpath(path):
185 return path
185 return path
186
186
187 def samefile(fpath1, fpath2):
187 def samefile(fpath1, fpath2):
188 """Returns whether path1 and path2 refer to the same file. This is only
188 """Returns whether path1 and path2 refer to the same file. This is only
189 guaranteed to work for files, not directories."""
189 guaranteed to work for files, not directories."""
190 return os.path.samefile(fpath1, fpath2)
190 return os.path.samefile(fpath1, fpath2)
191
191
192 def samedevice(fpath1, fpath2):
192 def samedevice(fpath1, fpath2):
193 """Returns whether fpath1 and fpath2 are on the same device. This is only
193 """Returns whether fpath1 and fpath2 are on the same device. This is only
194 guaranteed to work for files, not directories."""
194 guaranteed to work for files, not directories."""
195 st1 = os.lstat(fpath1)
195 st1 = os.lstat(fpath1)
196 st2 = os.lstat(fpath2)
196 st2 = os.lstat(fpath2)
197 return st1.st_dev == st2.st_dev
197 return st1.st_dev == st2.st_dev
198
198
199 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
199 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
200 def normcase(path):
200 def normcase(path):
201 return path.lower()
201 return path.lower()
202
202
203 if sys.platform == 'darwin':
203 if sys.platform == 'darwin':
204
204
205 def normcase(path):
205 def normcase(path):
206 '''
206 '''
207 Normalize a filename for OS X-compatible comparison:
207 Normalize a filename for OS X-compatible comparison:
208 - escape-encode invalid characters
208 - escape-encode invalid characters
209 - decompose to NFD
209 - decompose to NFD
210 - lowercase
210 - lowercase
211 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
211
212
212 >>> normcase('UPPER')
213 >>> normcase('UPPER')
213 'upper'
214 'upper'
214 >>> normcase('Caf\xc3\xa9')
215 >>> normcase('Caf\xc3\xa9')
215 'cafe\\xcc\\x81'
216 'cafe\\xcc\\x81'
216 >>> normcase('\xc3\x89')
217 >>> normcase('\xc3\x89')
217 'e\\xcc\\x81'
218 'e\\xcc\\x81'
218 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
219 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
219 '%b8%ca%c3\\xca\\xbe%c8.jpg'
220 '%b8%ca%c3\\xca\\xbe%c8.jpg'
220 '''
221 '''
221
222
222 try:
223 try:
223 return encoding.asciilower(path) # exception for non-ASCII
224 return encoding.asciilower(path) # exception for non-ASCII
224 except UnicodeDecodeError:
225 except UnicodeDecodeError:
225 pass
226 pass
226 try:
227 try:
227 u = path.decode('utf-8')
228 u = path.decode('utf-8')
228 except UnicodeDecodeError:
229 except UnicodeDecodeError:
229 # OS X percent-encodes any bytes that aren't valid utf-8
230 # OS X percent-encodes any bytes that aren't valid utf-8
230 s = ''
231 s = ''
231 g = ''
232 g = ''
232 l = 0
233 l = 0
233 for c in path:
234 for c in path:
234 o = ord(c)
235 o = ord(c)
235 if l and o < 128 or o >= 192:
236 if l and o < 128 or o >= 192:
236 # we want a continuation byte, but didn't get one
237 # we want a continuation byte, but didn't get one
237 s += ''.join(["%%%02X" % ord(x) for x in g])
238 s += ''.join(["%%%02X" % ord(x) for x in g])
238 g = ''
239 g = ''
239 l = 0
240 l = 0
240 if l == 0 and o < 128:
241 if l == 0 and o < 128:
241 # ascii
242 # ascii
242 s += c
243 s += c
243 elif l == 0 and 194 <= o < 245:
244 elif l == 0 and 194 <= o < 245:
244 # valid leading bytes
245 # valid leading bytes
245 if o < 224:
246 if o < 224:
246 l = 1
247 l = 1
247 elif o < 240:
248 elif o < 240:
248 l = 2
249 l = 2
249 else:
250 else:
250 l = 3
251 l = 3
251 g = c
252 g = c
252 elif l > 0 and 128 <= o < 192:
253 elif l > 0 and 128 <= o < 192:
253 # valid continuations
254 # valid continuations
254 g += c
255 g += c
255 l -= 1
256 l -= 1
256 if not l:
257 if not l:
257 s += g
258 s += g
258 g = ''
259 g = ''
259 else:
260 else:
260 # invalid
261 # invalid
261 s += "%%%02X" % o
262 s += "%%%02X" % o
262
263
263 # any remaining partial characters
264 # any remaining partial characters
264 s += ''.join(["%%%02X" % ord(x) for x in g])
265 s += ''.join(["%%%02X" % ord(x) for x in g])
265 u = s.decode('utf-8')
266 u = s.decode('utf-8')
266
267
267 # Decompose then lowercase (HFS+ technote specifies lower)
268 # Decompose then lowercase (HFS+ technote specifies lower)
268 return unicodedata.normalize('NFD', u).lower().encode('utf-8')
269 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
270 # drop HFS+ ignored characters
271 return encoding.hfsignoreclean(enc)
269
272
270 if sys.platform == 'cygwin':
273 if sys.platform == 'cygwin':
271 # workaround for cygwin, in which mount point part of path is
274 # workaround for cygwin, in which mount point part of path is
272 # treated as case sensitive, even though underlying NTFS is case
275 # treated as case sensitive, even though underlying NTFS is case
273 # insensitive.
276 # insensitive.
274
277
275 # default mount points
278 # default mount points
276 cygwinmountpoints = sorted([
279 cygwinmountpoints = sorted([
277 "/usr/bin",
280 "/usr/bin",
278 "/usr/lib",
281 "/usr/lib",
279 "/cygdrive",
282 "/cygdrive",
280 ], reverse=True)
283 ], reverse=True)
281
284
282 # use upper-ing as normcase as same as NTFS workaround
285 # use upper-ing as normcase as same as NTFS workaround
283 def normcase(path):
286 def normcase(path):
284 pathlen = len(path)
287 pathlen = len(path)
285 if (pathlen == 0) or (path[0] != os.sep):
288 if (pathlen == 0) or (path[0] != os.sep):
286 # treat as relative
289 # treat as relative
287 return encoding.upper(path)
290 return encoding.upper(path)
288
291
289 # to preserve case of mountpoint part
292 # to preserve case of mountpoint part
290 for mp in cygwinmountpoints:
293 for mp in cygwinmountpoints:
291 if not path.startswith(mp):
294 if not path.startswith(mp):
292 continue
295 continue
293
296
294 mplen = len(mp)
297 mplen = len(mp)
295 if mplen == pathlen: # mount point itself
298 if mplen == pathlen: # mount point itself
296 return mp
299 return mp
297 if path[mplen] == os.sep:
300 if path[mplen] == os.sep:
298 return mp + encoding.upper(path[mplen:])
301 return mp + encoding.upper(path[mplen:])
299
302
300 return encoding.upper(path)
303 return encoding.upper(path)
301
304
302 # Cygwin translates native ACLs to POSIX permissions,
305 # Cygwin translates native ACLs to POSIX permissions,
303 # but these translations are not supported by native
306 # but these translations are not supported by native
304 # tools, so the exec bit tends to be set erroneously.
307 # tools, so the exec bit tends to be set erroneously.
305 # Therefore, disable executable bit access on Cygwin.
308 # Therefore, disable executable bit access on Cygwin.
306 def checkexec(path):
309 def checkexec(path):
307 return False
310 return False
308
311
309 # Similarly, Cygwin's symlink emulation is likely to create
312 # Similarly, Cygwin's symlink emulation is likely to create
310 # problems when Mercurial is used from both Cygwin and native
313 # problems when Mercurial is used from both Cygwin and native
311 # Windows, with other native tools, or on shared volumes
314 # Windows, with other native tools, or on shared volumes
312 def checklink(path):
315 def checklink(path):
313 return False
316 return False
314
317
315 def shellquote(s):
318 def shellquote(s):
316 if os.sys.platform == 'OpenVMS':
319 if os.sys.platform == 'OpenVMS':
317 return '"%s"' % s
320 return '"%s"' % s
318 else:
321 else:
319 return "'%s'" % s.replace("'", "'\\''")
322 return "'%s'" % s.replace("'", "'\\''")
320
323
321 def quotecommand(cmd):
324 def quotecommand(cmd):
322 return cmd
325 return cmd
323
326
324 def popen(command, mode='r'):
327 def popen(command, mode='r'):
325 return os.popen(command, mode)
328 return os.popen(command, mode)
326
329
327 def testpid(pid):
330 def testpid(pid):
328 '''return False if pid dead, True if running or not sure'''
331 '''return False if pid dead, True if running or not sure'''
329 if os.sys.platform == 'OpenVMS':
332 if os.sys.platform == 'OpenVMS':
330 return True
333 return True
331 try:
334 try:
332 os.kill(pid, 0)
335 os.kill(pid, 0)
333 return True
336 return True
334 except OSError, inst:
337 except OSError, inst:
335 return inst.errno != errno.ESRCH
338 return inst.errno != errno.ESRCH
336
339
337 def explainexit(code):
340 def explainexit(code):
338 """return a 2-tuple (desc, code) describing a subprocess status
341 """return a 2-tuple (desc, code) describing a subprocess status
339 (codes from kill are negative - not os.system/wait encoding)"""
342 (codes from kill are negative - not os.system/wait encoding)"""
340 if code >= 0:
343 if code >= 0:
341 return _("exited with status %d") % code, code
344 return _("exited with status %d") % code, code
342 return _("killed by signal %d") % -code, -code
345 return _("killed by signal %d") % -code, -code
343
346
344 def isowner(st):
347 def isowner(st):
345 """Return True if the stat object st is from the current user."""
348 """Return True if the stat object st is from the current user."""
346 return st.st_uid == os.getuid()
349 return st.st_uid == os.getuid()
347
350
348 def findexe(command):
351 def findexe(command):
349 '''Find executable for command searching like which does.
352 '''Find executable for command searching like which does.
350 If command is a basename then PATH is searched for command.
353 If command is a basename then PATH is searched for command.
351 PATH isn't searched if command is an absolute or relative path.
354 PATH isn't searched if command is an absolute or relative path.
352 If command isn't found None is returned.'''
355 If command isn't found None is returned.'''
353 if sys.platform == 'OpenVMS':
356 if sys.platform == 'OpenVMS':
354 return command
357 return command
355
358
356 def findexisting(executable):
359 def findexisting(executable):
357 'Will return executable if existing file'
360 'Will return executable if existing file'
358 if os.path.isfile(executable) and os.access(executable, os.X_OK):
361 if os.path.isfile(executable) and os.access(executable, os.X_OK):
359 return executable
362 return executable
360 return None
363 return None
361
364
362 if os.sep in command:
365 if os.sep in command:
363 return findexisting(command)
366 return findexisting(command)
364
367
365 if sys.platform == 'plan9':
368 if sys.platform == 'plan9':
366 return findexisting(os.path.join('/bin', command))
369 return findexisting(os.path.join('/bin', command))
367
370
368 for path in os.environ.get('PATH', '').split(os.pathsep):
371 for path in os.environ.get('PATH', '').split(os.pathsep):
369 executable = findexisting(os.path.join(path, command))
372 executable = findexisting(os.path.join(path, command))
370 if executable is not None:
373 if executable is not None:
371 return executable
374 return executable
372 return None
375 return None
373
376
374 def setsignalhandler():
377 def setsignalhandler():
375 pass
378 pass
376
379
377 _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
380 _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
378
381
379 def statfiles(files):
382 def statfiles(files):
380 '''Stat each file in files. Yield each stat, or None if a file does not
383 '''Stat each file in files. Yield each stat, or None if a file does not
381 exist or has a type we don't care about.'''
384 exist or has a type we don't care about.'''
382 lstat = os.lstat
385 lstat = os.lstat
383 getkind = stat.S_IFMT
386 getkind = stat.S_IFMT
384 for nf in files:
387 for nf in files:
385 try:
388 try:
386 st = lstat(nf)
389 st = lstat(nf)
387 if getkind(st.st_mode) not in _wantedkinds:
390 if getkind(st.st_mode) not in _wantedkinds:
388 st = None
391 st = None
389 except OSError, err:
392 except OSError, err:
390 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
393 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
391 raise
394 raise
392 st = None
395 st = None
393 yield st
396 yield st
394
397
395 def getuser():
398 def getuser():
396 '''return name of current user'''
399 '''return name of current user'''
397 return getpass.getuser()
400 return getpass.getuser()
398
401
399 def username(uid=None):
402 def username(uid=None):
400 """Return the name of the user with the given uid.
403 """Return the name of the user with the given uid.
401
404
402 If uid is None, return the name of the current user."""
405 If uid is None, return the name of the current user."""
403
406
404 if uid is None:
407 if uid is None:
405 uid = os.getuid()
408 uid = os.getuid()
406 try:
409 try:
407 return pwd.getpwuid(uid)[0]
410 return pwd.getpwuid(uid)[0]
408 except KeyError:
411 except KeyError:
409 return str(uid)
412 return str(uid)
410
413
411 def groupname(gid=None):
414 def groupname(gid=None):
412 """Return the name of the group with the given gid.
415 """Return the name of the group with the given gid.
413
416
414 If gid is None, return the name of the current group."""
417 If gid is None, return the name of the current group."""
415
418
416 if gid is None:
419 if gid is None:
417 gid = os.getgid()
420 gid = os.getgid()
418 try:
421 try:
419 return grp.getgrgid(gid)[0]
422 return grp.getgrgid(gid)[0]
420 except KeyError:
423 except KeyError:
421 return str(gid)
424 return str(gid)
422
425
423 def groupmembers(name):
426 def groupmembers(name):
424 """Return the list of members of the group with the given
427 """Return the list of members of the group with the given
425 name, KeyError if the group does not exist.
428 name, KeyError if the group does not exist.
426 """
429 """
427 return list(grp.getgrnam(name).gr_mem)
430 return list(grp.getgrnam(name).gr_mem)
428
431
429 def spawndetached(args):
432 def spawndetached(args):
430 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
433 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
431 args[0], args)
434 args[0], args)
432
435
433 def gethgcmd():
436 def gethgcmd():
434 return sys.argv[:1]
437 return sys.argv[:1]
435
438
436 def termwidth():
439 def termwidth():
437 try:
440 try:
438 import termios, array
441 import termios, array
439 for dev in (sys.stderr, sys.stdout, sys.stdin):
442 for dev in (sys.stderr, sys.stdout, sys.stdin):
440 try:
443 try:
441 try:
444 try:
442 fd = dev.fileno()
445 fd = dev.fileno()
443 except AttributeError:
446 except AttributeError:
444 continue
447 continue
445 if not os.isatty(fd):
448 if not os.isatty(fd):
446 continue
449 continue
447 try:
450 try:
448 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
451 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
449 width = array.array('h', arri)[1]
452 width = array.array('h', arri)[1]
450 if width > 0:
453 if width > 0:
451 return width
454 return width
452 except AttributeError:
455 except AttributeError:
453 pass
456 pass
454 except ValueError:
457 except ValueError:
455 pass
458 pass
456 except IOError, e:
459 except IOError, e:
457 if e[0] == errno.EINVAL:
460 if e[0] == errno.EINVAL:
458 pass
461 pass
459 else:
462 else:
460 raise
463 raise
461 except ImportError:
464 except ImportError:
462 pass
465 pass
463 return 80
466 return 80
464
467
465 def makedir(path, notindexed):
468 def makedir(path, notindexed):
466 os.mkdir(path)
469 os.mkdir(path)
467
470
468 def unlinkpath(f, ignoremissing=False):
471 def unlinkpath(f, ignoremissing=False):
469 """unlink and remove the directory if it is empty"""
472 """unlink and remove the directory if it is empty"""
470 try:
473 try:
471 os.unlink(f)
474 os.unlink(f)
472 except OSError, e:
475 except OSError, e:
473 if not (ignoremissing and e.errno == errno.ENOENT):
476 if not (ignoremissing and e.errno == errno.ENOENT):
474 raise
477 raise
475 # try removing directories that might now be empty
478 # try removing directories that might now be empty
476 try:
479 try:
477 os.removedirs(os.path.dirname(f))
480 os.removedirs(os.path.dirname(f))
478 except OSError:
481 except OSError:
479 pass
482 pass
480
483
481 def lookupreg(key, name=None, scope=None):
484 def lookupreg(key, name=None, scope=None):
482 return None
485 return None
483
486
484 def hidewindow():
487 def hidewindow():
485 """Hide current shell window.
488 """Hide current shell window.
486
489
487 Used to hide the window opened when starting asynchronous
490 Used to hide the window opened when starting asynchronous
488 child process under Windows, unneeded on other systems.
491 child process under Windows, unneeded on other systems.
489 """
492 """
490 pass
493 pass
491
494
492 class cachestat(object):
495 class cachestat(object):
493 def __init__(self, path):
496 def __init__(self, path):
494 self.stat = os.stat(path)
497 self.stat = os.stat(path)
495
498
496 def cacheable(self):
499 def cacheable(self):
497 return bool(self.stat.st_ino)
500 return bool(self.stat.st_ino)
498
501
499 __hash__ = object.__hash__
502 __hash__ = object.__hash__
500
503
501 def __eq__(self, other):
504 def __eq__(self, other):
502 try:
505 try:
503 # Only dev, ino, size, mtime and atime are likely to change. Out
506 # Only dev, ino, size, mtime and atime are likely to change. Out
504 # of these, we shouldn't compare atime but should compare the
507 # of these, we shouldn't compare atime but should compare the
505 # rest. However, one of the other fields changing indicates
508 # rest. However, one of the other fields changing indicates
506 # something fishy going on, so return False if anything but atime
509 # something fishy going on, so return False if anything but atime
507 # changes.
510 # changes.
508 return (self.stat.st_mode == other.stat.st_mode and
511 return (self.stat.st_mode == other.stat.st_mode and
509 self.stat.st_ino == other.stat.st_ino and
512 self.stat.st_ino == other.stat.st_ino and
510 self.stat.st_dev == other.stat.st_dev and
513 self.stat.st_dev == other.stat.st_dev and
511 self.stat.st_nlink == other.stat.st_nlink and
514 self.stat.st_nlink == other.stat.st_nlink and
512 self.stat.st_uid == other.stat.st_uid and
515 self.stat.st_uid == other.stat.st_uid and
513 self.stat.st_gid == other.stat.st_gid and
516 self.stat.st_gid == other.stat.st_gid and
514 self.stat.st_size == other.stat.st_size and
517 self.stat.st_size == other.stat.st_size and
515 self.stat.st_mtime == other.stat.st_mtime and
518 self.stat.st_mtime == other.stat.st_mtime and
516 self.stat.st_ctime == other.stat.st_ctime)
519 self.stat.st_ctime == other.stat.st_ctime)
517 except AttributeError:
520 except AttributeError:
518 return False
521 return False
519
522
520 def __ne__(self, other):
523 def __ne__(self, other):
521 return not self == other
524 return not self == other
522
525
523 def executablepath():
526 def executablepath():
524 return None # available on Windows only
527 return None # available on Windows only
525
528
526 class unixdomainserver(socket.socket):
529 class unixdomainserver(socket.socket):
527 def __init__(self, join, subsystem):
530 def __init__(self, join, subsystem):
528 '''Create a unix domain socket with the given prefix.'''
531 '''Create a unix domain socket with the given prefix.'''
529 super(unixdomainserver, self).__init__(socket.AF_UNIX)
532 super(unixdomainserver, self).__init__(socket.AF_UNIX)
530 sockname = subsystem + '.sock'
533 sockname = subsystem + '.sock'
531 self.realpath = self.path = join(sockname)
534 self.realpath = self.path = join(sockname)
532 if os.path.islink(self.path):
535 if os.path.islink(self.path):
533 if os.path.exists(self.path):
536 if os.path.exists(self.path):
534 self.realpath = os.readlink(self.path)
537 self.realpath = os.readlink(self.path)
535 else:
538 else:
536 os.unlink(self.path)
539 os.unlink(self.path)
537 try:
540 try:
538 self.bind(self.realpath)
541 self.bind(self.realpath)
539 except socket.error, err:
542 except socket.error, err:
540 if err.args[0] == 'AF_UNIX path too long':
543 if err.args[0] == 'AF_UNIX path too long':
541 tmpdir = tempfile.mkdtemp(prefix='hg-%s-' % subsystem)
544 tmpdir = tempfile.mkdtemp(prefix='hg-%s-' % subsystem)
542 self.realpath = os.path.join(tmpdir, sockname)
545 self.realpath = os.path.join(tmpdir, sockname)
543 try:
546 try:
544 self.bind(self.realpath)
547 self.bind(self.realpath)
545 os.symlink(self.realpath, self.path)
548 os.symlink(self.realpath, self.path)
546 except (OSError, socket.error):
549 except (OSError, socket.error):
547 self.cleanup()
550 self.cleanup()
548 raise
551 raise
549 else:
552 else:
550 raise
553 raise
551 self.listen(5)
554 self.listen(5)
552
555
553 def cleanup(self):
556 def cleanup(self):
554 def okayifmissing(f, path):
557 def okayifmissing(f, path):
555 try:
558 try:
556 f(path)
559 f(path)
557 except OSError, err:
560 except OSError, err:
558 if err.errno != errno.ENOENT:
561 if err.errno != errno.ENOENT:
559 raise
562 raise
560
563
561 okayifmissing(os.unlink, self.path)
564 okayifmissing(os.unlink, self.path)
562 if self.realpath != self.path:
565 if self.realpath != self.path:
563 okayifmissing(os.unlink, self.realpath)
566 okayifmissing(os.unlink, self.realpath)
564 okayifmissing(os.rmdir, os.path.dirname(self.realpath))
567 okayifmissing(os.rmdir, os.path.dirname(self.realpath))
565
568
566 def statislink(st):
569 def statislink(st):
567 '''check whether a stat result is a symlink'''
570 '''check whether a stat result is a symlink'''
568 return st and stat.S_ISLNK(st.st_mode)
571 return st and stat.S_ISLNK(st.st_mode)
569
572
570 def statisexec(st):
573 def statisexec(st):
571 '''check whether a stat result is an executable file'''
574 '''check whether a stat result is an executable file'''
572 return st and (st.st_mode & 0100 != 0)
575 return st and (st.st_mode & 0100 != 0)
573
576
574 def readpipe(pipe):
577 def readpipe(pipe):
575 """Read all available data from a pipe."""
578 """Read all available data from a pipe."""
576 # We can't fstat() a pipe because Linux will always report 0.
579 # We can't fstat() a pipe because Linux will always report 0.
577 # So, we set the pipe to non-blocking mode and read everything
580 # So, we set the pipe to non-blocking mode and read everything
578 # that's available.
581 # that's available.
579 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
582 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
580 flags |= os.O_NONBLOCK
583 flags |= os.O_NONBLOCK
581 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
584 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
582
585
583 try:
586 try:
584 chunks = []
587 chunks = []
585 while True:
588 while True:
586 try:
589 try:
587 s = pipe.read()
590 s = pipe.read()
588 if not s:
591 if not s:
589 break
592 break
590 chunks.append(s)
593 chunks.append(s)
591 except IOError:
594 except IOError:
592 break
595 break
593
596
594 return ''.join(chunks)
597 return ''.join(chunks)
595 finally:
598 finally:
596 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
599 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
@@ -1,198 +1,210 b''
1 #require icasefs
1 #require icasefs
2
2
3 $ hg debugfs | grep 'case-sensitive:'
3 $ hg debugfs | grep 'case-sensitive:'
4 case-sensitive: no
4 case-sensitive: no
5
5
6 test file addition with bad case
6 test file addition with bad case
7
7
8 $ hg init repo1
8 $ hg init repo1
9 $ cd repo1
9 $ cd repo1
10 $ echo a > a
10 $ echo a > a
11 $ hg add A
11 $ hg add A
12 adding a
12 adding a
13 $ hg st
13 $ hg st
14 A a
14 A a
15 $ hg ci -m adda
15 $ hg ci -m adda
16 $ hg manifest
16 $ hg manifest
17 a
17 a
18 $ cd ..
18 $ cd ..
19
19
20 test case collision on rename (issue750)
20 test case collision on rename (issue750)
21
21
22 $ hg init repo2
22 $ hg init repo2
23 $ cd repo2
23 $ cd repo2
24 $ echo a > a
24 $ echo a > a
25 $ hg --debug ci -Am adda
25 $ hg --debug ci -Am adda
26 adding a
26 adding a
27 a
27 a
28 committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
28 committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9
29
29
30 Case-changing renames should work:
30 Case-changing renames should work:
31
31
32 $ hg mv a A
32 $ hg mv a A
33 $ hg mv A a
33 $ hg mv A a
34 $ hg st
34 $ hg st
35
35
36 test changing case of path components
36 test changing case of path components
37
37
38 $ mkdir D
38 $ mkdir D
39 $ echo b > D/b
39 $ echo b > D/b
40 $ hg ci -Am addb D/b
40 $ hg ci -Am addb D/b
41 $ hg mv D/b d/b
41 $ hg mv D/b d/b
42 D/b: not overwriting - file exists
42 D/b: not overwriting - file exists
43 $ hg mv D/b d/c
43 $ hg mv D/b d/c
44 $ hg st
44 $ hg st
45 A D/c
45 A D/c
46 R D/b
46 R D/b
47 $ mv D temp
47 $ mv D temp
48 $ mv temp d
48 $ mv temp d
49 $ hg st
49 $ hg st
50 A D/c
50 A D/c
51 R D/b
51 R D/b
52 $ hg revert -aq
52 $ hg revert -aq
53 $ rm d/c
53 $ rm d/c
54 $ echo c > D/c
54 $ echo c > D/c
55 $ hg add D/c
55 $ hg add D/c
56 $ hg st
56 $ hg st
57 A D/c
57 A D/c
58 $ hg ci -m addc D/c
58 $ hg ci -m addc D/c
59 $ hg mv d/b d/e
59 $ hg mv d/b d/e
60 moving D/b to D/e (glob)
60 moving D/b to D/e (glob)
61 $ hg st
61 $ hg st
62 A D/e
62 A D/e
63 R D/b
63 R D/b
64 $ hg revert -aq
64 $ hg revert -aq
65 $ rm d/e
65 $ rm d/e
66 $ hg mv d/b D/B
66 $ hg mv d/b D/B
67 moving D/b to D/B (glob)
67 moving D/b to D/B (glob)
68 $ hg st
68 $ hg st
69 A D/B
69 A D/B
70 R D/b
70 R D/b
71 $ cd ..
71 $ cd ..
72
72
73 test case collision between revisions (issue912)
73 test case collision between revisions (issue912)
74
74
75 $ hg init repo3
75 $ hg init repo3
76 $ cd repo3
76 $ cd repo3
77 $ echo a > a
77 $ echo a > a
78 $ hg ci -Am adda
78 $ hg ci -Am adda
79 adding a
79 adding a
80 $ hg rm a
80 $ hg rm a
81 $ hg ci -Am removea
81 $ hg ci -Am removea
82 $ echo A > A
82 $ echo A > A
83
83
84 on linux hfs keeps the old case stored, force it
84 on linux hfs keeps the old case stored, force it
85
85
86 $ mv a aa
86 $ mv a aa
87 $ mv aa A
87 $ mv aa A
88 $ hg ci -Am addA
88 $ hg ci -Am addA
89 adding A
89 adding A
90
90
91 used to fail under case insensitive fs
91 used to fail under case insensitive fs
92
92
93 $ hg up -C 0
93 $ hg up -C 0
94 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
94 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
95 $ hg up -C
95 $ hg up -C
96 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
96 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
97
97
98 no clobbering of untracked files with wrong casing
98 no clobbering of untracked files with wrong casing
99
99
100 $ hg up -r null
100 $ hg up -r null
101 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
101 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
102 $ echo gold > a
102 $ echo gold > a
103 $ hg up
103 $ hg up
104 A: untracked file differs
104 A: untracked file differs
105 abort: untracked files in working directory differ from files in requested revision
105 abort: untracked files in working directory differ from files in requested revision
106 [255]
106 [255]
107 $ cat a
107 $ cat a
108 gold
108 gold
109 $ rm a
109 $ rm a
110
110
111 test that normal file in different case on target context is not
111 test that normal file in different case on target context is not
112 unlinked by largefiles extension.
112 unlinked by largefiles extension.
113
113
114 $ cat >> .hg/hgrc <<EOF
114 $ cat >> .hg/hgrc <<EOF
115 > [extensions]
115 > [extensions]
116 > largefiles=
116 > largefiles=
117 > EOF
117 > EOF
118 $ hg update -q -C 1
118 $ hg update -q -C 1
119 $ hg status -A
119 $ hg status -A
120 $ echo 'A as largefiles' > A
120 $ echo 'A as largefiles' > A
121 $ hg add --large A
121 $ hg add --large A
122 $ hg commit -m '#3'
122 $ hg commit -m '#3'
123 created new head
123 created new head
124 $ hg manifest -r 3
124 $ hg manifest -r 3
125 .hglf/A
125 .hglf/A
126 $ hg manifest -r 0
126 $ hg manifest -r 0
127 a
127 a
128 $ hg update -q -C 0
128 $ hg update -q -C 0
129 $ hg status -A
129 $ hg status -A
130 C a
130 C a
131 $ hg update -q -C 3
131 $ hg update -q -C 3
132 $ hg update -q 0
132 $ hg update -q 0
133
133
134 $ cd ..
134 $ cd ..
135
135
136 issue 3342: file in nested directory causes unexpected abort
136 issue 3342: file in nested directory causes unexpected abort
137
137
138 $ hg init issue3342
138 $ hg init issue3342
139 $ cd issue3342
139 $ cd issue3342
140
140
141 $ mkdir -p a/B/c/D
141 $ mkdir -p a/B/c/D
142 $ echo e > a/B/c/D/e
142 $ echo e > a/B/c/D/e
143 $ hg add a/B/c/D/e
143 $ hg add a/B/c/D/e
144
144
145 $ cd ..
145 $ cd ..
146
146
147 issue 3340: mq does not handle case changes correctly
147 issue 3340: mq does not handle case changes correctly
148
148
149 in addition to reported case, 'hg qrefresh' is also tested against
149 in addition to reported case, 'hg qrefresh' is also tested against
150 case changes.
150 case changes.
151
151
152 $ echo "[extensions]" >> $HGRCPATH
152 $ echo "[extensions]" >> $HGRCPATH
153 $ echo "mq=" >> $HGRCPATH
153 $ echo "mq=" >> $HGRCPATH
154
154
155 $ hg init issue3340
155 $ hg init issue3340
156 $ cd issue3340
156 $ cd issue3340
157
157
158 $ echo a > mIxEdCaSe
158 $ echo a > mIxEdCaSe
159 $ hg add mIxEdCaSe
159 $ hg add mIxEdCaSe
160 $ hg commit -m '#0'
160 $ hg commit -m '#0'
161 $ hg rename mIxEdCaSe tmp
161 $ hg rename mIxEdCaSe tmp
162 $ hg rename tmp MiXeDcAsE
162 $ hg rename tmp MiXeDcAsE
163 $ hg status -A
163 $ hg status -A
164 A MiXeDcAsE
164 A MiXeDcAsE
165 mIxEdCaSe
165 mIxEdCaSe
166 R mIxEdCaSe
166 R mIxEdCaSe
167 $ hg qnew changecase
167 $ hg qnew changecase
168 $ hg status -A
168 $ hg status -A
169 C MiXeDcAsE
169 C MiXeDcAsE
170
170
171 $ hg qpop -a
171 $ hg qpop -a
172 popping changecase
172 popping changecase
173 patch queue now empty
173 patch queue now empty
174 $ hg qnew refresh-casechange
174 $ hg qnew refresh-casechange
175 $ hg status -A
175 $ hg status -A
176 C mIxEdCaSe
176 C mIxEdCaSe
177 $ hg rename mIxEdCaSe tmp
177 $ hg rename mIxEdCaSe tmp
178 $ hg rename tmp MiXeDcAsE
178 $ hg rename tmp MiXeDcAsE
179 $ hg status -A
179 $ hg status -A
180 A MiXeDcAsE
180 A MiXeDcAsE
181 mIxEdCaSe
181 mIxEdCaSe
182 R mIxEdCaSe
182 R mIxEdCaSe
183 $ hg qrefresh
183 $ hg qrefresh
184 $ hg status -A
184 $ hg status -A
185 C MiXeDcAsE
185 C MiXeDcAsE
186
186
187 $ hg qpop -a
187 $ hg qpop -a
188 popping refresh-casechange
188 popping refresh-casechange
189 patch queue now empty
189 patch queue now empty
190 $ hg qnew refresh-pattern
190 $ hg qnew refresh-pattern
191 $ hg status
191 $ hg status
192 $ echo A > A
192 $ echo A > A
193 $ hg add
193 $ hg add
194 adding A
194 adding A
195 $ hg qrefresh a # issue 3271, qrefresh with file handled case wrong
195 $ hg qrefresh a # issue 3271, qrefresh with file handled case wrong
196 $ hg status # empty status means the qrefresh worked
196 $ hg status # empty status means the qrefresh worked
197
197
198 #if osx
199
200 We assume anyone running the tests on a case-insensitive volume on OS
201 X will be using HFS+. If that's not true, this test will fail.
202
203 $ rm A
204 >>> open(u'a\u200c'.encode('utf-8'), 'w').write('unicode is fun')
205 $ hg status
206 M A
207
208 #endif
209
198 $ cd ..
210 $ cd ..
@@ -1,521 +1,572 b''
1 commit date test
1 commit date test
2
2
3 $ hg init test
3 $ hg init test
4 $ cd test
4 $ cd test
5 $ echo foo > foo
5 $ echo foo > foo
6 $ hg add foo
6 $ hg add foo
7 $ cat > $TESTTMP/checkeditform.sh <<EOF
7 $ cat > $TESTTMP/checkeditform.sh <<EOF
8 > env | grep HGEDITFORM
8 > env | grep HGEDITFORM
9 > true
9 > true
10 > EOF
10 > EOF
11 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
11 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
12 HGEDITFORM=commit.normal.normal
12 HGEDITFORM=commit.normal.normal
13 abort: empty commit message
13 abort: empty commit message
14 [255]
14 [255]
15 $ hg commit -d '0 0' -m commit-1
15 $ hg commit -d '0 0' -m commit-1
16 $ echo foo >> foo
16 $ echo foo >> foo
17 $ hg commit -d '1 4444444' -m commit-3
17 $ hg commit -d '1 4444444' -m commit-3
18 abort: impossible time zone offset: 4444444
18 abort: impossible time zone offset: 4444444
19 [255]
19 [255]
20 $ hg commit -d '1 15.1' -m commit-4
20 $ hg commit -d '1 15.1' -m commit-4
21 abort: invalid date: '1\t15.1'
21 abort: invalid date: '1\t15.1'
22 [255]
22 [255]
23 $ hg commit -d 'foo bar' -m commit-5
23 $ hg commit -d 'foo bar' -m commit-5
24 abort: invalid date: 'foo bar'
24 abort: invalid date: 'foo bar'
25 [255]
25 [255]
26 $ hg commit -d ' 1 4444' -m commit-6
26 $ hg commit -d ' 1 4444' -m commit-6
27 $ hg commit -d '111111111111 0' -m commit-7
27 $ hg commit -d '111111111111 0' -m commit-7
28 abort: date exceeds 32 bits: 111111111111
28 abort: date exceeds 32 bits: 111111111111
29 [255]
29 [255]
30 $ hg commit -d '-7654321 3600' -m commit-7
30 $ hg commit -d '-7654321 3600' -m commit-7
31 abort: negative date value: -7654321
31 abort: negative date value: -7654321
32 [255]
32 [255]
33
33
34 commit added file that has been deleted
34 commit added file that has been deleted
35
35
36 $ echo bar > bar
36 $ echo bar > bar
37 $ hg add bar
37 $ hg add bar
38 $ rm bar
38 $ rm bar
39 $ hg commit -m commit-8
39 $ hg commit -m commit-8
40 nothing changed (1 missing files, see 'hg status')
40 nothing changed (1 missing files, see 'hg status')
41 [1]
41 [1]
42 $ hg commit -m commit-8-2 bar
42 $ hg commit -m commit-8-2 bar
43 abort: bar: file not found!
43 abort: bar: file not found!
44 [255]
44 [255]
45
45
46 $ hg -q revert -a --no-backup
46 $ hg -q revert -a --no-backup
47
47
48 $ mkdir dir
48 $ mkdir dir
49 $ echo boo > dir/file
49 $ echo boo > dir/file
50 $ hg add
50 $ hg add
51 adding dir/file (glob)
51 adding dir/file (glob)
52 $ hg -v commit -m commit-9 dir
52 $ hg -v commit -m commit-9 dir
53 dir/file
53 dir/file
54 committed changeset 2:d2a76177cb42
54 committed changeset 2:d2a76177cb42
55
55
56 $ echo > dir.file
56 $ echo > dir.file
57 $ hg add
57 $ hg add
58 adding dir.file
58 adding dir.file
59 $ hg commit -m commit-10 dir dir.file
59 $ hg commit -m commit-10 dir dir.file
60 abort: dir: no match under directory!
60 abort: dir: no match under directory!
61 [255]
61 [255]
62
62
63 $ echo >> dir/file
63 $ echo >> dir/file
64 $ mkdir bleh
64 $ mkdir bleh
65 $ mkdir dir2
65 $ mkdir dir2
66 $ cd bleh
66 $ cd bleh
67 $ hg commit -m commit-11 .
67 $ hg commit -m commit-11 .
68 abort: bleh: no match under directory!
68 abort: bleh: no match under directory!
69 [255]
69 [255]
70 $ hg commit -m commit-12 ../dir ../dir2
70 $ hg commit -m commit-12 ../dir ../dir2
71 abort: dir2: no match under directory!
71 abort: dir2: no match under directory!
72 [255]
72 [255]
73 $ hg -v commit -m commit-13 ../dir
73 $ hg -v commit -m commit-13 ../dir
74 dir/file
74 dir/file
75 committed changeset 3:1cd62a2d8db5
75 committed changeset 3:1cd62a2d8db5
76 $ cd ..
76 $ cd ..
77
77
78 $ hg commit -m commit-14 does-not-exist
78 $ hg commit -m commit-14 does-not-exist
79 abort: does-not-exist: * (glob)
79 abort: does-not-exist: * (glob)
80 [255]
80 [255]
81
81
82 #if symlink
82 #if symlink
83 $ ln -s foo baz
83 $ ln -s foo baz
84 $ hg commit -m commit-15 baz
84 $ hg commit -m commit-15 baz
85 abort: baz: file not tracked!
85 abort: baz: file not tracked!
86 [255]
86 [255]
87 #endif
87 #endif
88
88
89 $ touch quux
89 $ touch quux
90 $ hg commit -m commit-16 quux
90 $ hg commit -m commit-16 quux
91 abort: quux: file not tracked!
91 abort: quux: file not tracked!
92 [255]
92 [255]
93 $ echo >> dir/file
93 $ echo >> dir/file
94 $ hg -v commit -m commit-17 dir/file
94 $ hg -v commit -m commit-17 dir/file
95 dir/file
95 dir/file
96 committed changeset 4:49176991390e
96 committed changeset 4:49176991390e
97
97
98 An empty date was interpreted as epoch origin
98 An empty date was interpreted as epoch origin
99
99
100 $ echo foo >> foo
100 $ echo foo >> foo
101 $ hg commit -d '' -m commit-no-date
101 $ hg commit -d '' -m commit-no-date
102 $ hg tip --template '{date|isodate}\n' | grep '1970'
102 $ hg tip --template '{date|isodate}\n' | grep '1970'
103 [1]
103 [1]
104
104
105 Make sure we do not obscure unknown requires file entries (issue2649)
105 Make sure we do not obscure unknown requires file entries (issue2649)
106
106
107 $ echo foo >> foo
107 $ echo foo >> foo
108 $ echo fake >> .hg/requires
108 $ echo fake >> .hg/requires
109 $ hg commit -m bla
109 $ hg commit -m bla
110 abort: repository requires features unknown to this Mercurial: fake!
110 abort: repository requires features unknown to this Mercurial: fake!
111 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
111 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
112 [255]
112 [255]
113
113
114 $ cd ..
114 $ cd ..
115
115
116
116
117 partial subdir commit test
117 partial subdir commit test
118
118
119 $ hg init test2
119 $ hg init test2
120 $ cd test2
120 $ cd test2
121 $ mkdir foo
121 $ mkdir foo
122 $ echo foo > foo/foo
122 $ echo foo > foo/foo
123 $ mkdir bar
123 $ mkdir bar
124 $ echo bar > bar/bar
124 $ echo bar > bar/bar
125 $ hg add
125 $ hg add
126 adding bar/bar (glob)
126 adding bar/bar (glob)
127 adding foo/foo (glob)
127 adding foo/foo (glob)
128 $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
128 $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo
129 commit-subdir-1
129 commit-subdir-1
130
130
131
131
132 HG: Enter commit message. Lines beginning with 'HG:' are removed.
132 HG: Enter commit message. Lines beginning with 'HG:' are removed.
133 HG: Leave message empty to abort commit.
133 HG: Leave message empty to abort commit.
134 HG: --
134 HG: --
135 HG: user: test
135 HG: user: test
136 HG: branch 'default'
136 HG: branch 'default'
137 HG: added foo/foo
137 HG: added foo/foo
138
138
139
139
140 $ hg ci -m commit-subdir-2 bar
140 $ hg ci -m commit-subdir-2 bar
141
141
142 subdir log 1
142 subdir log 1
143
143
144 $ hg log -v foo
144 $ hg log -v foo
145 changeset: 0:f97e73a25882
145 changeset: 0:f97e73a25882
146 user: test
146 user: test
147 date: Thu Jan 01 00:00:00 1970 +0000
147 date: Thu Jan 01 00:00:00 1970 +0000
148 files: foo/foo
148 files: foo/foo
149 description:
149 description:
150 commit-subdir-1
150 commit-subdir-1
151
151
152
152
153
153
154 subdir log 2
154 subdir log 2
155
155
156 $ hg log -v bar
156 $ hg log -v bar
157 changeset: 1:aa809156d50d
157 changeset: 1:aa809156d50d
158 tag: tip
158 tag: tip
159 user: test
159 user: test
160 date: Thu Jan 01 00:00:00 1970 +0000
160 date: Thu Jan 01 00:00:00 1970 +0000
161 files: bar/bar
161 files: bar/bar
162 description:
162 description:
163 commit-subdir-2
163 commit-subdir-2
164
164
165
165
166
166
167 full log
167 full log
168
168
169 $ hg log -v
169 $ hg log -v
170 changeset: 1:aa809156d50d
170 changeset: 1:aa809156d50d
171 tag: tip
171 tag: tip
172 user: test
172 user: test
173 date: Thu Jan 01 00:00:00 1970 +0000
173 date: Thu Jan 01 00:00:00 1970 +0000
174 files: bar/bar
174 files: bar/bar
175 description:
175 description:
176 commit-subdir-2
176 commit-subdir-2
177
177
178
178
179 changeset: 0:f97e73a25882
179 changeset: 0:f97e73a25882
180 user: test
180 user: test
181 date: Thu Jan 01 00:00:00 1970 +0000
181 date: Thu Jan 01 00:00:00 1970 +0000
182 files: foo/foo
182 files: foo/foo
183 description:
183 description:
184 commit-subdir-1
184 commit-subdir-1
185
185
186
186
187 $ cd ..
187 $ cd ..
188
188
189
189
190 dot and subdir commit test
190 dot and subdir commit test
191
191
192 $ hg init test3
192 $ hg init test3
193 $ echo commit-foo-subdir > commit-log-test
193 $ echo commit-foo-subdir > commit-log-test
194 $ cd test3
194 $ cd test3
195 $ mkdir foo
195 $ mkdir foo
196 $ echo foo content > foo/plain-file
196 $ echo foo content > foo/plain-file
197 $ hg add foo/plain-file
197 $ hg add foo/plain-file
198 $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo
198 $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo
199 commit-foo-subdir
199 commit-foo-subdir
200
200
201
201
202 HG: Enter commit message. Lines beginning with 'HG:' are removed.
202 HG: Enter commit message. Lines beginning with 'HG:' are removed.
203 HG: Leave message empty to abort commit.
203 HG: Leave message empty to abort commit.
204 HG: --
204 HG: --
205 HG: user: test
205 HG: user: test
206 HG: branch 'default'
206 HG: branch 'default'
207 HG: added foo/plain-file
207 HG: added foo/plain-file
208
208
209
209
210 $ echo modified foo content > foo/plain-file
210 $ echo modified foo content > foo/plain-file
211 $ hg ci -m commit-foo-dot .
211 $ hg ci -m commit-foo-dot .
212
212
213 full log
213 full log
214
214
215 $ hg log -v
215 $ hg log -v
216 changeset: 1:95b38e3a5b2e
216 changeset: 1:95b38e3a5b2e
217 tag: tip
217 tag: tip
218 user: test
218 user: test
219 date: Thu Jan 01 00:00:00 1970 +0000
219 date: Thu Jan 01 00:00:00 1970 +0000
220 files: foo/plain-file
220 files: foo/plain-file
221 description:
221 description:
222 commit-foo-dot
222 commit-foo-dot
223
223
224
224
225 changeset: 0:65d4e9386227
225 changeset: 0:65d4e9386227
226 user: test
226 user: test
227 date: Thu Jan 01 00:00:00 1970 +0000
227 date: Thu Jan 01 00:00:00 1970 +0000
228 files: foo/plain-file
228 files: foo/plain-file
229 description:
229 description:
230 commit-foo-subdir
230 commit-foo-subdir
231
231
232
232
233
233
234 subdir log
234 subdir log
235
235
236 $ cd foo
236 $ cd foo
237 $ hg log .
237 $ hg log .
238 changeset: 1:95b38e3a5b2e
238 changeset: 1:95b38e3a5b2e
239 tag: tip
239 tag: tip
240 user: test
240 user: test
241 date: Thu Jan 01 00:00:00 1970 +0000
241 date: Thu Jan 01 00:00:00 1970 +0000
242 summary: commit-foo-dot
242 summary: commit-foo-dot
243
243
244 changeset: 0:65d4e9386227
244 changeset: 0:65d4e9386227
245 user: test
245 user: test
246 date: Thu Jan 01 00:00:00 1970 +0000
246 date: Thu Jan 01 00:00:00 1970 +0000
247 summary: commit-foo-subdir
247 summary: commit-foo-subdir
248
248
249 $ cd ..
249 $ cd ..
250 $ cd ..
250 $ cd ..
251
251
252 Issue1049: Hg permits partial commit of merge without warning
252 Issue1049: Hg permits partial commit of merge without warning
253
253
254 $ hg init issue1049
254 $ hg init issue1049
255 $ cd issue1049
255 $ cd issue1049
256 $ echo a > a
256 $ echo a > a
257 $ hg ci -Ama
257 $ hg ci -Ama
258 adding a
258 adding a
259 $ echo a >> a
259 $ echo a >> a
260 $ hg ci -mb
260 $ hg ci -mb
261 $ hg up 0
261 $ hg up 0
262 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
263 $ echo b >> a
263 $ echo b >> a
264 $ hg ci -mc
264 $ hg ci -mc
265 created new head
265 created new head
266 $ HGMERGE=true hg merge
266 $ HGMERGE=true hg merge
267 merging a
267 merging a
268 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
268 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
269 (branch merge, don't forget to commit)
269 (branch merge, don't forget to commit)
270
270
271 should fail because we are specifying a file name
271 should fail because we are specifying a file name
272
272
273 $ hg ci -mmerge a
273 $ hg ci -mmerge a
274 abort: cannot partially commit a merge (do not specify files or patterns)
274 abort: cannot partially commit a merge (do not specify files or patterns)
275 [255]
275 [255]
276
276
277 should fail because we are specifying a pattern
277 should fail because we are specifying a pattern
278
278
279 $ hg ci -mmerge -I a
279 $ hg ci -mmerge -I a
280 abort: cannot partially commit a merge (do not specify files or patterns)
280 abort: cannot partially commit a merge (do not specify files or patterns)
281 [255]
281 [255]
282
282
283 should succeed
283 should succeed
284
284
285 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit
285 $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit
286 HGEDITFORM=commit.normal.merge
286 HGEDITFORM=commit.normal.merge
287 $ cd ..
287 $ cd ..
288
288
289
289
290 test commit message content
290 test commit message content
291
291
292 $ hg init commitmsg
292 $ hg init commitmsg
293 $ cd commitmsg
293 $ cd commitmsg
294 $ echo changed > changed
294 $ echo changed > changed
295 $ echo removed > removed
295 $ echo removed > removed
296 $ hg book currentbookmark
296 $ hg book currentbookmark
297 $ hg ci -qAm init
297 $ hg ci -qAm init
298
298
299 $ hg rm removed
299 $ hg rm removed
300 $ echo changed >> changed
300 $ echo changed >> changed
301 $ echo added > added
301 $ echo added > added
302 $ hg add added
302 $ hg add added
303 $ HGEDITOR=cat hg ci -A
303 $ HGEDITOR=cat hg ci -A
304
304
305
305
306 HG: Enter commit message. Lines beginning with 'HG:' are removed.
306 HG: Enter commit message. Lines beginning with 'HG:' are removed.
307 HG: Leave message empty to abort commit.
307 HG: Leave message empty to abort commit.
308 HG: --
308 HG: --
309 HG: user: test
309 HG: user: test
310 HG: branch 'default'
310 HG: branch 'default'
311 HG: bookmark 'currentbookmark'
311 HG: bookmark 'currentbookmark'
312 HG: added added
312 HG: added added
313 HG: changed changed
313 HG: changed changed
314 HG: removed removed
314 HG: removed removed
315 abort: empty commit message
315 abort: empty commit message
316 [255]
316 [255]
317
317
318 test saving last-message.txt
318 test saving last-message.txt
319
319
320 $ hg init sub
320 $ hg init sub
321 $ echo a > sub/a
321 $ echo a > sub/a
322 $ hg -R sub add sub/a
322 $ hg -R sub add sub/a
323 $ cat > sub/.hg/hgrc <<EOF
323 $ cat > sub/.hg/hgrc <<EOF
324 > [hooks]
324 > [hooks]
325 > precommit.test-saving-last-message = false
325 > precommit.test-saving-last-message = false
326 > EOF
326 > EOF
327
327
328 $ echo 'sub = sub' > .hgsub
328 $ echo 'sub = sub' > .hgsub
329 $ hg add .hgsub
329 $ hg add .hgsub
330
330
331 $ cat > $TESTTMP/editor.sh <<EOF
331 $ cat > $TESTTMP/editor.sh <<EOF
332 > echo "==== before editing:"
332 > echo "==== before editing:"
333 > cat \$1
333 > cat \$1
334 > echo "===="
334 > echo "===="
335 > echo "test saving last-message.txt" >> \$1
335 > echo "test saving last-message.txt" >> \$1
336 > EOF
336 > EOF
337
337
338 $ rm -f .hg/last-message.txt
338 $ rm -f .hg/last-message.txt
339 $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q
339 $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q
340 ==== before editing:
340 ==== before editing:
341
341
342
342
343 HG: Enter commit message. Lines beginning with 'HG:' are removed.
343 HG: Enter commit message. Lines beginning with 'HG:' are removed.
344 HG: Leave message empty to abort commit.
344 HG: Leave message empty to abort commit.
345 HG: --
345 HG: --
346 HG: user: test
346 HG: user: test
347 HG: branch 'default'
347 HG: branch 'default'
348 HG: bookmark 'currentbookmark'
348 HG: bookmark 'currentbookmark'
349 HG: subrepo sub
349 HG: subrepo sub
350 HG: added .hgsub
350 HG: added .hgsub
351 HG: added added
351 HG: added added
352 HG: changed .hgsubstate
352 HG: changed .hgsubstate
353 HG: changed changed
353 HG: changed changed
354 HG: removed removed
354 HG: removed removed
355 ====
355 ====
356 abort: precommit.test-saving-last-message hook exited with status 1 (in subrepo sub)
356 abort: precommit.test-saving-last-message hook exited with status 1 (in subrepo sub)
357 [255]
357 [255]
358 $ cat .hg/last-message.txt
358 $ cat .hg/last-message.txt
359
359
360
360
361 test saving last-message.txt
361 test saving last-message.txt
362
362
363 test that '[committemplate] changeset' definition and commit log
363 test that '[committemplate] changeset' definition and commit log
364 specific template keywords work well
364 specific template keywords work well
365
365
366 $ cat >> .hg/hgrc <<EOF
366 $ cat >> .hg/hgrc <<EOF
367 > [committemplate]
367 > [committemplate]
368 > changeset.commit.normal = HG: this is "commit.normal" template
368 > changeset.commit.normal = HG: this is "commit.normal" template
369 > HG: {extramsg}
369 > HG: {extramsg}
370 > {if(currentbookmark,
370 > {if(currentbookmark,
371 > "HG: bookmark '{currentbookmark}' is activated\n",
371 > "HG: bookmark '{currentbookmark}' is activated\n",
372 > "HG: no bookmark is activated\n")}{subrepos %
372 > "HG: no bookmark is activated\n")}{subrepos %
373 > "HG: subrepo '{subrepo}' is changed\n"}
373 > "HG: subrepo '{subrepo}' is changed\n"}
374 >
374 >
375 > changeset.commit = HG: this is "commit" template
375 > changeset.commit = HG: this is "commit" template
376 > HG: {extramsg}
376 > HG: {extramsg}
377 > {if(currentbookmark,
377 > {if(currentbookmark,
378 > "HG: bookmark '{currentbookmark}' is activated\n",
378 > "HG: bookmark '{currentbookmark}' is activated\n",
379 > "HG: no bookmark is activated\n")}{subrepos %
379 > "HG: no bookmark is activated\n")}{subrepos %
380 > "HG: subrepo '{subrepo}' is changed\n"}
380 > "HG: subrepo '{subrepo}' is changed\n"}
381 >
381 >
382 > changeset = HG: this is customized commit template
382 > changeset = HG: this is customized commit template
383 > HG: {extramsg}
383 > HG: {extramsg}
384 > {if(currentbookmark,
384 > {if(currentbookmark,
385 > "HG: bookmark '{currentbookmark}' is activated\n",
385 > "HG: bookmark '{currentbookmark}' is activated\n",
386 > "HG: no bookmark is activated\n")}{subrepos %
386 > "HG: no bookmark is activated\n")}{subrepos %
387 > "HG: subrepo '{subrepo}' is changed\n"}
387 > "HG: subrepo '{subrepo}' is changed\n"}
388 > EOF
388 > EOF
389
389
390 $ hg init sub2
390 $ hg init sub2
391 $ echo a > sub2/a
391 $ echo a > sub2/a
392 $ hg -R sub2 add sub2/a
392 $ hg -R sub2 add sub2/a
393 $ echo 'sub2 = sub2' >> .hgsub
393 $ echo 'sub2 = sub2' >> .hgsub
394
394
395 $ HGEDITOR=cat hg commit -S -q
395 $ HGEDITOR=cat hg commit -S -q
396 HG: this is "commit.normal" template
396 HG: this is "commit.normal" template
397 HG: Leave message empty to abort commit.
397 HG: Leave message empty to abort commit.
398 HG: bookmark 'currentbookmark' is activated
398 HG: bookmark 'currentbookmark' is activated
399 HG: subrepo 'sub' is changed
399 HG: subrepo 'sub' is changed
400 HG: subrepo 'sub2' is changed
400 HG: subrepo 'sub2' is changed
401 abort: empty commit message
401 abort: empty commit message
402 [255]
402 [255]
403
403
404 $ cat >> .hg/hgrc <<EOF
404 $ cat >> .hg/hgrc <<EOF
405 > [committemplate]
405 > [committemplate]
406 > changeset.commit.normal =
406 > changeset.commit.normal =
407 > # now, "changeset.commit" should be chosen for "hg commit"
407 > # now, "changeset.commit" should be chosen for "hg commit"
408 > EOF
408 > EOF
409
409
410 $ hg bookmark --inactive currentbookmark
410 $ hg bookmark --inactive currentbookmark
411 $ hg forget .hgsub
411 $ hg forget .hgsub
412 $ HGEDITOR=cat hg commit -q
412 $ HGEDITOR=cat hg commit -q
413 HG: this is "commit" template
413 HG: this is "commit" template
414 HG: Leave message empty to abort commit.
414 HG: Leave message empty to abort commit.
415 HG: no bookmark is activated
415 HG: no bookmark is activated
416 abort: empty commit message
416 abort: empty commit message
417 [255]
417 [255]
418
418
419 $ cat >> .hg/hgrc <<EOF
419 $ cat >> .hg/hgrc <<EOF
420 > [committemplate]
420 > [committemplate]
421 > changeset.commit =
421 > changeset.commit =
422 > # now, "changeset" should be chosen for "hg commit"
422 > # now, "changeset" should be chosen for "hg commit"
423 > EOF
423 > EOF
424
424
425 $ HGEDITOR=cat hg commit -q
425 $ HGEDITOR=cat hg commit -q
426 HG: this is customized commit template
426 HG: this is customized commit template
427 HG: Leave message empty to abort commit.
427 HG: Leave message empty to abort commit.
428 HG: no bookmark is activated
428 HG: no bookmark is activated
429 abort: empty commit message
429 abort: empty commit message
430 [255]
430 [255]
431
431
432 prove that we can show a diff of an amend using committemplate:
432 prove that we can show a diff of an amend using committemplate:
433
433
434 $ hg init issue4470
434 $ hg init issue4470
435 $ cd issue4470
435 $ cd issue4470
436 $ cat >> .hg/hgrc <<EOF
436 $ cat >> .hg/hgrc <<EOF
437 > [committemplate]
437 > [committemplate]
438 > changeset = {desc}\n\n
438 > changeset = {desc}\n\n
439 > HG: {extramsg}
439 > HG: {extramsg}
440 > HG: user: {author}\n{ifeq(p2rev, "-1", "",
440 > HG: user: {author}\n{ifeq(p2rev, "-1", "",
441 > "HG: branch merge\n")
441 > "HG: branch merge\n")
442 > }HG: branch '{branch}'\n{if(currentbookmark,
442 > }HG: branch '{branch}'\n{if(currentbookmark,
443 > "HG: bookmark '{currentbookmark}'\n") }{subrepos %
443 > "HG: bookmark '{currentbookmark}'\n") }{subrepos %
444 > "HG: subrepo {subrepo}\n" }
444 > "HG: subrepo {subrepo}\n" }
445 > {splitlines(diff()) % 'HG: {line}\n'}
445 > {splitlines(diff()) % 'HG: {line}\n'}
446 > EOF
446 > EOF
447 $ echo a > a
447 $ echo a > a
448 $ echo b > b
448 $ echo b > b
449 $ hg addr
449 $ hg addr
450 adding a
450 adding a
451 adding b
451 adding b
452 $ hg ci -m 'init'
452 $ hg ci -m 'init'
453 $ hg rm b
453 $ hg rm b
454 $ hg ci -m 'rm b'
454 $ hg ci -m 'rm b'
455 $ hg export .
455 $ hg export .
456 # HG changeset patch
456 # HG changeset patch
457 # User test
457 # User test
458 # Date 0 0
458 # Date 0 0
459 # Thu Jan 01 00:00:00 1970 +0000
459 # Thu Jan 01 00:00:00 1970 +0000
460 # Node ID 88d0ffa85e7a92ccc7c9cc187f9b17858bd206a7
460 # Node ID 88d0ffa85e7a92ccc7c9cc187f9b17858bd206a7
461 # Parent 9118d25c26b1ca5cab5683b02100e7eb2c0d9471
461 # Parent 9118d25c26b1ca5cab5683b02100e7eb2c0d9471
462 rm b
462 rm b
463
463
464 diff -r 9118d25c26b1 -r 88d0ffa85e7a b
464 diff -r 9118d25c26b1 -r 88d0ffa85e7a b
465 --- a/b Thu Jan 01 00:00:00 1970 +0000
465 --- a/b Thu Jan 01 00:00:00 1970 +0000
466 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
466 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
467 @@ -1,1 +0,0 @@
467 @@ -1,1 +0,0 @@
468 -b
468 -b
469 $ echo a >> a
469 $ echo a >> a
470 $ HGEDITOR=cat hg commit --amend
470 $ HGEDITOR=cat hg commit --amend
471 rm b
471 rm b
472
472
473
473
474 HG: Leave message empty to abort commit.
474 HG: Leave message empty to abort commit.
475 HG: user: test
475 HG: user: test
476 HG: branch 'default'
476 HG: branch 'default'
477
477
478 HG: diff -r 9118d25c26b1 a
478 HG: diff -r 9118d25c26b1 a
479 HG: --- a/a Thu Jan 01 00:00:00 1970 +0000
479 HG: --- a/a Thu Jan 01 00:00:00 1970 +0000
480 HG: +++ b/a Thu Jan 01 00:00:00 1970 +0000
480 HG: +++ b/a Thu Jan 01 00:00:00 1970 +0000
481 HG: @@ -1,1 +1,2 @@
481 HG: @@ -1,1 +1,2 @@
482 HG: a
482 HG: a
483 HG: +a
483 HG: +a
484 HG: diff -r 9118d25c26b1 b
484 HG: diff -r 9118d25c26b1 b
485 HG: --- a/b Thu Jan 01 00:00:00 1970 +0000
485 HG: --- a/b Thu Jan 01 00:00:00 1970 +0000
486 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
486 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
487 HG: @@ -1,1 +0,0 @@
487 HG: @@ -1,1 +0,0 @@
488 HG: -b
488 HG: -b
489 saved backup bundle to $TESTTMP/*/*-amend-backup.hg (glob)
489 saved backup bundle to $TESTTMP/*/*-amend-backup.hg (glob)
490 $ cd ..
490 $ cd ..
491
491
492 cleanup
492 cleanup
493 $ cat >> .hg/hgrc <<EOF
493 $ cat >> .hg/hgrc <<EOF
494 > # disable customizing for subsequent tests
494 > # disable customizing for subsequent tests
495 > [committemplate]
495 > [committemplate]
496 > changeset =
496 > changeset =
497 > EOF
497 > EOF
498
498
499 $ cd ..
499 $ cd ..
500
500
501
501
502 commit copy
502 commit copy
503
503
504 $ hg init dir2
504 $ hg init dir2
505 $ cd dir2
505 $ cd dir2
506 $ echo bleh > bar
506 $ echo bleh > bar
507 $ hg add bar
507 $ hg add bar
508 $ hg ci -m 'add bar'
508 $ hg ci -m 'add bar'
509
509
510 $ hg cp bar foo
510 $ hg cp bar foo
511 $ echo >> bar
511 $ echo >> bar
512 $ hg ci -m 'cp bar foo; change bar'
512 $ hg ci -m 'cp bar foo; change bar'
513
513
514 $ hg debugrename foo
514 $ hg debugrename foo
515 foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
515 foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9
516 $ hg debugindex bar
516 $ hg debugindex bar
517 rev offset length ..... linkrev nodeid p1 p2 (re)
517 rev offset length ..... linkrev nodeid p1 p2 (re)
518 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re)
518 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re)
519 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re)
519 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re)
520
520
521 $ cd ..
521 verify pathauditor blocks evil filepaths
522 $ cat > evil-commit.py <<EOF
523 > from mercurial import ui, hg, context, node
524 > notrc = u".h\u200cg".encode('utf-8') + '/hgrc'
525 > u = ui.ui()
526 > r = hg.repository(u, '.')
527 > def filectxfn(repo, memctx, path):
528 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
529 > c = context.memctx(r, [r['tip'].node(), node.nullid],
530 > 'evil', [notrc], filectxfn, 0)
531 > r.commitctx(c)
532 > EOF
533 $ $PYTHON evil-commit.py
534 $ hg co --clean tip
535 abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc)
536 [255]
537
538 $ hg rollback -f
539 repository tip rolled back to revision 1 (undo commit)
540 $ cat > evil-commit.py <<EOF
541 > from mercurial import ui, hg, context, node
542 > notrc = "HG~1/hgrc"
543 > u = ui.ui()
544 > r = hg.repository(u, '.')
545 > def filectxfn(repo, memctx, path):
546 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
547 > c = context.memctx(r, [r['tip'].node(), node.nullid],
548 > 'evil', [notrc], filectxfn, 0)
549 > r.commitctx(c)
550 > EOF
551 $ $PYTHON evil-commit.py
552 $ hg co --clean tip
553 abort: path contains illegal component: HG~1/hgrc
554 [255]
555
556 $ hg rollback -f
557 repository tip rolled back to revision 1 (undo commit)
558 $ cat > evil-commit.py <<EOF
559 > from mercurial import ui, hg, context, node
560 > notrc = "HG8B6C~2/hgrc"
561 > u = ui.ui()
562 > r = hg.repository(u, '.')
563 > def filectxfn(repo, memctx, path):
564 > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned')
565 > c = context.memctx(r, [r['tip'].node(), node.nullid],
566 > 'evil', [notrc], filectxfn, 0)
567 > r.commitctx(c)
568 > EOF
569 $ $PYTHON evil-commit.py
570 $ hg co --clean tip
571 abort: path contains illegal component: HG8B6C~2/hgrc
572 [255]
@@ -1,869 +1,879 b''
1 This file contains testcases that tend to be related to special cases or less
1 This file contains testcases that tend to be related to special cases or less
2 common commands affecting largefile.
2 common commands affecting largefile.
3
3
4 Each sections should be independent of each others.
4 Each sections should be independent of each others.
5
5
6 $ USERCACHE="$TESTTMP/cache"; export USERCACHE
6 $ USERCACHE="$TESTTMP/cache"; export USERCACHE
7 $ mkdir "${USERCACHE}"
7 $ mkdir "${USERCACHE}"
8 $ cat >> $HGRCPATH <<EOF
8 $ cat >> $HGRCPATH <<EOF
9 > [extensions]
9 > [extensions]
10 > largefiles=
10 > largefiles=
11 > purge=
11 > purge=
12 > rebase=
12 > rebase=
13 > transplant=
13 > transplant=
14 > [phases]
14 > [phases]
15 > publish=False
15 > publish=False
16 > [largefiles]
16 > [largefiles]
17 > minsize=2
17 > minsize=2
18 > patterns=glob:**.dat
18 > patterns=glob:**.dat
19 > usercache=${USERCACHE}
19 > usercache=${USERCACHE}
20 > [hooks]
20 > [hooks]
21 > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status"
21 > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status"
22 > EOF
22 > EOF
23
23
24
24
25
25
26 Test copies and moves from a directory other than root (issue3516)
26 Test copies and moves from a directory other than root (issue3516)
27 =========================================================================
27 =========================================================================
28
28
29 $ hg init lf_cpmv
29 $ hg init lf_cpmv
30 $ cd lf_cpmv
30 $ cd lf_cpmv
31 $ mkdir dira
31 $ mkdir dira
32 $ mkdir dira/dirb
32 $ mkdir dira/dirb
33 $ touch dira/dirb/largefile
33 $ touch dira/dirb/largefile
34 $ hg add --large dira/dirb/largefile
34 $ hg add --large dira/dirb/largefile
35 $ hg commit -m "added"
35 $ hg commit -m "added"
36 Invoking status precommit hook
36 Invoking status precommit hook
37 A dira/dirb/largefile
37 A dira/dirb/largefile
38 $ cd dira
38 $ cd dira
39 $ hg cp dirb/largefile foo/largefile
39 $ hg cp dirb/largefile foo/largefile
40 $ hg ci -m "deep copy"
40 $ hg ci -m "deep copy"
41 Invoking status precommit hook
41 Invoking status precommit hook
42 A dira/foo/largefile
42 A dira/foo/largefile
43 $ find . | sort
43 $ find . | sort
44 .
44 .
45 ./dirb
45 ./dirb
46 ./dirb/largefile
46 ./dirb/largefile
47 ./foo
47 ./foo
48 ./foo/largefile
48 ./foo/largefile
49 $ hg mv foo/largefile baz/largefile
49 $ hg mv foo/largefile baz/largefile
50 $ hg ci -m "moved"
50 $ hg ci -m "moved"
51 Invoking status precommit hook
51 Invoking status precommit hook
52 A dira/baz/largefile
52 A dira/baz/largefile
53 R dira/foo/largefile
53 R dira/foo/largefile
54 $ find . | sort
54 $ find . | sort
55 .
55 .
56 ./baz
56 ./baz
57 ./baz/largefile
57 ./baz/largefile
58 ./dirb
58 ./dirb
59 ./dirb/largefile
59 ./dirb/largefile
60 $ cd ..
60 $ cd ..
61 $ hg mv dira dirc
61 $ hg mv dira dirc
62 moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile (glob)
62 moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile (glob)
63 moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile (glob)
63 moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile (glob)
64 $ find * | sort
64 $ find * | sort
65 dirc
65 dirc
66 dirc/baz
66 dirc/baz
67 dirc/baz/largefile
67 dirc/baz/largefile
68 dirc/dirb
68 dirc/dirb
69 dirc/dirb/largefile
69 dirc/dirb/largefile
70
70
71 $ hg clone -q . ../fetch
71 $ hg clone -q . ../fetch
72 $ hg --config extensions.fetch= fetch ../fetch
72 $ hg --config extensions.fetch= fetch ../fetch
73 abort: uncommitted changes
73 abort: uncommitted changes
74 [255]
74 [255]
75 $ hg up -qC
75 $ hg up -qC
76 $ cd ..
76 $ cd ..
77
77
78 Clone a local repository owned by another user
78 Clone a local repository owned by another user
79 ===================================================
79 ===================================================
80
80
81 #if unix-permissions
81 #if unix-permissions
82
82
83 We have to simulate that here by setting $HOME and removing write permissions
83 We have to simulate that here by setting $HOME and removing write permissions
84 $ ORIGHOME="$HOME"
84 $ ORIGHOME="$HOME"
85 $ mkdir alice
85 $ mkdir alice
86 $ HOME="`pwd`/alice"
86 $ HOME="`pwd`/alice"
87 $ cd alice
87 $ cd alice
88 $ hg init pubrepo
88 $ hg init pubrepo
89 $ cd pubrepo
89 $ cd pubrepo
90 $ dd if=/dev/zero bs=1k count=11k > a-large-file 2> /dev/null
90 $ dd if=/dev/zero bs=1k count=11k > a-large-file 2> /dev/null
91 $ hg add --large a-large-file
91 $ hg add --large a-large-file
92 $ hg commit -m "Add a large file"
92 $ hg commit -m "Add a large file"
93 Invoking status precommit hook
93 Invoking status precommit hook
94 A a-large-file
94 A a-large-file
95 $ cd ..
95 $ cd ..
96 $ chmod -R a-w pubrepo
96 $ chmod -R a-w pubrepo
97 $ cd ..
97 $ cd ..
98 $ mkdir bob
98 $ mkdir bob
99 $ HOME="`pwd`/bob"
99 $ HOME="`pwd`/bob"
100 $ cd bob
100 $ cd bob
101 $ hg clone --pull ../alice/pubrepo pubrepo
101 $ hg clone --pull ../alice/pubrepo pubrepo
102 requesting all changes
102 requesting all changes
103 adding changesets
103 adding changesets
104 adding manifests
104 adding manifests
105 adding file changes
105 adding file changes
106 added 1 changesets with 1 changes to 1 files
106 added 1 changesets with 1 changes to 1 files
107 updating to branch default
107 updating to branch default
108 getting changed largefiles
108 getting changed largefiles
109 1 largefiles updated, 0 removed
109 1 largefiles updated, 0 removed
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
111 $ cd ..
111 $ cd ..
112 $ chmod -R u+w alice/pubrepo
112 $ chmod -R u+w alice/pubrepo
113 $ HOME="$ORIGHOME"
113 $ HOME="$ORIGHOME"
114
114
115 #endif
115 #endif
116
116
117
117
118 Symlink to a large largefile should behave the same as a symlink to a normal file
118 Symlink to a large largefile should behave the same as a symlink to a normal file
119 =====================================================================================
119 =====================================================================================
120
120
121 #if symlink
121 #if symlink
122
122
123 $ hg init largesymlink
123 $ hg init largesymlink
124 $ cd largesymlink
124 $ cd largesymlink
125 $ dd if=/dev/zero bs=1k count=10k of=largefile 2>/dev/null
125 $ dd if=/dev/zero bs=1k count=10k of=largefile 2>/dev/null
126 $ hg add --large largefile
126 $ hg add --large largefile
127 $ hg commit -m "commit a large file"
127 $ hg commit -m "commit a large file"
128 Invoking status precommit hook
128 Invoking status precommit hook
129 A largefile
129 A largefile
130 $ ln -s largefile largelink
130 $ ln -s largefile largelink
131 $ hg add largelink
131 $ hg add largelink
132 $ hg commit -m "commit a large symlink"
132 $ hg commit -m "commit a large symlink"
133 Invoking status precommit hook
133 Invoking status precommit hook
134 A largelink
134 A largelink
135 $ rm -f largelink
135 $ rm -f largelink
136 $ hg up >/dev/null
136 $ hg up >/dev/null
137 $ test -f largelink
137 $ test -f largelink
138 [1]
138 [1]
139 $ test -L largelink
139 $ test -L largelink
140 [1]
140 [1]
141 $ rm -f largelink # make next part of the test independent of the previous
141 $ rm -f largelink # make next part of the test independent of the previous
142 $ hg up -C >/dev/null
142 $ hg up -C >/dev/null
143 $ test -f largelink
143 $ test -f largelink
144 $ test -L largelink
144 $ test -L largelink
145 $ cd ..
145 $ cd ..
146
146
147 #endif
147 #endif
148
148
149
149
150 test for pattern matching on 'hg status':
150 test for pattern matching on 'hg status':
151 ==============================================
151 ==============================================
152
152
153
153
154 to boost performance, largefiles checks whether specified patterns are
154 to boost performance, largefiles checks whether specified patterns are
155 related to largefiles in working directory (NOT to STANDIN) or not.
155 related to largefiles in working directory (NOT to STANDIN) or not.
156
156
157 $ hg init statusmatch
157 $ hg init statusmatch
158 $ cd statusmatch
158 $ cd statusmatch
159
159
160 $ mkdir -p a/b/c/d
160 $ mkdir -p a/b/c/d
161 $ echo normal > a/b/c/d/e.normal.txt
161 $ echo normal > a/b/c/d/e.normal.txt
162 $ hg add a/b/c/d/e.normal.txt
162 $ hg add a/b/c/d/e.normal.txt
163 $ echo large > a/b/c/d/e.large.txt
163 $ echo large > a/b/c/d/e.large.txt
164 $ hg add --large a/b/c/d/e.large.txt
164 $ hg add --large a/b/c/d/e.large.txt
165 $ mkdir -p a/b/c/x
165 $ mkdir -p a/b/c/x
166 $ echo normal > a/b/c/x/y.normal.txt
166 $ echo normal > a/b/c/x/y.normal.txt
167 $ hg add a/b/c/x/y.normal.txt
167 $ hg add a/b/c/x/y.normal.txt
168 $ hg commit -m 'add files'
168 $ hg commit -m 'add files'
169 Invoking status precommit hook
169 Invoking status precommit hook
170 A a/b/c/d/e.large.txt
170 A a/b/c/d/e.large.txt
171 A a/b/c/d/e.normal.txt
171 A a/b/c/d/e.normal.txt
172 A a/b/c/x/y.normal.txt
172 A a/b/c/x/y.normal.txt
173
173
174 (1) no pattern: no performance boost
174 (1) no pattern: no performance boost
175 $ hg status -A
175 $ hg status -A
176 C a/b/c/d/e.large.txt
176 C a/b/c/d/e.large.txt
177 C a/b/c/d/e.normal.txt
177 C a/b/c/d/e.normal.txt
178 C a/b/c/x/y.normal.txt
178 C a/b/c/x/y.normal.txt
179
179
180 (2) pattern not related to largefiles: performance boost
180 (2) pattern not related to largefiles: performance boost
181 $ hg status -A a/b/c/x
181 $ hg status -A a/b/c/x
182 C a/b/c/x/y.normal.txt
182 C a/b/c/x/y.normal.txt
183
183
184 (3) pattern related to largefiles: no performance boost
184 (3) pattern related to largefiles: no performance boost
185 $ hg status -A a/b/c/d
185 $ hg status -A a/b/c/d
186 C a/b/c/d/e.large.txt
186 C a/b/c/d/e.large.txt
187 C a/b/c/d/e.normal.txt
187 C a/b/c/d/e.normal.txt
188
188
189 (4) pattern related to STANDIN (not to largefiles): performance boost
189 (4) pattern related to STANDIN (not to largefiles): performance boost
190 $ hg status -A .hglf/a
190 $ hg status -A .hglf/a
191 C .hglf/a/b/c/d/e.large.txt
191 C .hglf/a/b/c/d/e.large.txt
192
192
193 (5) mixed case: no performance boost
193 (5) mixed case: no performance boost
194 $ hg status -A a/b/c/x a/b/c/d
194 $ hg status -A a/b/c/x a/b/c/d
195 C a/b/c/d/e.large.txt
195 C a/b/c/d/e.large.txt
196 C a/b/c/d/e.normal.txt
196 C a/b/c/d/e.normal.txt
197 C a/b/c/x/y.normal.txt
197 C a/b/c/x/y.normal.txt
198
198
199 verify that largefiles doesn't break filesets
199 verify that largefiles doesn't break filesets
200
200
201 $ hg log --rev . --exclude "set:binary()"
201 $ hg log --rev . --exclude "set:binary()"
202 changeset: 0:41bd42f10efa
202 changeset: 0:41bd42f10efa
203 tag: tip
203 tag: tip
204 user: test
204 user: test
205 date: Thu Jan 01 00:00:00 1970 +0000
205 date: Thu Jan 01 00:00:00 1970 +0000
206 summary: add files
206 summary: add files
207
207
208 verify that large files in subrepos handled properly
208 verify that large files in subrepos handled properly
209 $ hg init subrepo
209 $ hg init subrepo
210 $ echo "subrepo = subrepo" > .hgsub
210 $ echo "subrepo = subrepo" > .hgsub
211 $ hg add .hgsub
211 $ hg add .hgsub
212 $ hg ci -m "add subrepo"
212 $ hg ci -m "add subrepo"
213 Invoking status precommit hook
213 Invoking status precommit hook
214 A .hgsub
214 A .hgsub
215 ? .hgsubstate
215 ? .hgsubstate
216 $ echo "rev 1" > subrepo/large.txt
216 $ echo "rev 1" > subrepo/large.txt
217 $ hg -R subrepo add --large subrepo/large.txt
217 $ hg -R subrepo add --large subrepo/large.txt
218 $ hg sum
218 $ hg sum
219 parent: 1:8ee150ea2e9c tip
219 parent: 1:8ee150ea2e9c tip
220 add subrepo
220 add subrepo
221 branch: default
221 branch: default
222 commit: 1 subrepos
222 commit: 1 subrepos
223 update: (current)
223 update: (current)
224 $ hg st
224 $ hg st
225 $ hg st -S
225 $ hg st -S
226 A subrepo/large.txt
226 A subrepo/large.txt
227 $ hg ci -S -m "commit top repo"
227 $ hg ci -S -m "commit top repo"
228 committing subrepository subrepo
228 committing subrepository subrepo
229 Invoking status precommit hook
229 Invoking status precommit hook
230 A large.txt
230 A large.txt
231 Invoking status precommit hook
231 Invoking status precommit hook
232 M .hgsubstate
232 M .hgsubstate
233 # No differences
233 # No differences
234 $ hg st -S
234 $ hg st -S
235 $ hg sum
235 $ hg sum
236 parent: 2:ce4cd0c527a6 tip
236 parent: 2:ce4cd0c527a6 tip
237 commit top repo
237 commit top repo
238 branch: default
238 branch: default
239 commit: (clean)
239 commit: (clean)
240 update: (current)
240 update: (current)
241 $ echo "rev 2" > subrepo/large.txt
241 $ echo "rev 2" > subrepo/large.txt
242 $ hg st -S
242 $ hg st -S
243 M subrepo/large.txt
243 M subrepo/large.txt
244 $ hg sum
244 $ hg sum
245 parent: 2:ce4cd0c527a6 tip
245 parent: 2:ce4cd0c527a6 tip
246 commit top repo
246 commit top repo
247 branch: default
247 branch: default
248 commit: 1 subrepos
248 commit: 1 subrepos
249 update: (current)
249 update: (current)
250 $ hg ci -m "this commit should fail without -S"
250 $ hg ci -m "this commit should fail without -S"
251 abort: uncommitted changes in subrepo subrepo
251 abort: uncommitted changes in subrepo subrepo
252 (use --subrepos for recursive commit)
252 (use --subrepos for recursive commit)
253 [255]
253 [255]
254
254
255 Add a normal file to the subrepo, then test archiving
255 Add a normal file to the subrepo, then test archiving
256
256
257 $ echo 'normal file' > subrepo/normal.txt
257 $ echo 'normal file' > subrepo/normal.txt
258 $ mv subrepo/large.txt subrepo/renamed-large.txt
259 $ hg -R subrepo addremove --dry-run
260 removing large.txt
261 adding normal.txt
262 adding renamed-large.txt
263 $ hg status -S
264 ! subrepo/large.txt
265 ? subrepo/normal.txt
266 ? subrepo/renamed-large.txt
267 $ mv subrepo/renamed-large.txt subrepo/large.txt
258 $ hg -R subrepo add subrepo/normal.txt
268 $ hg -R subrepo add subrepo/normal.txt
259
269
260 Lock in subrepo, otherwise the change isn't archived
270 Lock in subrepo, otherwise the change isn't archived
261
271
262 $ hg ci -S -m "add normal file to top level"
272 $ hg ci -S -m "add normal file to top level"
263 committing subrepository subrepo
273 committing subrepository subrepo
264 Invoking status precommit hook
274 Invoking status precommit hook
265 M large.txt
275 M large.txt
266 A normal.txt
276 A normal.txt
267 Invoking status precommit hook
277 Invoking status precommit hook
268 M .hgsubstate
278 M .hgsubstate
269 $ hg archive -S ../lf_subrepo_archive
279 $ hg archive -S ../lf_subrepo_archive
270 $ find ../lf_subrepo_archive | sort
280 $ find ../lf_subrepo_archive | sort
271 ../lf_subrepo_archive
281 ../lf_subrepo_archive
272 ../lf_subrepo_archive/.hg_archival.txt
282 ../lf_subrepo_archive/.hg_archival.txt
273 ../lf_subrepo_archive/.hgsub
283 ../lf_subrepo_archive/.hgsub
274 ../lf_subrepo_archive/.hgsubstate
284 ../lf_subrepo_archive/.hgsubstate
275 ../lf_subrepo_archive/a
285 ../lf_subrepo_archive/a
276 ../lf_subrepo_archive/a/b
286 ../lf_subrepo_archive/a/b
277 ../lf_subrepo_archive/a/b/c
287 ../lf_subrepo_archive/a/b/c
278 ../lf_subrepo_archive/a/b/c/d
288 ../lf_subrepo_archive/a/b/c/d
279 ../lf_subrepo_archive/a/b/c/d/e.large.txt
289 ../lf_subrepo_archive/a/b/c/d/e.large.txt
280 ../lf_subrepo_archive/a/b/c/d/e.normal.txt
290 ../lf_subrepo_archive/a/b/c/d/e.normal.txt
281 ../lf_subrepo_archive/a/b/c/x
291 ../lf_subrepo_archive/a/b/c/x
282 ../lf_subrepo_archive/a/b/c/x/y.normal.txt
292 ../lf_subrepo_archive/a/b/c/x/y.normal.txt
283 ../lf_subrepo_archive/subrepo
293 ../lf_subrepo_archive/subrepo
284 ../lf_subrepo_archive/subrepo/large.txt
294 ../lf_subrepo_archive/subrepo/large.txt
285 ../lf_subrepo_archive/subrepo/normal.txt
295 ../lf_subrepo_archive/subrepo/normal.txt
286
296
287 Test update with subrepos.
297 Test update with subrepos.
288
298
289 $ hg update 0
299 $ hg update 0
290 getting changed largefiles
300 getting changed largefiles
291 0 largefiles updated, 1 removed
301 0 largefiles updated, 1 removed
292 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
302 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
293 $ hg status -S
303 $ hg status -S
294 $ hg update tip
304 $ hg update tip
295 getting changed largefiles
305 getting changed largefiles
296 1 largefiles updated, 0 removed
306 1 largefiles updated, 0 removed
297 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
307 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
298 $ hg status -S
308 $ hg status -S
299 # modify a large file
309 # modify a large file
300 $ echo "modified" > subrepo/large.txt
310 $ echo "modified" > subrepo/large.txt
301 $ hg st -S
311 $ hg st -S
302 M subrepo/large.txt
312 M subrepo/large.txt
303 # update -C should revert the change.
313 # update -C should revert the change.
304 $ hg update -C
314 $ hg update -C
305 getting changed largefiles
315 getting changed largefiles
306 1 largefiles updated, 0 removed
316 1 largefiles updated, 0 removed
307 getting changed largefiles
317 getting changed largefiles
308 0 largefiles updated, 0 removed
318 0 largefiles updated, 0 removed
309 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
319 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
310 $ hg status -S
320 $ hg status -S
311
321
312 Test archiving a revision that references a subrepo that is not yet
322 Test archiving a revision that references a subrepo that is not yet
313 cloned (see test-subrepo-recursion.t):
323 cloned (see test-subrepo-recursion.t):
314
324
315 $ hg clone -U . ../empty
325 $ hg clone -U . ../empty
316 $ cd ../empty
326 $ cd ../empty
317 $ hg archive --subrepos -r tip ../archive.tar.gz
327 $ hg archive --subrepos -r tip ../archive.tar.gz
318 cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo
328 cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo
319 $ cd ..
329 $ cd ..
320
330
321
331
322
332
323
333
324
334
325
335
326 Test addremove, forget and others
336 Test addremove, forget and others
327 ==============================================
337 ==============================================
328
338
329 Test that addremove picks up largefiles prior to the initial commit (issue3541)
339 Test that addremove picks up largefiles prior to the initial commit (issue3541)
330
340
331 $ hg init addrm2
341 $ hg init addrm2
332 $ cd addrm2
342 $ cd addrm2
333 $ touch large.dat
343 $ touch large.dat
334 $ touch large2.dat
344 $ touch large2.dat
335 $ touch normal
345 $ touch normal
336 $ hg add --large large.dat
346 $ hg add --large large.dat
337 $ hg addremove -v
347 $ hg addremove -v
338 adding large2.dat as a largefile
348 adding large2.dat as a largefile
339 adding normal
349 adding normal
340
350
341 Test that forgetting all largefiles reverts to islfilesrepo() == False
351 Test that forgetting all largefiles reverts to islfilesrepo() == False
342 (addremove will add *.dat as normal files now)
352 (addremove will add *.dat as normal files now)
343 $ hg forget large.dat
353 $ hg forget large.dat
344 $ hg forget large2.dat
354 $ hg forget large2.dat
345 $ hg addremove -v
355 $ hg addremove -v
346 adding large.dat
356 adding large.dat
347 adding large2.dat
357 adding large2.dat
348
358
349 Test commit's addremove option prior to the first commit
359 Test commit's addremove option prior to the first commit
350 $ hg forget large.dat
360 $ hg forget large.dat
351 $ hg forget large2.dat
361 $ hg forget large2.dat
352 $ hg add --large large.dat
362 $ hg add --large large.dat
353 $ hg ci -Am "commit"
363 $ hg ci -Am "commit"
354 adding large2.dat as a largefile
364 adding large2.dat as a largefile
355 Invoking status precommit hook
365 Invoking status precommit hook
356 A large.dat
366 A large.dat
357 A large2.dat
367 A large2.dat
358 A normal
368 A normal
359 $ find .hglf | sort
369 $ find .hglf | sort
360 .hglf
370 .hglf
361 .hglf/large.dat
371 .hglf/large.dat
362 .hglf/large2.dat
372 .hglf/large2.dat
363
373
364 Test actions on largefiles using relative paths from subdir
374 Test actions on largefiles using relative paths from subdir
365
375
366 $ mkdir sub
376 $ mkdir sub
367 $ cd sub
377 $ cd sub
368 $ echo anotherlarge > anotherlarge
378 $ echo anotherlarge > anotherlarge
369 $ hg add --large anotherlarge
379 $ hg add --large anotherlarge
370 $ hg st
380 $ hg st
371 A sub/anotherlarge
381 A sub/anotherlarge
372 $ hg st anotherlarge
382 $ hg st anotherlarge
373 A anotherlarge
383 A anotherlarge
374 $ hg commit -m anotherlarge anotherlarge
384 $ hg commit -m anotherlarge anotherlarge
375 Invoking status precommit hook
385 Invoking status precommit hook
376 A sub/anotherlarge
386 A sub/anotherlarge
377 $ hg log anotherlarge
387 $ hg log anotherlarge
378 changeset: 1:9627a577c5e9
388 changeset: 1:9627a577c5e9
379 tag: tip
389 tag: tip
380 user: test
390 user: test
381 date: Thu Jan 01 00:00:00 1970 +0000
391 date: Thu Jan 01 00:00:00 1970 +0000
382 summary: anotherlarge
392 summary: anotherlarge
383
393
384 $ hg log -G anotherlarge
394 $ hg log -G anotherlarge
385 @ changeset: 1:9627a577c5e9
395 @ changeset: 1:9627a577c5e9
386 | tag: tip
396 | tag: tip
387 | user: test
397 | user: test
388 | date: Thu Jan 01 00:00:00 1970 +0000
398 | date: Thu Jan 01 00:00:00 1970 +0000
389 | summary: anotherlarge
399 | summary: anotherlarge
390 |
400 |
391 $ echo more >> anotherlarge
401 $ echo more >> anotherlarge
392 $ hg st .
402 $ hg st .
393 M anotherlarge
403 M anotherlarge
394 $ hg cat anotherlarge
404 $ hg cat anotherlarge
395 anotherlarge
405 anotherlarge
396 $ hg revert anotherlarge
406 $ hg revert anotherlarge
397 $ hg st
407 $ hg st
398 ? sub/anotherlarge.orig
408 ? sub/anotherlarge.orig
399 $ cd ..
409 $ cd ..
400
410
401 $ cd ..
411 $ cd ..
402
412
403 Check error message while exchange
413 Check error message while exchange
404 =========================================================
414 =========================================================
405
415
406 issue3651: summary/outgoing with largefiles shows "no remote repo"
416 issue3651: summary/outgoing with largefiles shows "no remote repo"
407 unexpectedly
417 unexpectedly
408
418
409 $ mkdir issue3651
419 $ mkdir issue3651
410 $ cd issue3651
420 $ cd issue3651
411
421
412 $ hg init src
422 $ hg init src
413 $ echo a > src/a
423 $ echo a > src/a
414 $ hg -R src add --large src/a
424 $ hg -R src add --large src/a
415 $ hg -R src commit -m '#0'
425 $ hg -R src commit -m '#0'
416 Invoking status precommit hook
426 Invoking status precommit hook
417 A a
427 A a
418
428
419 check messages when no remote repository is specified:
429 check messages when no remote repository is specified:
420 "no remote repo" route for "hg outgoing --large" is not tested here,
430 "no remote repo" route for "hg outgoing --large" is not tested here,
421 because it can't be reproduced easily.
431 because it can't be reproduced easily.
422
432
423 $ hg init clone1
433 $ hg init clone1
424 $ hg -R clone1 -q pull src
434 $ hg -R clone1 -q pull src
425 $ hg -R clone1 -q update
435 $ hg -R clone1 -q update
426 $ hg -R clone1 paths | grep default
436 $ hg -R clone1 paths | grep default
427 [1]
437 [1]
428
438
429 $ hg -R clone1 summary --large
439 $ hg -R clone1 summary --large
430 parent: 0:fc0bd45326d3 tip
440 parent: 0:fc0bd45326d3 tip
431 #0
441 #0
432 branch: default
442 branch: default
433 commit: (clean)
443 commit: (clean)
434 update: (current)
444 update: (current)
435 largefiles: (no remote repo)
445 largefiles: (no remote repo)
436
446
437 check messages when there is no files to upload:
447 check messages when there is no files to upload:
438
448
439 $ hg -q clone src clone2
449 $ hg -q clone src clone2
440 $ hg -R clone2 paths | grep default
450 $ hg -R clone2 paths | grep default
441 default = $TESTTMP/issue3651/src (glob)
451 default = $TESTTMP/issue3651/src (glob)
442
452
443 $ hg -R clone2 summary --large
453 $ hg -R clone2 summary --large
444 parent: 0:fc0bd45326d3 tip
454 parent: 0:fc0bd45326d3 tip
445 #0
455 #0
446 branch: default
456 branch: default
447 commit: (clean)
457 commit: (clean)
448 update: (current)
458 update: (current)
449 largefiles: (no files to upload)
459 largefiles: (no files to upload)
450 $ hg -R clone2 outgoing --large
460 $ hg -R clone2 outgoing --large
451 comparing with $TESTTMP/issue3651/src (glob)
461 comparing with $TESTTMP/issue3651/src (glob)
452 searching for changes
462 searching for changes
453 no changes found
463 no changes found
454 largefiles: no files to upload
464 largefiles: no files to upload
455 [1]
465 [1]
456
466
457 $ hg -R clone2 outgoing --large --graph --template "{rev}"
467 $ hg -R clone2 outgoing --large --graph --template "{rev}"
458 comparing with $TESTTMP/issue3651/src (glob)
468 comparing with $TESTTMP/issue3651/src (glob)
459 searching for changes
469 searching for changes
460 no changes found
470 no changes found
461 largefiles: no files to upload
471 largefiles: no files to upload
462
472
463 check messages when there are files to upload:
473 check messages when there are files to upload:
464
474
465 $ echo b > clone2/b
475 $ echo b > clone2/b
466 $ hg -R clone2 add --large clone2/b
476 $ hg -R clone2 add --large clone2/b
467 $ hg -R clone2 commit -m '#1'
477 $ hg -R clone2 commit -m '#1'
468 Invoking status precommit hook
478 Invoking status precommit hook
469 A b
479 A b
470 $ hg -R clone2 summary --large
480 $ hg -R clone2 summary --large
471 parent: 1:1acbe71ce432 tip
481 parent: 1:1acbe71ce432 tip
472 #1
482 #1
473 branch: default
483 branch: default
474 commit: (clean)
484 commit: (clean)
475 update: (current)
485 update: (current)
476 largefiles: 1 entities for 1 files to upload
486 largefiles: 1 entities for 1 files to upload
477 $ hg -R clone2 outgoing --large
487 $ hg -R clone2 outgoing --large
478 comparing with $TESTTMP/issue3651/src (glob)
488 comparing with $TESTTMP/issue3651/src (glob)
479 searching for changes
489 searching for changes
480 changeset: 1:1acbe71ce432
490 changeset: 1:1acbe71ce432
481 tag: tip
491 tag: tip
482 user: test
492 user: test
483 date: Thu Jan 01 00:00:00 1970 +0000
493 date: Thu Jan 01 00:00:00 1970 +0000
484 summary: #1
494 summary: #1
485
495
486 largefiles to upload (1 entities):
496 largefiles to upload (1 entities):
487 b
497 b
488
498
489 $ hg -R clone2 outgoing --large --graph --template "{rev}"
499 $ hg -R clone2 outgoing --large --graph --template "{rev}"
490 comparing with $TESTTMP/issue3651/src (glob)
500 comparing with $TESTTMP/issue3651/src (glob)
491 searching for changes
501 searching for changes
492 @ 1
502 @ 1
493
503
494 largefiles to upload (1 entities):
504 largefiles to upload (1 entities):
495 b
505 b
496
506
497
507
498 $ cp clone2/b clone2/b1
508 $ cp clone2/b clone2/b1
499 $ cp clone2/b clone2/b2
509 $ cp clone2/b clone2/b2
500 $ hg -R clone2 add --large clone2/b1 clone2/b2
510 $ hg -R clone2 add --large clone2/b1 clone2/b2
501 $ hg -R clone2 commit -m '#2: add largefiles referring same entity'
511 $ hg -R clone2 commit -m '#2: add largefiles referring same entity'
502 Invoking status precommit hook
512 Invoking status precommit hook
503 A b1
513 A b1
504 A b2
514 A b2
505 $ hg -R clone2 summary --large
515 $ hg -R clone2 summary --large
506 parent: 2:6095d0695d70 tip
516 parent: 2:6095d0695d70 tip
507 #2: add largefiles referring same entity
517 #2: add largefiles referring same entity
508 branch: default
518 branch: default
509 commit: (clean)
519 commit: (clean)
510 update: (current)
520 update: (current)
511 largefiles: 1 entities for 3 files to upload
521 largefiles: 1 entities for 3 files to upload
512 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
522 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
513 comparing with $TESTTMP/issue3651/src (glob)
523 comparing with $TESTTMP/issue3651/src (glob)
514 searching for changes
524 searching for changes
515 1:1acbe71ce432
525 1:1acbe71ce432
516 2:6095d0695d70
526 2:6095d0695d70
517 largefiles to upload (1 entities):
527 largefiles to upload (1 entities):
518 b
528 b
519 b1
529 b1
520 b2
530 b2
521
531
522 $ hg -R clone2 cat -r 1 clone2/.hglf/b
532 $ hg -R clone2 cat -r 1 clone2/.hglf/b
523 89e6c98d92887913cadf06b2adb97f26cde4849b
533 89e6c98d92887913cadf06b2adb97f26cde4849b
524 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
534 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
525 comparing with $TESTTMP/issue3651/src (glob)
535 comparing with $TESTTMP/issue3651/src (glob)
526 query 1; heads
536 query 1; heads
527 searching for changes
537 searching for changes
528 all remote heads known locally
538 all remote heads known locally
529 1:1acbe71ce432
539 1:1acbe71ce432
530 2:6095d0695d70
540 2:6095d0695d70
531 largefiles to upload (1 entities):
541 largefiles to upload (1 entities):
532 b
542 b
533 89e6c98d92887913cadf06b2adb97f26cde4849b
543 89e6c98d92887913cadf06b2adb97f26cde4849b
534 b1
544 b1
535 89e6c98d92887913cadf06b2adb97f26cde4849b
545 89e6c98d92887913cadf06b2adb97f26cde4849b
536 b2
546 b2
537 89e6c98d92887913cadf06b2adb97f26cde4849b
547 89e6c98d92887913cadf06b2adb97f26cde4849b
538
548
539
549
540 $ echo bbb > clone2/b
550 $ echo bbb > clone2/b
541 $ hg -R clone2 commit -m '#3: add new largefile entity as existing file'
551 $ hg -R clone2 commit -m '#3: add new largefile entity as existing file'
542 Invoking status precommit hook
552 Invoking status precommit hook
543 M b
553 M b
544 $ echo bbbb > clone2/b
554 $ echo bbbb > clone2/b
545 $ hg -R clone2 commit -m '#4: add new largefile entity as existing file'
555 $ hg -R clone2 commit -m '#4: add new largefile entity as existing file'
546 Invoking status precommit hook
556 Invoking status precommit hook
547 M b
557 M b
548 $ cp clone2/b1 clone2/b
558 $ cp clone2/b1 clone2/b
549 $ hg -R clone2 commit -m '#5: refer existing largefile entity again'
559 $ hg -R clone2 commit -m '#5: refer existing largefile entity again'
550 Invoking status precommit hook
560 Invoking status precommit hook
551 M b
561 M b
552 $ hg -R clone2 summary --large
562 $ hg -R clone2 summary --large
553 parent: 5:036794ea641c tip
563 parent: 5:036794ea641c tip
554 #5: refer existing largefile entity again
564 #5: refer existing largefile entity again
555 branch: default
565 branch: default
556 commit: (clean)
566 commit: (clean)
557 update: (current)
567 update: (current)
558 largefiles: 3 entities for 3 files to upload
568 largefiles: 3 entities for 3 files to upload
559 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
569 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
560 comparing with $TESTTMP/issue3651/src (glob)
570 comparing with $TESTTMP/issue3651/src (glob)
561 searching for changes
571 searching for changes
562 1:1acbe71ce432
572 1:1acbe71ce432
563 2:6095d0695d70
573 2:6095d0695d70
564 3:7983dce246cc
574 3:7983dce246cc
565 4:233f12ada4ae
575 4:233f12ada4ae
566 5:036794ea641c
576 5:036794ea641c
567 largefiles to upload (3 entities):
577 largefiles to upload (3 entities):
568 b
578 b
569 b1
579 b1
570 b2
580 b2
571
581
572 $ hg -R clone2 cat -r 3 clone2/.hglf/b
582 $ hg -R clone2 cat -r 3 clone2/.hglf/b
573 c801c9cfe94400963fcb683246217d5db77f9a9a
583 c801c9cfe94400963fcb683246217d5db77f9a9a
574 $ hg -R clone2 cat -r 4 clone2/.hglf/b
584 $ hg -R clone2 cat -r 4 clone2/.hglf/b
575 13f9ed0898e315bf59dc2973fec52037b6f441a2
585 13f9ed0898e315bf59dc2973fec52037b6f441a2
576 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
586 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
577 comparing with $TESTTMP/issue3651/src (glob)
587 comparing with $TESTTMP/issue3651/src (glob)
578 query 1; heads
588 query 1; heads
579 searching for changes
589 searching for changes
580 all remote heads known locally
590 all remote heads known locally
581 1:1acbe71ce432
591 1:1acbe71ce432
582 2:6095d0695d70
592 2:6095d0695d70
583 3:7983dce246cc
593 3:7983dce246cc
584 4:233f12ada4ae
594 4:233f12ada4ae
585 5:036794ea641c
595 5:036794ea641c
586 largefiles to upload (3 entities):
596 largefiles to upload (3 entities):
587 b
597 b
588 13f9ed0898e315bf59dc2973fec52037b6f441a2
598 13f9ed0898e315bf59dc2973fec52037b6f441a2
589 89e6c98d92887913cadf06b2adb97f26cde4849b
599 89e6c98d92887913cadf06b2adb97f26cde4849b
590 c801c9cfe94400963fcb683246217d5db77f9a9a
600 c801c9cfe94400963fcb683246217d5db77f9a9a
591 b1
601 b1
592 89e6c98d92887913cadf06b2adb97f26cde4849b
602 89e6c98d92887913cadf06b2adb97f26cde4849b
593 b2
603 b2
594 89e6c98d92887913cadf06b2adb97f26cde4849b
604 89e6c98d92887913cadf06b2adb97f26cde4849b
595
605
596
606
597 Pushing revision #1 causes uploading entity 89e6c98d9288, which is
607 Pushing revision #1 causes uploading entity 89e6c98d9288, which is
598 shared also by largefiles b1, b2 in revision #2 and b in revision #5.
608 shared also by largefiles b1, b2 in revision #2 and b in revision #5.
599
609
600 Then, entity 89e6c98d9288 is not treated as "outgoing entity" at "hg
610 Then, entity 89e6c98d9288 is not treated as "outgoing entity" at "hg
601 summary" and "hg outgoing", even though files in outgoing revision #2
611 summary" and "hg outgoing", even though files in outgoing revision #2
602 and #5 refer it.
612 and #5 refer it.
603
613
604 $ hg -R clone2 push -r 1 -q
614 $ hg -R clone2 push -r 1 -q
605 $ hg -R clone2 summary --large
615 $ hg -R clone2 summary --large
606 parent: 5:036794ea641c tip
616 parent: 5:036794ea641c tip
607 #5: refer existing largefile entity again
617 #5: refer existing largefile entity again
608 branch: default
618 branch: default
609 commit: (clean)
619 commit: (clean)
610 update: (current)
620 update: (current)
611 largefiles: 2 entities for 1 files to upload
621 largefiles: 2 entities for 1 files to upload
612 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
622 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n"
613 comparing with $TESTTMP/issue3651/src (glob)
623 comparing with $TESTTMP/issue3651/src (glob)
614 searching for changes
624 searching for changes
615 2:6095d0695d70
625 2:6095d0695d70
616 3:7983dce246cc
626 3:7983dce246cc
617 4:233f12ada4ae
627 4:233f12ada4ae
618 5:036794ea641c
628 5:036794ea641c
619 largefiles to upload (2 entities):
629 largefiles to upload (2 entities):
620 b
630 b
621
631
622 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
632 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug
623 comparing with $TESTTMP/issue3651/src (glob)
633 comparing with $TESTTMP/issue3651/src (glob)
624 query 1; heads
634 query 1; heads
625 searching for changes
635 searching for changes
626 all remote heads known locally
636 all remote heads known locally
627 2:6095d0695d70
637 2:6095d0695d70
628 3:7983dce246cc
638 3:7983dce246cc
629 4:233f12ada4ae
639 4:233f12ada4ae
630 5:036794ea641c
640 5:036794ea641c
631 largefiles to upload (2 entities):
641 largefiles to upload (2 entities):
632 b
642 b
633 13f9ed0898e315bf59dc2973fec52037b6f441a2
643 13f9ed0898e315bf59dc2973fec52037b6f441a2
634 c801c9cfe94400963fcb683246217d5db77f9a9a
644 c801c9cfe94400963fcb683246217d5db77f9a9a
635
645
636
646
637 $ cd ..
647 $ cd ..
638
648
639 merge action 'd' for 'local renamed directory to d2/g' which has no filename
649 merge action 'd' for 'local renamed directory to d2/g' which has no filename
640 ==================================================================================
650 ==================================================================================
641
651
642 $ hg init merge-action
652 $ hg init merge-action
643 $ cd merge-action
653 $ cd merge-action
644 $ touch l
654 $ touch l
645 $ hg add --large l
655 $ hg add --large l
646 $ mkdir d1
656 $ mkdir d1
647 $ touch d1/f
657 $ touch d1/f
648 $ hg ci -Aqm0
658 $ hg ci -Aqm0
649 Invoking status precommit hook
659 Invoking status precommit hook
650 A d1/f
660 A d1/f
651 A l
661 A l
652 $ echo > d1/f
662 $ echo > d1/f
653 $ touch d1/g
663 $ touch d1/g
654 $ hg ci -Aqm1
664 $ hg ci -Aqm1
655 Invoking status precommit hook
665 Invoking status precommit hook
656 M d1/f
666 M d1/f
657 A d1/g
667 A d1/g
658 $ hg up -qr0
668 $ hg up -qr0
659 $ hg mv d1 d2
669 $ hg mv d1 d2
660 moving d1/f to d2/f (glob)
670 moving d1/f to d2/f (glob)
661 $ hg ci -qm2
671 $ hg ci -qm2
662 Invoking status precommit hook
672 Invoking status precommit hook
663 A d2/f
673 A d2/f
664 R d1/f
674 R d1/f
665 $ hg merge
675 $ hg merge
666 merging d2/f and d1/f to d2/f
676 merging d2/f and d1/f to d2/f
667 getting changed largefiles
677 getting changed largefiles
668 0 largefiles updated, 0 removed
678 0 largefiles updated, 0 removed
669 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
679 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
670 (branch merge, don't forget to commit)
680 (branch merge, don't forget to commit)
671 $ cd ..
681 $ cd ..
672
682
673
683
674 Merge conflicts:
684 Merge conflicts:
675 =====================
685 =====================
676
686
677 $ hg init merge
687 $ hg init merge
678 $ cd merge
688 $ cd merge
679 $ echo 0 > f-different
689 $ echo 0 > f-different
680 $ echo 0 > f-same
690 $ echo 0 > f-same
681 $ echo 0 > f-unchanged-1
691 $ echo 0 > f-unchanged-1
682 $ echo 0 > f-unchanged-2
692 $ echo 0 > f-unchanged-2
683 $ hg add --large *
693 $ hg add --large *
684 $ hg ci -m0
694 $ hg ci -m0
685 Invoking status precommit hook
695 Invoking status precommit hook
686 A f-different
696 A f-different
687 A f-same
697 A f-same
688 A f-unchanged-1
698 A f-unchanged-1
689 A f-unchanged-2
699 A f-unchanged-2
690 $ echo tmp1 > f-unchanged-1
700 $ echo tmp1 > f-unchanged-1
691 $ echo tmp1 > f-unchanged-2
701 $ echo tmp1 > f-unchanged-2
692 $ echo tmp1 > f-same
702 $ echo tmp1 > f-same
693 $ hg ci -m1
703 $ hg ci -m1
694 Invoking status precommit hook
704 Invoking status precommit hook
695 M f-same
705 M f-same
696 M f-unchanged-1
706 M f-unchanged-1
697 M f-unchanged-2
707 M f-unchanged-2
698 $ echo 2 > f-different
708 $ echo 2 > f-different
699 $ echo 0 > f-unchanged-1
709 $ echo 0 > f-unchanged-1
700 $ echo 1 > f-unchanged-2
710 $ echo 1 > f-unchanged-2
701 $ echo 1 > f-same
711 $ echo 1 > f-same
702 $ hg ci -m2
712 $ hg ci -m2
703 Invoking status precommit hook
713 Invoking status precommit hook
704 M f-different
714 M f-different
705 M f-same
715 M f-same
706 M f-unchanged-1
716 M f-unchanged-1
707 M f-unchanged-2
717 M f-unchanged-2
708 $ hg up -qr0
718 $ hg up -qr0
709 $ echo tmp2 > f-unchanged-1
719 $ echo tmp2 > f-unchanged-1
710 $ echo tmp2 > f-unchanged-2
720 $ echo tmp2 > f-unchanged-2
711 $ echo tmp2 > f-same
721 $ echo tmp2 > f-same
712 $ hg ci -m3
722 $ hg ci -m3
713 Invoking status precommit hook
723 Invoking status precommit hook
714 M f-same
724 M f-same
715 M f-unchanged-1
725 M f-unchanged-1
716 M f-unchanged-2
726 M f-unchanged-2
717 created new head
727 created new head
718 $ echo 1 > f-different
728 $ echo 1 > f-different
719 $ echo 1 > f-unchanged-1
729 $ echo 1 > f-unchanged-1
720 $ echo 0 > f-unchanged-2
730 $ echo 0 > f-unchanged-2
721 $ echo 1 > f-same
731 $ echo 1 > f-same
722 $ hg ci -m4
732 $ hg ci -m4
723 Invoking status precommit hook
733 Invoking status precommit hook
724 M f-different
734 M f-different
725 M f-same
735 M f-same
726 M f-unchanged-1
736 M f-unchanged-1
727 M f-unchanged-2
737 M f-unchanged-2
728 $ hg merge
738 $ hg merge
729 largefile f-different has a merge conflict
739 largefile f-different has a merge conflict
730 ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7
740 ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7
731 keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or
741 keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or
732 take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a? l
742 take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a? l
733 getting changed largefiles
743 getting changed largefiles
734 1 largefiles updated, 0 removed
744 1 largefiles updated, 0 removed
735 0 files updated, 4 files merged, 0 files removed, 0 files unresolved
745 0 files updated, 4 files merged, 0 files removed, 0 files unresolved
736 (branch merge, don't forget to commit)
746 (branch merge, don't forget to commit)
737 $ cat f-different
747 $ cat f-different
738 1
748 1
739 $ cat f-same
749 $ cat f-same
740 1
750 1
741 $ cat f-unchanged-1
751 $ cat f-unchanged-1
742 1
752 1
743 $ cat f-unchanged-2
753 $ cat f-unchanged-2
744 1
754 1
745 $ cd ..
755 $ cd ..
746
756
747 Test largefile insulation (do not enabled a side effect
757 Test largefile insulation (do not enabled a side effect
748 ========================================================
758 ========================================================
749
759
750 Check whether "largefiles" feature is supported only in repositories
760 Check whether "largefiles" feature is supported only in repositories
751 enabling largefiles extension.
761 enabling largefiles extension.
752
762
753 $ mkdir individualenabling
763 $ mkdir individualenabling
754 $ cd individualenabling
764 $ cd individualenabling
755
765
756 $ hg init enabledlocally
766 $ hg init enabledlocally
757 $ echo large > enabledlocally/large
767 $ echo large > enabledlocally/large
758 $ hg -R enabledlocally add --large enabledlocally/large
768 $ hg -R enabledlocally add --large enabledlocally/large
759 $ hg -R enabledlocally commit -m '#0'
769 $ hg -R enabledlocally commit -m '#0'
760 Invoking status precommit hook
770 Invoking status precommit hook
761 A large
771 A large
762
772
763 $ hg init notenabledlocally
773 $ hg init notenabledlocally
764 $ echo large > notenabledlocally/large
774 $ echo large > notenabledlocally/large
765 $ hg -R notenabledlocally add --large notenabledlocally/large
775 $ hg -R notenabledlocally add --large notenabledlocally/large
766 $ hg -R notenabledlocally commit -m '#0'
776 $ hg -R notenabledlocally commit -m '#0'
767 Invoking status precommit hook
777 Invoking status precommit hook
768 A large
778 A large
769
779
770 $ cat >> $HGRCPATH <<EOF
780 $ cat >> $HGRCPATH <<EOF
771 > [extensions]
781 > [extensions]
772 > # disable globally
782 > # disable globally
773 > largefiles=!
783 > largefiles=!
774 > EOF
784 > EOF
775 $ cat >> enabledlocally/.hg/hgrc <<EOF
785 $ cat >> enabledlocally/.hg/hgrc <<EOF
776 > [extensions]
786 > [extensions]
777 > # enable locally
787 > # enable locally
778 > largefiles=
788 > largefiles=
779 > EOF
789 > EOF
780 $ hg -R enabledlocally root
790 $ hg -R enabledlocally root
781 $TESTTMP/individualenabling/enabledlocally (glob)
791 $TESTTMP/individualenabling/enabledlocally (glob)
782 $ hg -R notenabledlocally root
792 $ hg -R notenabledlocally root
783 abort: repository requires features unknown to this Mercurial: largefiles!
793 abort: repository requires features unknown to this Mercurial: largefiles!
784 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
794 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
785 [255]
795 [255]
786
796
787 $ hg init push-dst
797 $ hg init push-dst
788 $ hg -R enabledlocally push push-dst
798 $ hg -R enabledlocally push push-dst
789 pushing to push-dst
799 pushing to push-dst
790 abort: required features are not supported in the destination: largefiles
800 abort: required features are not supported in the destination: largefiles
791 [255]
801 [255]
792
802
793 $ hg init pull-src
803 $ hg init pull-src
794 $ hg -R pull-src pull enabledlocally
804 $ hg -R pull-src pull enabledlocally
795 pulling from enabledlocally
805 pulling from enabledlocally
796 abort: required features are not supported in the destination: largefiles
806 abort: required features are not supported in the destination: largefiles
797 [255]
807 [255]
798
808
799 $ hg clone enabledlocally clone-dst
809 $ hg clone enabledlocally clone-dst
800 abort: repository requires features unknown to this Mercurial: largefiles!
810 abort: repository requires features unknown to this Mercurial: largefiles!
801 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
811 (see http://mercurial.selenic.com/wiki/MissingRequirement for more information)
802 [255]
812 [255]
803 $ test -d clone-dst
813 $ test -d clone-dst
804 [1]
814 [1]
805 $ hg clone --pull enabledlocally clone-pull-dst
815 $ hg clone --pull enabledlocally clone-pull-dst
806 abort: required features are not supported in the destination: largefiles
816 abort: required features are not supported in the destination: largefiles
807 [255]
817 [255]
808 $ test -d clone-pull-dst
818 $ test -d clone-pull-dst
809 [1]
819 [1]
810
820
811 #if serve
821 #if serve
812
822
813 Test largefiles specific peer setup, when largefiles is enabled
823 Test largefiles specific peer setup, when largefiles is enabled
814 locally (issue4109)
824 locally (issue4109)
815
825
816 $ hg showconfig extensions | grep largefiles
826 $ hg showconfig extensions | grep largefiles
817 extensions.largefiles=!
827 extensions.largefiles=!
818 $ mkdir -p $TESTTMP/individualenabling/usercache
828 $ mkdir -p $TESTTMP/individualenabling/usercache
819
829
820 $ hg serve -R enabledlocally -d -p $HGPORT --pid-file hg.pid
830 $ hg serve -R enabledlocally -d -p $HGPORT --pid-file hg.pid
821 $ cat hg.pid >> $DAEMON_PIDS
831 $ cat hg.pid >> $DAEMON_PIDS
822
832
823 $ hg init pull-dst
833 $ hg init pull-dst
824 $ cat > pull-dst/.hg/hgrc <<EOF
834 $ cat > pull-dst/.hg/hgrc <<EOF
825 > [extensions]
835 > [extensions]
826 > # enable locally
836 > # enable locally
827 > largefiles=
837 > largefiles=
828 > [largefiles]
838 > [largefiles]
829 > # ignore system cache to force largefiles specific wire proto access
839 > # ignore system cache to force largefiles specific wire proto access
830 > usercache=$TESTTMP/individualenabling/usercache
840 > usercache=$TESTTMP/individualenabling/usercache
831 > EOF
841 > EOF
832 $ hg -R pull-dst -q pull -u http://localhost:$HGPORT
842 $ hg -R pull-dst -q pull -u http://localhost:$HGPORT
833
843
834 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
844 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
835 #endif
845 #endif
836
846
837 $ cd ..
847 $ cd ..
838
848
839
849
840 Test "pull --rebase" when rebase is enabled before largefiles (issue3861)
850 Test "pull --rebase" when rebase is enabled before largefiles (issue3861)
841 =========================================================================
851 =========================================================================
842
852
843 $ hg showconfig extensions | grep largefiles
853 $ hg showconfig extensions | grep largefiles
844 extensions.largefiles=!
854 extensions.largefiles=!
845
855
846 $ mkdir issue3861
856 $ mkdir issue3861
847 $ cd issue3861
857 $ cd issue3861
848 $ hg init src
858 $ hg init src
849 $ hg clone -q src dst
859 $ hg clone -q src dst
850 $ echo a > src/a
860 $ echo a > src/a
851 $ hg -R src commit -Aqm "#0"
861 $ hg -R src commit -Aqm "#0"
852 Invoking status precommit hook
862 Invoking status precommit hook
853 A a
863 A a
854
864
855 $ cat >> dst/.hg/hgrc <<EOF
865 $ cat >> dst/.hg/hgrc <<EOF
856 > [extensions]
866 > [extensions]
857 > largefiles=
867 > largefiles=
858 > EOF
868 > EOF
859 $ hg -R dst pull --rebase
869 $ hg -R dst pull --rebase
860 pulling from $TESTTMP/issue3861/src (glob)
870 pulling from $TESTTMP/issue3861/src (glob)
861 requesting all changes
871 requesting all changes
862 adding changesets
872 adding changesets
863 adding manifests
873 adding manifests
864 adding file changes
874 adding file changes
865 added 1 changesets with 1 changes to 1 files
875 added 1 changesets with 1 changes to 1 files
866 nothing to rebase - working directory parent is already an ancestor of destination bf5e395ced2c
876 nothing to rebase - working directory parent is already an ancestor of destination bf5e395ced2c
867 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
877 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
868
878
869 $ cd ..
879 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now