##// END OF EJS Templates
merge with stable
Augie Fackler -
r33736:02a745c2 merge default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,151 +1,152 b''
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
1 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
2 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
3 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
4 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
5 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
6 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
7 bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
8 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
9 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc=
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
10 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
11 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
12 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
13 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
14 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
15 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
16 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
17 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
18 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
19 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
20 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
21 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
22 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
23 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
24 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
25 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
26 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
27 bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
28 c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
29 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
30 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
31 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
32 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
33 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
34 e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
35 a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
36 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
37 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
38 aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
39 b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
40 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
41 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
42 de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
43 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
44 d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
45 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
46 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
47 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
48 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
49 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
50 db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
51 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
52 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
53 b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
54 d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
55 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
56 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
57 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
58 b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
59 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
60 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
61 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
62 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
63 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
64 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
65 a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
66 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
67 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
68 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
69 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
70 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
71 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
72 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
73 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
74 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
75 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
76 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
77 f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
78 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
79 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
80 e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
81 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
82 d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
83 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
84 ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
85 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
86 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
87 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
88 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
89 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
90 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
91 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
92 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
93 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
94 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
95 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
96 f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
97 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
98 ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
99 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
100 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw=
101 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM=
101 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM=
102 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 0 iQIVAwUAVKXKYCBXgaxoKi1yAQIfsA/+PFfaWuZ6Jna12Y3MpKMnBCXYLWEJgMNlWHWzwU8lD26SKSlvMyHQsVZlkld2JmFugUCn1OV3OA4YWT6BA7VALq6Zsdcu5Dc8LRbyajBUkzGRpOUyWuFzjkCpGVbrQzbCR/bel/BBXzSqL4ipdtWgJ4y+WpZIhWkNXclBkR52b5hUTjN9vzhyhVVI7eURGwIEf7vVs1fDOcEGtaGY/ynzMTzyxIDsEEygCZau86wpKlYlqhCgxKDyzyGfpH3B1UlNGFt1afW8AWe1eHjdqC7TJZpMqmQ/Ju8vco8Xht6OXw4ZLHj7y39lpccfKTBLiK/cAKSg+xgyaH/BLhzoEkNAwYSFAB4i4IoV0KUC8nFxHfsoswBxJnMqU751ziMrpZ/XHZ1xQoEOdXgz2I04vlRn8xtynOVhcgjoAXwtbia7oNh/qCH/hl5/CdAtaawuCxJBf237F+cwur4PMAAvsGefRfZco/DInpr3qegr8rwInTxlO48ZG+o5xA4TPwT0QQTUjMdNfC146ZSbp65wG7VxJDocMZ8KJN/lqPaOvX+FVYWq4YnJhlldiV9DGgmym1AAaP0D3te2GcfHXpt/f6NYUPpgiBHy0GnOlNcQyGnnONg1A6oKVWB3k7WP28+PQbQEiCIFk2nkf5VZmye7OdHRGKOFfuprYFP1WwTWnVoNX9c=
102 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 0 iQIVAwUAVKXKYCBXgaxoKi1yAQIfsA/+PFfaWuZ6Jna12Y3MpKMnBCXYLWEJgMNlWHWzwU8lD26SKSlvMyHQsVZlkld2JmFugUCn1OV3OA4YWT6BA7VALq6Zsdcu5Dc8LRbyajBUkzGRpOUyWuFzjkCpGVbrQzbCR/bel/BBXzSqL4ipdtWgJ4y+WpZIhWkNXclBkR52b5hUTjN9vzhyhVVI7eURGwIEf7vVs1fDOcEGtaGY/ynzMTzyxIDsEEygCZau86wpKlYlqhCgxKDyzyGfpH3B1UlNGFt1afW8AWe1eHjdqC7TJZpMqmQ/Ju8vco8Xht6OXw4ZLHj7y39lpccfKTBLiK/cAKSg+xgyaH/BLhzoEkNAwYSFAB4i4IoV0KUC8nFxHfsoswBxJnMqU751ziMrpZ/XHZ1xQoEOdXgz2I04vlRn8xtynOVhcgjoAXwtbia7oNh/qCH/hl5/CdAtaawuCxJBf237F+cwur4PMAAvsGefRfZco/DInpr3qegr8rwInTxlO48ZG+o5xA4TPwT0QQTUjMdNfC146ZSbp65wG7VxJDocMZ8KJN/lqPaOvX+FVYWq4YnJhlldiV9DGgmym1AAaP0D3te2GcfHXpt/f6NYUPpgiBHy0GnOlNcQyGnnONg1A6oKVWB3k7WP28+PQbQEiCIFk2nkf5VZmye7OdHRGKOFfuprYFP1WwTWnVoNX9c=
103 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 0 iQIVAwUAVLsaciBXgaxoKi1yAQKMIA//a90/GvySL9UID+iYvzV2oDaAPDD0T+4Xs43I7DT5NIoDz+3yq2VV54XevQe5lYiURmsb/Q9nX2VR/Qq1J9c/R6Gy+CIfmJ3HzMZ0aAX8ZlZgQPYZKh/2kY5Ojl++k6MTqbqcrICNs4+UE/4IAxPyOfu5gy7TpdJmRZo2J3lWVC2Jbhd02Mzb+tjtfbOM+QcQxPwt9PpqmQszJceyVYOSm3jvD1uJdSOC04tBQrQwrxktQ09Om0LUMMaB5zFXpJtqUzfw7l4U4AaddEmkd3vUfLtHxc21RB01c3cpe2dJnjifDfwseLsI8rS4jmi/91c74TeBatSOhvbqzEkm/p8xZFXE4Uh+EpWjTsVqmfQaRq6NfNCR7I/kvGv8Ps6w8mg8uX8fd8lx+GJbodj+Uy0X3oqHyqPMky/df5i79zADBDuz+yuxFfDD9i22DJPIYcilfGgwpIUuO2lER5nSMVmReuWTVBnT6SEN66Q4KR8zLtIRr+t1qUUCy6wYbgwrdHVCbgMF8RPOVZPjbs17RIqcHjch0Xc7bShKGhQg4WHDjXHK61w4tOa1Yp7jT6COkl01XC9BLcGxJYKFvNCbeDZQGvVgJNoEvHxBxD9rGMVRjfuxeJawc2fGzZJn0ySyLDW0pfd4EJNgTh9bLdPjWz2VlXqn4A6bgaLgTPqjmN0VBXw=
103 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 0 iQIVAwUAVLsaciBXgaxoKi1yAQKMIA//a90/GvySL9UID+iYvzV2oDaAPDD0T+4Xs43I7DT5NIoDz+3yq2VV54XevQe5lYiURmsb/Q9nX2VR/Qq1J9c/R6Gy+CIfmJ3HzMZ0aAX8ZlZgQPYZKh/2kY5Ojl++k6MTqbqcrICNs4+UE/4IAxPyOfu5gy7TpdJmRZo2J3lWVC2Jbhd02Mzb+tjtfbOM+QcQxPwt9PpqmQszJceyVYOSm3jvD1uJdSOC04tBQrQwrxktQ09Om0LUMMaB5zFXpJtqUzfw7l4U4AaddEmkd3vUfLtHxc21RB01c3cpe2dJnjifDfwseLsI8rS4jmi/91c74TeBatSOhvbqzEkm/p8xZFXE4Uh+EpWjTsVqmfQaRq6NfNCR7I/kvGv8Ps6w8mg8uX8fd8lx+GJbodj+Uy0X3oqHyqPMky/df5i79zADBDuz+yuxFfDD9i22DJPIYcilfGgwpIUuO2lER5nSMVmReuWTVBnT6SEN66Q4KR8zLtIRr+t1qUUCy6wYbgwrdHVCbgMF8RPOVZPjbs17RIqcHjch0Xc7bShKGhQg4WHDjXHK61w4tOa1Yp7jT6COkl01XC9BLcGxJYKFvNCbeDZQGvVgJNoEvHxBxD9rGMVRjfuxeJawc2fGzZJn0ySyLDW0pfd4EJNgTh9bLdPjWz2VlXqn4A6bgaLgTPqjmN0VBXw=
104 fbdd5195528fae4f41feebc1838215c110b25d6a 0 iQIVAwUAVM7fBCBXgaxoKi1yAQKoYw/+LeIGcjQmHIVFQULsiBtPDf+eGAADQoP3mKBy+eX/3Fa0qqUNfES2Q3Y6RRApyZ1maPRMt8BvvhZMgQsu9QIrmf3zsFxZGFwoyrIj4hM3xvAbEZXqmWiR85/Ywd4ImeLaZ0c7mkO1/HGF1n2Mv47bfM4hhNe7VGJSSrTY4srFHDfk4IG9f18DukJVzRD9/dZeBw6eUN1ukuLEgQAD5Sl47bUdKSetglOSR1PjXfZ1hjtz5ywUyBc5P9p3LC4wSvlcJKl22zEvB3L0hkoDcPsdIPEnJAeXxKlR1rQpoA3fEgrstGiSNUW/9Tj0VekAHLO95SExmQyoG/AhbjRRzIj4uQ0aevCJyiAhkv+ffOSf99PMW9L1k3tVjLhpMWEz9BOAWyX7cDFWj5t/iktI046O9HGN9SGVx18e9xM6pEgRcLA2TyjEmtkA4jX0JeN7WeCweMLiSxyGP7pSPSJdpJeXaFtRpSF62p/G0Z5wN9s05LHqDyqNVtCvg4WjkuV5LZSdLbMcYBWGBxQzCG6qowXFXIawmbaFiBZwTfOgNls9ndz5RGupAaxY317prxPFv/pXoesc1P8bdK09ZvjhbmmD66Q/BmS2dOMQ8rXRjuVdlR8j2QBtFZxekMcRD02nBAVnwHg1VWQMIRaGjdgmW4wOkirWVn7me177FnBxrxW1tG4=
104 fbdd5195528fae4f41feebc1838215c110b25d6a 0 iQIVAwUAVM7fBCBXgaxoKi1yAQKoYw/+LeIGcjQmHIVFQULsiBtPDf+eGAADQoP3mKBy+eX/3Fa0qqUNfES2Q3Y6RRApyZ1maPRMt8BvvhZMgQsu9QIrmf3zsFxZGFwoyrIj4hM3xvAbEZXqmWiR85/Ywd4ImeLaZ0c7mkO1/HGF1n2Mv47bfM4hhNe7VGJSSrTY4srFHDfk4IG9f18DukJVzRD9/dZeBw6eUN1ukuLEgQAD5Sl47bUdKSetglOSR1PjXfZ1hjtz5ywUyBc5P9p3LC4wSvlcJKl22zEvB3L0hkoDcPsdIPEnJAeXxKlR1rQpoA3fEgrstGiSNUW/9Tj0VekAHLO95SExmQyoG/AhbjRRzIj4uQ0aevCJyiAhkv+ffOSf99PMW9L1k3tVjLhpMWEz9BOAWyX7cDFWj5t/iktI046O9HGN9SGVx18e9xM6pEgRcLA2TyjEmtkA4jX0JeN7WeCweMLiSxyGP7pSPSJdpJeXaFtRpSF62p/G0Z5wN9s05LHqDyqNVtCvg4WjkuV5LZSdLbMcYBWGBxQzCG6qowXFXIawmbaFiBZwTfOgNls9ndz5RGupAaxY317prxPFv/pXoesc1P8bdK09ZvjhbmmD66Q/BmS2dOMQ8rXRjuVdlR8j2QBtFZxekMcRD02nBAVnwHg1VWQMIRaGjdgmW4wOkirWVn7me177FnBxrxW1tG4=
105 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 0 iQIVAwUAVPQL9CBXgaxoKi1yAQJIXxAAtD2hWhaKa+lABmCOYG92FE/WdqY/91Xv5atTL8Xeko/MkirIKZiOuxNWX+J34TVevINZSWmMfDSc5TkGxktL9jW/pDB/CXn+CVZpxRabPYFH9HM2K3g8VaTV1MFtV2+feOMDIPCmq5ogMF9/kXjmifiEBrJcFsE82fdexJ3OHoOY4iHFxEhh3GzvNqEQygk4VeU6VYziNvSQj9G//PsK3Bmk7zm5ScsZcMVML3SIYFuej1b1PI1v0N8mmCRooVNBGhD/eA0iLtdh/hSb9s/8UgJ4f9HOcx9zqs8V4i14lpd/fo0+yvFuVrVbWGzrDrk5EKLENhVPwvc1KA32PTQ4Z9u7VQIBIxq3K5lL2VlCMIYc1BSaSQBjuiLm8VdN6iDuf5poNZhk1rvtpQgpxJzh362dlGtR/iTJuLCeW7gCqWUAorLTeHy0bLQ/jSOeTAGys8bUHtlRL4QbnhLbUmJmRYVvCJ+Yt1aTgTSNcoFjoLJarR1169BXgdCA38BgReUL6kB224UJSTzB1hJUyB2LvCWrXZMipZmR99Iwdq7MePD3+AoSIXQNUMY9blxuuF5x7W2ikNXmVWuab4Z8rQRtmGqEuIMBSunxAnZSn+i8057dFKlq+/yGy+WW3RQg+RnLnwZs1zCDTfu98/GT5k5hFpjXZeUWWiOVwQJ5HrqncCw=
105 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 0 iQIVAwUAVPQL9CBXgaxoKi1yAQJIXxAAtD2hWhaKa+lABmCOYG92FE/WdqY/91Xv5atTL8Xeko/MkirIKZiOuxNWX+J34TVevINZSWmMfDSc5TkGxktL9jW/pDB/CXn+CVZpxRabPYFH9HM2K3g8VaTV1MFtV2+feOMDIPCmq5ogMF9/kXjmifiEBrJcFsE82fdexJ3OHoOY4iHFxEhh3GzvNqEQygk4VeU6VYziNvSQj9G//PsK3Bmk7zm5ScsZcMVML3SIYFuej1b1PI1v0N8mmCRooVNBGhD/eA0iLtdh/hSb9s/8UgJ4f9HOcx9zqs8V4i14lpd/fo0+yvFuVrVbWGzrDrk5EKLENhVPwvc1KA32PTQ4Z9u7VQIBIxq3K5lL2VlCMIYc1BSaSQBjuiLm8VdN6iDuf5poNZhk1rvtpQgpxJzh362dlGtR/iTJuLCeW7gCqWUAorLTeHy0bLQ/jSOeTAGys8bUHtlRL4QbnhLbUmJmRYVvCJ+Yt1aTgTSNcoFjoLJarR1169BXgdCA38BgReUL6kB224UJSTzB1hJUyB2LvCWrXZMipZmR99Iwdq7MePD3+AoSIXQNUMY9blxuuF5x7W2ikNXmVWuab4Z8rQRtmGqEuIMBSunxAnZSn+i8057dFKlq+/yGy+WW3RQg+RnLnwZs1zCDTfu98/GT5k5hFpjXZeUWWiOVwQJ5HrqncCw=
106 07a92bbd02e5e3a625e0820389b47786b02b2cea 0 iQIVAwUAVPSP9SBXgaxoKi1yAQLkBQ//dRQExJHFepJfZ0gvGnUoYI4APsLmne5XtfeXJ8OtUyC4a6RylxA5BavDWgXwUh9BGhOX2cBSz1fyvzohrPrvNnlBrYKAvOIJGEAiBTXHYTxHINEKPtDF92Uz23T0Rn/wnSvvlbWF7Pvd+0DMJpFDEyr9n6jvVLR7mgxMaCqZbVaB1W/wTwDjni780WgVx8OPUXkLx3/DyarMcIiPeI5UN+FeHDovTsBWFC95msFLm80PMRPuHOejWp65yyEemGujZEPO2D5VVah7fshM2HTz63+bkEBYoqrftuv3vXKBRG78MIrUrKpqxmnCKNKDUUWJ4yk3+NwuOiHlKdly5kZ7MNFaL73XKo8HH287lDWz0lIazs91dQA9a9JOyTsp8YqGtIJGGCbhrUDtiQJ199oBU84mw3VH/EEzm4mPv4sW5fm7BnnoH/a+9vXySc+498rkdLlzFwxrQkWyJ/pFOx4UA3mCtGQK+OSwLPc+X4SRqA4fiyqKxVAL1kpLTSDL3QA82I7GzBaXsxUXzS4nmteMhUyzTdwAhKVydL0gC3d7NmkAFSyRjdGzutUUXshYxg0ywRgYebe8uzJcTj4nNRgaalYLdg3guuDulD+dJmILsrcLmA6KD/pvfDn8PYt+4ZjNIvN2E9GF6uXDu4Ux+AlOTLk9BChxUF8uBX9ev5cvWtQ=
106 07a92bbd02e5e3a625e0820389b47786b02b2cea 0 iQIVAwUAVPSP9SBXgaxoKi1yAQLkBQ//dRQExJHFepJfZ0gvGnUoYI4APsLmne5XtfeXJ8OtUyC4a6RylxA5BavDWgXwUh9BGhOX2cBSz1fyvzohrPrvNnlBrYKAvOIJGEAiBTXHYTxHINEKPtDF92Uz23T0Rn/wnSvvlbWF7Pvd+0DMJpFDEyr9n6jvVLR7mgxMaCqZbVaB1W/wTwDjni780WgVx8OPUXkLx3/DyarMcIiPeI5UN+FeHDovTsBWFC95msFLm80PMRPuHOejWp65yyEemGujZEPO2D5VVah7fshM2HTz63+bkEBYoqrftuv3vXKBRG78MIrUrKpqxmnCKNKDUUWJ4yk3+NwuOiHlKdly5kZ7MNFaL73XKo8HH287lDWz0lIazs91dQA9a9JOyTsp8YqGtIJGGCbhrUDtiQJ199oBU84mw3VH/EEzm4mPv4sW5fm7BnnoH/a+9vXySc+498rkdLlzFwxrQkWyJ/pFOx4UA3mCtGQK+OSwLPc+X4SRqA4fiyqKxVAL1kpLTSDL3QA82I7GzBaXsxUXzS4nmteMhUyzTdwAhKVydL0gC3d7NmkAFSyRjdGzutUUXshYxg0ywRgYebe8uzJcTj4nNRgaalYLdg3guuDulD+dJmILsrcLmA6KD/pvfDn8PYt+4ZjNIvN2E9GF6uXDu4Ux+AlOTLk9BChxUF8uBX9ev5cvWtQ=
107 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 0 iQIVAwUAVRw4nyBXgaxoKi1yAQIFExAAkbCPtLjQlJvPaYCL1KhNR+ZVAmn7JrFH3XhvR26RayYbs4NxR3W1BhwhDy9+W+28szEx1kQvmr6t1bXAFywY0tNJOeuLU7uFfmbgAfYgkQ9kpsQNqFYkjbCyftw0S9vX9VOJ9DqUoDWuKfX7VzjkwE9dCfKI5F+dvzxnd6ZFjB85nyHBQuTZlzXl0+csY212RJ2G2j/mzEBVyeZj9l7Rm+1X8AC1xQMWRJGiyd0b7nhYqoOcceeJFAV1t9QO4+gjmkM5kL0orjxTnuVsxPTxcC5ca1BfidPWrZEto3duHWNiATGnCDylxxr52BxCAS+BWePW9J0PROtw1pYaZ9pF4N5X5LSXJzqX7ZiNGckxqIjry09+Tbsa8FS0VkkYBEiGotpuo4Jd05V6qpXfW2JqAfEVo6X6aGvPM2B7ZUtKi30I4J+WprrOP3WgZ/ZWHe1ERYKgjDqisn3t/D40q30WQUeQGltGsOX0Udqma2RjBugO5BHGzJ2yer4GdJXg7q1OMzrjAEuz1IoKvIB/o1pg86quVA4H2gQnL1B8t1M38/DIafyw7mrEY4Z3GL44Reev63XVvDE099Vbhqp7ufwq81Fpq7Xxa5vsr9SJ+8IqqQr8AcYSuK3G3L6BmIuSUAYMRqgl35FWoWkGyZIG5c6K6zI8w5Pb0aGi6Lb2Wfb9zbc=
107 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 0 iQIVAwUAVRw4nyBXgaxoKi1yAQIFExAAkbCPtLjQlJvPaYCL1KhNR+ZVAmn7JrFH3XhvR26RayYbs4NxR3W1BhwhDy9+W+28szEx1kQvmr6t1bXAFywY0tNJOeuLU7uFfmbgAfYgkQ9kpsQNqFYkjbCyftw0S9vX9VOJ9DqUoDWuKfX7VzjkwE9dCfKI5F+dvzxnd6ZFjB85nyHBQuTZlzXl0+csY212RJ2G2j/mzEBVyeZj9l7Rm+1X8AC1xQMWRJGiyd0b7nhYqoOcceeJFAV1t9QO4+gjmkM5kL0orjxTnuVsxPTxcC5ca1BfidPWrZEto3duHWNiATGnCDylxxr52BxCAS+BWePW9J0PROtw1pYaZ9pF4N5X5LSXJzqX7ZiNGckxqIjry09+Tbsa8FS0VkkYBEiGotpuo4Jd05V6qpXfW2JqAfEVo6X6aGvPM2B7ZUtKi30I4J+WprrOP3WgZ/ZWHe1ERYKgjDqisn3t/D40q30WQUeQGltGsOX0Udqma2RjBugO5BHGzJ2yer4GdJXg7q1OMzrjAEuz1IoKvIB/o1pg86quVA4H2gQnL1B8t1M38/DIafyw7mrEY4Z3GL44Reev63XVvDE099Vbhqp7ufwq81Fpq7Xxa5vsr9SJ+8IqqQr8AcYSuK3G3L6BmIuSUAYMRqgl35FWoWkGyZIG5c6K6zI8w5Pb0aGi6Lb2Wfb9zbc=
108 e89f909edffad558b56f4affa8239e4832f88de0 0 iQIVAwUAVTBozCBXgaxoKi1yAQLHeg/+IvfpPmG7OSqCoHvMVETYdrqT7lKCwfCQWMFOC/2faWs1n4R/qQNm6ckE5OY888RK8tVQ7ue03Pg/iyWgQlYfS7Njd3WPjS4JsnEBxIvuGkIu6TPIXAUAH0PFTBh0cZEICDpPEVT2X3bPRwDHA+hUE9RrxM5zJ39Fpk/pTYCjQ9UKfEhXlEfka75YB39g2Y/ssaSbn5w/tAAx8sL72Y4G96D4IV2seLHZhB3VQ7UZKThEWn6UdVOoKj+urIwGaBYMeekGVtHSh6fnHOw3EtDO9mQ5HtAz2Bl4CwRYN8eSN+Dwgr+mdk8MWpQQJ+i1A8jUhUp8gn1Pe5GkIH4CWZ9+AvLLnshe2MkVaTT1g7EQk37tFkkdZDRBsOHIvpF71B9pEA1gMUlX4gKgh5YwukgpQlDmFCfY7XmX6eXw9Ub+EckEwYuGMz7Fbwe9J/Ce4DxvgJgq3/cu/jb3bmbewH6tZmcrlqziqqA8GySIwcURnF1c37e7+e7x1jhFJfCWpHzvCusjKhUp9tZsl9Rt1Bo/y41QY+avY7//ymhbwTMKgqjzCYoA+ipF4JfZlFiZF+JhvOSIFb0ltkfdqKD+qOjlkFaglvQU1bpGKLJ6cz4Xk2Jqt5zhcrpyDMGVv9aiWywCK2ZP34RNaJ6ZFwzwdpXihqgkm5dBGoZ4ztFUfmjXzIg=
108 e89f909edffad558b56f4affa8239e4832f88de0 0 iQIVAwUAVTBozCBXgaxoKi1yAQLHeg/+IvfpPmG7OSqCoHvMVETYdrqT7lKCwfCQWMFOC/2faWs1n4R/qQNm6ckE5OY888RK8tVQ7ue03Pg/iyWgQlYfS7Njd3WPjS4JsnEBxIvuGkIu6TPIXAUAH0PFTBh0cZEICDpPEVT2X3bPRwDHA+hUE9RrxM5zJ39Fpk/pTYCjQ9UKfEhXlEfka75YB39g2Y/ssaSbn5w/tAAx8sL72Y4G96D4IV2seLHZhB3VQ7UZKThEWn6UdVOoKj+urIwGaBYMeekGVtHSh6fnHOw3EtDO9mQ5HtAz2Bl4CwRYN8eSN+Dwgr+mdk8MWpQQJ+i1A8jUhUp8gn1Pe5GkIH4CWZ9+AvLLnshe2MkVaTT1g7EQk37tFkkdZDRBsOHIvpF71B9pEA1gMUlX4gKgh5YwukgpQlDmFCfY7XmX6eXw9Ub+EckEwYuGMz7Fbwe9J/Ce4DxvgJgq3/cu/jb3bmbewH6tZmcrlqziqqA8GySIwcURnF1c37e7+e7x1jhFJfCWpHzvCusjKhUp9tZsl9Rt1Bo/y41QY+avY7//ymhbwTMKgqjzCYoA+ipF4JfZlFiZF+JhvOSIFb0ltkfdqKD+qOjlkFaglvQU1bpGKLJ6cz4Xk2Jqt5zhcrpyDMGVv9aiWywCK2ZP34RNaJ6ZFwzwdpXihqgkm5dBGoZ4ztFUfmjXzIg=
109 8cc6036bca532e06681c5a8fa37efaa812de67b5 0 iQIVAwUAVUP0xCBXgaxoKi1yAQLIChAAme3kg1Z0V8t5PnWKDoIvscIeAsD2s6EhMy1SofmdZ4wvYD1VmGC6TgXMCY7ssvRBhxqwG3GxwYpwELASuw2GYfVot2scN7+b8Hs5jHtkQevKbxarYni+ZI9mw/KldnJixD1yW3j+LoJFh/Fu6GD2yrfGIhimFLozcwUu3EbLk7JzyHSn7/8NFjLJz0foAYfcbowU9/BFwNVLrQPnsUbWcEifsq5bYso9MBO9k+25yLgqHoqMbGpJcgjubNy1cWoKnlKS+lOJl0/waAk+aIjHXMzFpRRuJDjxEZn7V4VdV5d23nrBTcit1BfMzga5df7VrLPVRbom1Bi0kQ0BDeDex3hHNqHS5X+HSrd/njzP1xp8twG8hTE+njv85PWoGBTo1eUGW/esChIJKA5f3/F4B9ErgBNNOKnYmRgxixd562OWAwAQZK0r0roe2H/Mfg2VvgxT0kHd22NQLoAv0YI4jcXcCFrnV/80vHUQ8AsAYAbkLcz1jkfk3YwYDP8jbJCqcwJRt9ialYKJwvXlEe0TMeGdq7EjCO0z/pIpu82k2R/C0FtCFih3bUvJEmWoVVx8UGkDDQEORLbzxQCt0IOiQGFcoCCxgQmL0x9ZoljCWg5vZuuhU4uSOuRTuM+aa4xoLkeOcvgGRSOXrqfkV8JpWKoJB4dmY2qSuxw8LsAAzK0=
109 8cc6036bca532e06681c5a8fa37efaa812de67b5 0 iQIVAwUAVUP0xCBXgaxoKi1yAQLIChAAme3kg1Z0V8t5PnWKDoIvscIeAsD2s6EhMy1SofmdZ4wvYD1VmGC6TgXMCY7ssvRBhxqwG3GxwYpwELASuw2GYfVot2scN7+b8Hs5jHtkQevKbxarYni+ZI9mw/KldnJixD1yW3j+LoJFh/Fu6GD2yrfGIhimFLozcwUu3EbLk7JzyHSn7/8NFjLJz0foAYfcbowU9/BFwNVLrQPnsUbWcEifsq5bYso9MBO9k+25yLgqHoqMbGpJcgjubNy1cWoKnlKS+lOJl0/waAk+aIjHXMzFpRRuJDjxEZn7V4VdV5d23nrBTcit1BfMzga5df7VrLPVRbom1Bi0kQ0BDeDex3hHNqHS5X+HSrd/njzP1xp8twG8hTE+njv85PWoGBTo1eUGW/esChIJKA5f3/F4B9ErgBNNOKnYmRgxixd562OWAwAQZK0r0roe2H/Mfg2VvgxT0kHd22NQLoAv0YI4jcXcCFrnV/80vHUQ8AsAYAbkLcz1jkfk3YwYDP8jbJCqcwJRt9ialYKJwvXlEe0TMeGdq7EjCO0z/pIpu82k2R/C0FtCFih3bUvJEmWoVVx8UGkDDQEORLbzxQCt0IOiQGFcoCCxgQmL0x9ZoljCWg5vZuuhU4uSOuRTuM+aa4xoLkeOcvgGRSOXrqfkV8JpWKoJB4dmY2qSuxw8LsAAzK0=
110 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 0 iQIVAwUAVWy9mCBXgaxoKi1yAQIm+Q/+I/tV8DC51d4f/6T5OR+motlIx9U5za5p9XUUzfp3tzSY2PutVko/FclajVdFekZsK5pUzlh/GZhfe1jjyEEIr3UC3yWk8hMcvvS+2UDmfy81QxN7Uf0kz4mZOlME6d/fYDzf4cDKkkCXoec3kyZBw7L84mteUcrJoyb5K3fkQBrK5CG/CV7+uZN6b9+quKjtDhDEkAyc6phNanzWNgiHGucEbNgXsKM01HmV1TnN4GXTKx8y2UDalIJOPyes2OWHggibMHbaNnGnwSBAK+k29yaQ5FD0rsA+q0j3TijA1NfqvtluNEPbFOx/wJV4CxonYad93gWyEdgU34LRqqw1bx7PFUvew2/T3TJsxQLoCt67OElE7ScG8evuNEe8/4r3LDnzYFx7QMP5r5+B7PxVpj/DT+buS16BhYS8pXMMqLynFOQkX5uhEM7mNC0JTXQsBMHSDAcizVDrdFCF2OSfQjLpUfFP1VEWX7EInqj7hZrd+GE7TfBD8/rwSBSkkCX2aa9uKyt6Ius1GgQUuEETskAUvvpsNBzZxtvGpMMhqQLGlJYnBbhOmsbOyTSnXU66KJ5e/H3O0KRrF09i74v30DaY4uIH8xG6KpSkfw5s/oiLCtagfc0goUvvojk9pACDR3CKM/jVC63EVp2oUcjT72jUgSLxBgi7siLD8IW86wc=
110 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 0 iQIVAwUAVWy9mCBXgaxoKi1yAQIm+Q/+I/tV8DC51d4f/6T5OR+motlIx9U5za5p9XUUzfp3tzSY2PutVko/FclajVdFekZsK5pUzlh/GZhfe1jjyEEIr3UC3yWk8hMcvvS+2UDmfy81QxN7Uf0kz4mZOlME6d/fYDzf4cDKkkCXoec3kyZBw7L84mteUcrJoyb5K3fkQBrK5CG/CV7+uZN6b9+quKjtDhDEkAyc6phNanzWNgiHGucEbNgXsKM01HmV1TnN4GXTKx8y2UDalIJOPyes2OWHggibMHbaNnGnwSBAK+k29yaQ5FD0rsA+q0j3TijA1NfqvtluNEPbFOx/wJV4CxonYad93gWyEdgU34LRqqw1bx7PFUvew2/T3TJsxQLoCt67OElE7ScG8evuNEe8/4r3LDnzYFx7QMP5r5+B7PxVpj/DT+buS16BhYS8pXMMqLynFOQkX5uhEM7mNC0JTXQsBMHSDAcizVDrdFCF2OSfQjLpUfFP1VEWX7EInqj7hZrd+GE7TfBD8/rwSBSkkCX2aa9uKyt6Ius1GgQUuEETskAUvvpsNBzZxtvGpMMhqQLGlJYnBbhOmsbOyTSnXU66KJ5e/H3O0KRrF09i74v30DaY4uIH8xG6KpSkfw5s/oiLCtagfc0goUvvojk9pACDR3CKM/jVC63EVp2oUcjT72jUgSLxBgi7siLD8IW86wc=
111 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 0 iQIVAwUAVZRtzSBXgaxoKi1yAQJVLhAAtfn+8OzHIp6wRC4NUbkImAJRLsNTRPKeRSWPCF5O5XXQ84hp+86qjhndIE6mcJSAt4cVP8uky6sEa8ULd6b3ACRBvtgZtsecA9S/KtRjyE9CKr8nP+ogBNqJPaYlTz9RuwGedOd+8I9lYgsnRjfaHSByNMX08WEHtWqAWhSkAz/HO32ardS38cN97fckCgQtA8v7c77nBT7vcw4epgxyUQvMUxUhqmCVVhVfz8JXa5hyJxFrOtqgaVuQ1B5Y/EKxcyZT+JNHPtu3V1uc1awS/w16CEPstNBSFHax5MuT9UbY0mV2ZITP99EkM+vdomh82VHdnMo0i7Pz7XF45ychD4cteroO9gGqDDt9j7hd1rubBX1bfkPsd/APJlyeshusyTj+FqsUD/HDlvM9LRjY1HpU7i7yAlLQQ3851XKMLUPNFYu2r3bo8Wt/CCHtJvB4wYuH+7Wo3muudpU01ziJBxQrUWwPbUrG+7LvO1iEEVxB8l+8Vq0mU3Te7lJi1kGetm6xHNbtvQip5P2YUqvv+lLo/K8KoJDxsh63Y01JGwdmUDb8mnFlRx4J7hQJaoNEvz3cgnc4X8gDJD8sUOjGOPnbtz2QwTY+zj/5+FdLxWDCxNrHX5vvkVdJHcCqEfVvQTKfDMOUeKuhjI7GD7t3xRPfUxq19jjoLPe7aqn1Z1s=
111 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 0 iQIVAwUAVZRtzSBXgaxoKi1yAQJVLhAAtfn+8OzHIp6wRC4NUbkImAJRLsNTRPKeRSWPCF5O5XXQ84hp+86qjhndIE6mcJSAt4cVP8uky6sEa8ULd6b3ACRBvtgZtsecA9S/KtRjyE9CKr8nP+ogBNqJPaYlTz9RuwGedOd+8I9lYgsnRjfaHSByNMX08WEHtWqAWhSkAz/HO32ardS38cN97fckCgQtA8v7c77nBT7vcw4epgxyUQvMUxUhqmCVVhVfz8JXa5hyJxFrOtqgaVuQ1B5Y/EKxcyZT+JNHPtu3V1uc1awS/w16CEPstNBSFHax5MuT9UbY0mV2ZITP99EkM+vdomh82VHdnMo0i7Pz7XF45ychD4cteroO9gGqDDt9j7hd1rubBX1bfkPsd/APJlyeshusyTj+FqsUD/HDlvM9LRjY1HpU7i7yAlLQQ3851XKMLUPNFYu2r3bo8Wt/CCHtJvB4wYuH+7Wo3muudpU01ziJBxQrUWwPbUrG+7LvO1iEEVxB8l+8Vq0mU3Te7lJi1kGetm6xHNbtvQip5P2YUqvv+lLo/K8KoJDxsh63Y01JGwdmUDb8mnFlRx4J7hQJaoNEvz3cgnc4X8gDJD8sUOjGOPnbtz2QwTY+zj/5+FdLxWDCxNrHX5vvkVdJHcCqEfVvQTKfDMOUeKuhjI7GD7t3xRPfUxq19jjoLPe7aqn1Z1s=
112 96a38d44ba093bd1d1ecfd34119e94056030278b 0 iQIVAwUAVarUUyBXgaxoKi1yAQIfJw/+MG/0736F/9IvzgCTF6omIC+9kS8JH0n/JBGPhpbPAHK4xxjhOOz6m3Ia3c3HNoy+I6calwU6YV7k5dUzlyLhM0Z5oYpdrH+OBNxDEsD5SfhclfR63MK1kmgtD33izijsZ++6a+ZaVfyxpMTksKOktWSIDD63a5b/avb6nKY64KwJcbbeXPdelxvXV7TXYm0GvWc46BgvrHOJpYHCDaXorAn6BMq7EQF8sxdNK4GVMNMVk1njve0HOg3Kz8llPB/7QmddZXYLFGmWqICyUn1IsJDfePxzh8sOYVCbxAgitTJHJJmmH5gzVzw7t7ljtmxSJpcUGQJB2MphejmNFGfgvJPB9c6xOCfUqDjxN5m24V+UYesZntpfgs3lpfvE7785IpVnf6WfKG4PKty01ome/joHlDlrRTekKMlpiBapGMfv8EHvPBrOA+5yAHNfKsmcyCcjD1nvXYZ2/X9qY35AhdcBuNkyp55oPDOdtYIHfnOIxlYMKG1dusDx3Z4eveF0lQTzfRVoE5w+k9A2Ov3Zx0aiSkFFevJjrq5QBfs9dAiT8JYgBmWhaJzCtJm12lQirRMKR/br88Vwt/ry/UVY9cereMNvRYUGOGfC8CGGDCw4WDD+qWvyB3mmrXVuMlXxQRIZRJy5KazaQXsBWuIsx4kgGqC5Uo+yzpiQ1VMuCyI=
112 96a38d44ba093bd1d1ecfd34119e94056030278b 0 iQIVAwUAVarUUyBXgaxoKi1yAQIfJw/+MG/0736F/9IvzgCTF6omIC+9kS8JH0n/JBGPhpbPAHK4xxjhOOz6m3Ia3c3HNoy+I6calwU6YV7k5dUzlyLhM0Z5oYpdrH+OBNxDEsD5SfhclfR63MK1kmgtD33izijsZ++6a+ZaVfyxpMTksKOktWSIDD63a5b/avb6nKY64KwJcbbeXPdelxvXV7TXYm0GvWc46BgvrHOJpYHCDaXorAn6BMq7EQF8sxdNK4GVMNMVk1njve0HOg3Kz8llPB/7QmddZXYLFGmWqICyUn1IsJDfePxzh8sOYVCbxAgitTJHJJmmH5gzVzw7t7ljtmxSJpcUGQJB2MphejmNFGfgvJPB9c6xOCfUqDjxN5m24V+UYesZntpfgs3lpfvE7785IpVnf6WfKG4PKty01ome/joHlDlrRTekKMlpiBapGMfv8EHvPBrOA+5yAHNfKsmcyCcjD1nvXYZ2/X9qY35AhdcBuNkyp55oPDOdtYIHfnOIxlYMKG1dusDx3Z4eveF0lQTzfRVoE5w+k9A2Ov3Zx0aiSkFFevJjrq5QBfs9dAiT8JYgBmWhaJzCtJm12lQirRMKR/br88Vwt/ry/UVY9cereMNvRYUGOGfC8CGGDCw4WDD+qWvyB3mmrXVuMlXxQRIZRJy5KazaQXsBWuIsx4kgGqC5Uo+yzpiQ1VMuCyI=
113 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 0 iQIVAwUAVbuouCBXgaxoKi1yAQL2ng//eI1w51F4YkDiUAhrZuc8RE/chEd2o4F6Jyu9laA03vbim598ntqGjX3+UkOyTQ/zGVeZfW2cNG8zkJjSLk138DHCYl2YPPD/yxqMOJp/a7U34+HrA0aE5Y2pcfx+FofZHRvRtt40UCngicjKivko8au7Ezayidpa/vQbc6dNvGrwwk4KMgOP2HYIfHgCirR5UmaWtNpzlLhf9E7JSNL5ZXij3nt6AgEPyn0OvmmOLyUARO/JTJ6vVyLEtwiXg7B3sF5RpmyFDhrkZ+MbFHgL4k/3y9Lb97WaZl8nXJIaNPOTPJqkApFY/56S12PKYK4js2OgU+QsX1XWvouAhEx6CC6Jk9EHhr6+9qxYFhBJw7RjbswUG6LvJy/kBe+Ei5UbYg9dATf3VxQ6Gqs19lebtzltERH2yNwaHyVeqqakPSonOaUyxGMRRosvNHyrTTor38j8d27KksgpocXzBPZcc1MlS3vJg2nIwZlc9EKM9z5R0J1KAi1Z/+xzBjiGRYg5EZY6ElAw30eCjGta7tXlBssJiKeHut7QTLxCZHQuX1tKxDDs1qlXlGCMbrFqo0EiF9hTssptRG3ZyLwMdzEjnh4ki6gzONZKDI8uayAS3N+CEtWcGUtiA9OwuiFXTwodmles/Mh14LEhiVZoDK3L9TPcY22o2qRuku/6wq6QKsg=
113 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 0 iQIVAwUAVbuouCBXgaxoKi1yAQL2ng//eI1w51F4YkDiUAhrZuc8RE/chEd2o4F6Jyu9laA03vbim598ntqGjX3+UkOyTQ/zGVeZfW2cNG8zkJjSLk138DHCYl2YPPD/yxqMOJp/a7U34+HrA0aE5Y2pcfx+FofZHRvRtt40UCngicjKivko8au7Ezayidpa/vQbc6dNvGrwwk4KMgOP2HYIfHgCirR5UmaWtNpzlLhf9E7JSNL5ZXij3nt6AgEPyn0OvmmOLyUARO/JTJ6vVyLEtwiXg7B3sF5RpmyFDhrkZ+MbFHgL4k/3y9Lb97WaZl8nXJIaNPOTPJqkApFY/56S12PKYK4js2OgU+QsX1XWvouAhEx6CC6Jk9EHhr6+9qxYFhBJw7RjbswUG6LvJy/kBe+Ei5UbYg9dATf3VxQ6Gqs19lebtzltERH2yNwaHyVeqqakPSonOaUyxGMRRosvNHyrTTor38j8d27KksgpocXzBPZcc1MlS3vJg2nIwZlc9EKM9z5R0J1KAi1Z/+xzBjiGRYg5EZY6ElAw30eCjGta7tXlBssJiKeHut7QTLxCZHQuX1tKxDDs1qlXlGCMbrFqo0EiF9hTssptRG3ZyLwMdzEjnh4ki6gzONZKDI8uayAS3N+CEtWcGUtiA9OwuiFXTwodmles/Mh14LEhiVZoDK3L9TPcY22o2qRuku/6wq6QKsg=
114 1a45e49a6bed023deb229102a8903234d18054d3 0 iQIVAwUAVeYa2SBXgaxoKi1yAQLWVA//Q7vU0YzngbxIbrTPvfFiNTJcT4bx9u1xMHRZf6QBIE3KtRHKTooJwH9lGR0HHM+8DWWZup3Vzo6JuWHMGoW0v5fzDyk2czwM9BgQQPfEmoJ/ZuBMevTkTZngjgHVwhP3tHFym8Rk9vVxyiZd35EcxP+4F817GCzD+K7XliIBqVggmv9YeQDXfEtvo7UZrMPPec79t8tzt2UadI3KC1jWUriTS1Fg1KxgXW6srD80D10bYyCkkdo/KfF6BGZ9SkF+U3b95cuqSmOfoyyQwUA3JbMXXOnIefnC7lqRC2QTC6mYDx5hIkBiwymXJBe8rpq/S94VVvPGfW6A5upyeCZISLEEnAz0GlykdpIy/NogzhmWpbAMOus05Xnen6xPdNig6c/M5ZleRxVobNrZSd7c5qI3aUUyfMKXlY1j9oiUTjSKH1IizwaI3aL/MM70eErBxXiLs2tpQvZeaVLn3kwCB5YhywO3LK0x+FNx4Gl90deAXMYibGNiLTq9grpB8fuLg9M90JBjFkeYkrSJ2yGYumYyP/WBA3mYEYGDLNstOby4riTU3WCqVl+eah6ss3l+gNDjLxiMtJZ/g0gQACaAvxQ9tYp5eeRMuLRTp79QQPxv97s8IyVwE/TlPlcSFlEXAzsBvqvsolQXRVi9AxA6M2davYabBYAgRf6rRfgujoU=
114 1a45e49a6bed023deb229102a8903234d18054d3 0 iQIVAwUAVeYa2SBXgaxoKi1yAQLWVA//Q7vU0YzngbxIbrTPvfFiNTJcT4bx9u1xMHRZf6QBIE3KtRHKTooJwH9lGR0HHM+8DWWZup3Vzo6JuWHMGoW0v5fzDyk2czwM9BgQQPfEmoJ/ZuBMevTkTZngjgHVwhP3tHFym8Rk9vVxyiZd35EcxP+4F817GCzD+K7XliIBqVggmv9YeQDXfEtvo7UZrMPPec79t8tzt2UadI3KC1jWUriTS1Fg1KxgXW6srD80D10bYyCkkdo/KfF6BGZ9SkF+U3b95cuqSmOfoyyQwUA3JbMXXOnIefnC7lqRC2QTC6mYDx5hIkBiwymXJBe8rpq/S94VVvPGfW6A5upyeCZISLEEnAz0GlykdpIy/NogzhmWpbAMOus05Xnen6xPdNig6c/M5ZleRxVobNrZSd7c5qI3aUUyfMKXlY1j9oiUTjSKH1IizwaI3aL/MM70eErBxXiLs2tpQvZeaVLn3kwCB5YhywO3LK0x+FNx4Gl90deAXMYibGNiLTq9grpB8fuLg9M90JBjFkeYkrSJ2yGYumYyP/WBA3mYEYGDLNstOby4riTU3WCqVl+eah6ss3l+gNDjLxiMtJZ/g0gQACaAvxQ9tYp5eeRMuLRTp79QQPxv97s8IyVwE/TlPlcSFlEXAzsBvqvsolQXRVi9AxA6M2davYabBYAgRf6rRfgujoU=
115 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 0 iQIVAwUAVg1oMSBXgaxoKi1yAQLPag/+Pv0+pR9b9Y5RflEcERUzVu92q+l/JEiP7PHP9pAZuXoQ0ikYBFo1Ygw8tkIG00dgEaLk/2b7E3OxaU9pjU3thoX//XpTcbkJtVhe7Bkjh9/S3dRpm2FWNL9n0qnywebziB45Xs8XzUwBZTYOkVRInYr/NzSo8KNbQH1B4u2g56veb8u/7GtEvBSGnMGVYKhVUZ3jxyDf371QkdafMOJPpogkZcVhXusvMZPDBYtTIzswyxBJ2jxHzjt8+EKs+FI3FxzvQ9Ze3M5Daa7xfiHI3sOgECO8GMVaJi0F49lttKx08KONw8xLlEof+cJ+qxLxQ42X5XOQglJ2/bv5ES5JiZYAti2XSXbZK96p4wexqL4hnaLVU/2iEUfqB9Sj6itEuhGOknPD9fQo1rZXYIS8CT5nGTNG4rEpLFN6VwWn1btIMNkEHw998zU7N3HAOk6adD6zGcntUfMBvQC3V4VK3o7hp8PGeySrWrOLcC/xLKM+XRonz46woJK5D8w8lCVYAxBWEGKAFtj9hv9R8Ye9gCW0Q8BvJ7MwGpn+7fLQ1BVZdV1LZQTSBUr5u8mNeDsRo4H2hITQRhUeElIwlMsUbbN078a4JPOUgPz1+Fi8oHRccBchN6I40QohL934zhcKXQ+NXYN8BgpCicPztSg8O8Y/qvhFP12Zu4tOH8P/dFY=
115 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 0 iQIVAwUAVg1oMSBXgaxoKi1yAQLPag/+Pv0+pR9b9Y5RflEcERUzVu92q+l/JEiP7PHP9pAZuXoQ0ikYBFo1Ygw8tkIG00dgEaLk/2b7E3OxaU9pjU3thoX//XpTcbkJtVhe7Bkjh9/S3dRpm2FWNL9n0qnywebziB45Xs8XzUwBZTYOkVRInYr/NzSo8KNbQH1B4u2g56veb8u/7GtEvBSGnMGVYKhVUZ3jxyDf371QkdafMOJPpogkZcVhXusvMZPDBYtTIzswyxBJ2jxHzjt8+EKs+FI3FxzvQ9Ze3M5Daa7xfiHI3sOgECO8GMVaJi0F49lttKx08KONw8xLlEof+cJ+qxLxQ42X5XOQglJ2/bv5ES5JiZYAti2XSXbZK96p4wexqL4hnaLVU/2iEUfqB9Sj6itEuhGOknPD9fQo1rZXYIS8CT5nGTNG4rEpLFN6VwWn1btIMNkEHw998zU7N3HAOk6adD6zGcntUfMBvQC3V4VK3o7hp8PGeySrWrOLcC/xLKM+XRonz46woJK5D8w8lCVYAxBWEGKAFtj9hv9R8Ye9gCW0Q8BvJ7MwGpn+7fLQ1BVZdV1LZQTSBUr5u8mNeDsRo4H2hITQRhUeElIwlMsUbbN078a4JPOUgPz1+Fi8oHRccBchN6I40QohL934zhcKXQ+NXYN8BgpCicPztSg8O8Y/qvhFP12Zu4tOH8P/dFY=
116 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 0 iQIVAwUAViarTyBXgaxoKi1yAQLZgRAAh7c7ebn7kUWI5M/b/T6qHGjFrU5azkjamzy9IG+KIa2hZgSMxyEM7JJUFqKP4TiWa3sW03bjKGSM/SjjDSSyheX+JIVSPNyKrBwneYhPq45Ius8eiHziClkt0CSsl2d9xDRpI0JmHbN0Pf8nh7rnbL+231GDAOT6dP+2S8K1HGa/0BgEcL9gpYs4/2GyjL+hBSUjyrabzvwe48DCN5W0tEJbGFw5YEADxdfbVbNEuXL81tR4PFGiJxPW0QKRLDB74MWmiWC0gi2ZC/IhbNBZ2sLb6694d4Bx4PVwtiARh63HNXVMEaBrFu1S9NcMQyHvAOc6Zw4izF/PCeTcdEnPk8J1t5PTz09Lp0EAKxe7CWIViy350ke5eiaxO3ySrNMX6d83BOHLDqEFMSWm+ad+KEMT4CJrK4X/n/XMgEFAaU5nWlIRqrLRIeU2Ifc625T0Xh4BgTqXPpytQxhgV5b+Fi6duNk4cy+QnHT4ymxI6BPD9HvSQwc+O7h37qjvJVZmpQX6AP8O75Yza8ZbcYKRIIxZzOkwNpzE5A/vpvP5bCRn7AGcT3ORWmAYr/etr3vxUvt2fQz6U/R4S915V+AeWBdcp+uExu6VZ42M0vhhh0lyzx1VRJGVdV+LoxFKkaC42d0yT+O1QEhSB7WL1D3/a/iWubv6ieB/cvNMhFaK9DA=
116 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 0 iQIVAwUAViarTyBXgaxoKi1yAQLZgRAAh7c7ebn7kUWI5M/b/T6qHGjFrU5azkjamzy9IG+KIa2hZgSMxyEM7JJUFqKP4TiWa3sW03bjKGSM/SjjDSSyheX+JIVSPNyKrBwneYhPq45Ius8eiHziClkt0CSsl2d9xDRpI0JmHbN0Pf8nh7rnbL+231GDAOT6dP+2S8K1HGa/0BgEcL9gpYs4/2GyjL+hBSUjyrabzvwe48DCN5W0tEJbGFw5YEADxdfbVbNEuXL81tR4PFGiJxPW0QKRLDB74MWmiWC0gi2ZC/IhbNBZ2sLb6694d4Bx4PVwtiARh63HNXVMEaBrFu1S9NcMQyHvAOc6Zw4izF/PCeTcdEnPk8J1t5PTz09Lp0EAKxe7CWIViy350ke5eiaxO3ySrNMX6d83BOHLDqEFMSWm+ad+KEMT4CJrK4X/n/XMgEFAaU5nWlIRqrLRIeU2Ifc625T0Xh4BgTqXPpytQxhgV5b+Fi6duNk4cy+QnHT4ymxI6BPD9HvSQwc+O7h37qjvJVZmpQX6AP8O75Yza8ZbcYKRIIxZzOkwNpzE5A/vpvP5bCRn7AGcT3ORWmAYr/etr3vxUvt2fQz6U/R4S915V+AeWBdcp+uExu6VZ42M0vhhh0lyzx1VRJGVdV+LoxFKkaC42d0yT+O1QEhSB7WL1D3/a/iWubv6ieB/cvNMhFaK9DA=
117 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 0 iQIVAwUAVjZiKiBXgaxoKi1yAQKBWQ/+JcE37vprSOA5e0ezs/avC7leR6hTlXy9O5bpFnvMpbVMTUp+KfBE4HxTT0KKXKh9lGtNaQ+lAmHuy1OQE1hBKPIaCUd8/1gunGsXgRM3TJ9LwjFd4qFpOMxvOouc6kW5kmea7V9W2fg6aFNjjc/4/0J3HMOIjmf2fFz87xqR1xX8iezJ57A4pUPNViJlOWXRzfa56cI6VUe5qOMD0NRXcY+JyI5qW25Y/aL5D9loeKflpzd53Ue+Pu3qlhddJd3PVkaAiVDH+DYyRb8sKgwuiEsyaBO18IBgC8eDmTohEJt6707A+WNhwBJwp9aOUhHC7caaKRYhEKuDRQ3op++VqwuxbFRXx22XYR9bEzQIlpsv9GY2k8SShU5MZqUKIhk8vppFI6RaID5bmALnLLmjmXfSPYSJDzDuCP5UTQgI3PKPOATorVrqMdKzfb7FiwtcTvtHAXpOgLaY9P9XIePbnei6Rx9TfoHYDvzFWRqzSjl21xR+ZUrJtG2fx7XLbMjEAZJcnjP++GRvNbHBOi57aX0l2LO1peQqZVMULoIivaoLFP3i16RuXXQ/bvKyHmKjJzGrLc0QCa0yfrvV2m30RRMaYlOv7ToJfdfZLXvSAP0zbAuDaXdjGnq7gpfIlNE3xM+kQ75Akcf4V4fK1p061EGBQvQz6Ov3PkPiWL/bxrQ=
117 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 0 iQIVAwUAVjZiKiBXgaxoKi1yAQKBWQ/+JcE37vprSOA5e0ezs/avC7leR6hTlXy9O5bpFnvMpbVMTUp+KfBE4HxTT0KKXKh9lGtNaQ+lAmHuy1OQE1hBKPIaCUd8/1gunGsXgRM3TJ9LwjFd4qFpOMxvOouc6kW5kmea7V9W2fg6aFNjjc/4/0J3HMOIjmf2fFz87xqR1xX8iezJ57A4pUPNViJlOWXRzfa56cI6VUe5qOMD0NRXcY+JyI5qW25Y/aL5D9loeKflpzd53Ue+Pu3qlhddJd3PVkaAiVDH+DYyRb8sKgwuiEsyaBO18IBgC8eDmTohEJt6707A+WNhwBJwp9aOUhHC7caaKRYhEKuDRQ3op++VqwuxbFRXx22XYR9bEzQIlpsv9GY2k8SShU5MZqUKIhk8vppFI6RaID5bmALnLLmjmXfSPYSJDzDuCP5UTQgI3PKPOATorVrqMdKzfb7FiwtcTvtHAXpOgLaY9P9XIePbnei6Rx9TfoHYDvzFWRqzSjl21xR+ZUrJtG2fx7XLbMjEAZJcnjP++GRvNbHBOi57aX0l2LO1peQqZVMULoIivaoLFP3i16RuXXQ/bvKyHmKjJzGrLc0QCa0yfrvV2m30RRMaYlOv7ToJfdfZLXvSAP0zbAuDaXdjGnq7gpfIlNE3xM+kQ75Akcf4V4fK1p061EGBQvQz6Ov3PkPiWL/bxrQ=
118 1aa5083cbebbe7575c88f3402ab377539b484897 0 iQIVAwUAVkEdCCBXgaxoKi1yAQKdWg//crTr5gsnHQppuD1p+PPn3/7SMsWJ7bgbuaXgERDLC0zWMfhM2oMmu/4jqXnpangdBVvb0SojejgzxoBo9FfRQiIoKt0vxmmn+S8CrEwb99rpP4M7lgyMAInKPMXQdYxkoDNwL70Afmog6eBtlxjYnu8nmUE/swu6JoVns+tF8UOvIKFYbuCcGujo2pUOQC0xBGiHeHSGRDJOlWmY2d7D/PkQtQE/u/d4QZt7enTHMiV44XVJ8+0U0f1ZQE7V+hNWf+IjwcZtL95dnQzUKs6tXMIln/OwO+eJ3d61BfLvmABvCwUC9IepPssNSFBUfGqBAP5wXOzFIPSYn00IWpmZtCnpUNL99X1IV3RP+p99gnEDTScQFPYt5B0q5I1nFdRh1p48BSF/kjPA7V++UfBwMXrrYLKhUR9BjmrRzYnyXJKwbH6iCNj5hsXUkVrBdBi/FnMczgsVILfFcIXUfnJD3E/dG+1lmuObg6dEynxiGChTuaR4KkLa5ZRkUcUl6fWlSRsqSNbGEEbdwcI+nTCZqJUlLSghumhs0Z89Hs1nltBd1ALX2VLJEHrKMrFQ8NfEBeCB6ENqMJi5qPlq354MCdGOZ9RvisX/HlxE4Q61BW0+EwnyXSch6LFSOS3axOocUazMoK1XiOTJSv/5bAsnwb0ztDWeUj9fZEJL+SWtgB8=
118 1aa5083cbebbe7575c88f3402ab377539b484897 0 iQIVAwUAVkEdCCBXgaxoKi1yAQKdWg//crTr5gsnHQppuD1p+PPn3/7SMsWJ7bgbuaXgERDLC0zWMfhM2oMmu/4jqXnpangdBVvb0SojejgzxoBo9FfRQiIoKt0vxmmn+S8CrEwb99rpP4M7lgyMAInKPMXQdYxkoDNwL70Afmog6eBtlxjYnu8nmUE/swu6JoVns+tF8UOvIKFYbuCcGujo2pUOQC0xBGiHeHSGRDJOlWmY2d7D/PkQtQE/u/d4QZt7enTHMiV44XVJ8+0U0f1ZQE7V+hNWf+IjwcZtL95dnQzUKs6tXMIln/OwO+eJ3d61BfLvmABvCwUC9IepPssNSFBUfGqBAP5wXOzFIPSYn00IWpmZtCnpUNL99X1IV3RP+p99gnEDTScQFPYt5B0q5I1nFdRh1p48BSF/kjPA7V++UfBwMXrrYLKhUR9BjmrRzYnyXJKwbH6iCNj5hsXUkVrBdBi/FnMczgsVILfFcIXUfnJD3E/dG+1lmuObg6dEynxiGChTuaR4KkLa5ZRkUcUl6fWlSRsqSNbGEEbdwcI+nTCZqJUlLSghumhs0Z89Hs1nltBd1ALX2VLJEHrKMrFQ8NfEBeCB6ENqMJi5qPlq354MCdGOZ9RvisX/HlxE4Q61BW0+EwnyXSch6LFSOS3axOocUazMoK1XiOTJSv/5bAsnwb0ztDWeUj9fZEJL+SWtgB8=
119 2d437a0f3355834a9485bbbeb30a52a052c98f19 0 iQIVAwUAVl5U9CBXgaxoKi1yAQLocg//a4YFz9UVSIEzVEJMUPJnN2dBvEXRpwpb5CdKPd428+18K6VWZd5Mc6xNNRV5AV/hCYylgqDplIvyOvwCj7uN8nEOrLUQQ0Pp37M5ZIX8ZVCK/wgchJ2ltabUG1NrZ7/JA84U79VGLAECMnD0Z9WvZDESpVXmdXfxrk1eCc3omRB0ofNghEx+xpYworfZsu8aap1GHQuBsjPv4VyUWGpMq/KA01PdxRTELmrJnfSyr0nPKwxlI5KsbA1GOe+Mk3tp5HJ42DZqLtKSGPirf6E+6lRJeB0H7EpotN4wD3yZDsw6AgRb2C/ay/3T3Oz7CN+45mwuujV9Cxx5zs1EeOgZcqgA/hXMcwlQyvQDMrWpO8ytSBm6MhOuFOTB3HnUxfsnfSocLJsbNwGWKceAzACcXSqapveVAz/7h+InFgl/8Qce28UJdnX5wro5gP6UWt+xrvc7vfmVGgI3oxbiOUrfglhkjmrxBjEiDQy4BWH7HWMZUVxnqPQRcxIE10+dv0KtM/PBkbUtnbGJ88opFBGkFweje5vQcZy/duuPEIufRkPr8EV47QjOxlvldEjlLq3+QUdJZEgCIFw1X0y7Pix4dsPFjwOmAyo4El1ePrdFzG3dXSVA3eHvMDRnYnNlue9wHvKhYbBle5xTOZBgGuMzhDVe+54JLql5JYr4WrI1pvA=
119 2d437a0f3355834a9485bbbeb30a52a052c98f19 0 iQIVAwUAVl5U9CBXgaxoKi1yAQLocg//a4YFz9UVSIEzVEJMUPJnN2dBvEXRpwpb5CdKPd428+18K6VWZd5Mc6xNNRV5AV/hCYylgqDplIvyOvwCj7uN8nEOrLUQQ0Pp37M5ZIX8ZVCK/wgchJ2ltabUG1NrZ7/JA84U79VGLAECMnD0Z9WvZDESpVXmdXfxrk1eCc3omRB0ofNghEx+xpYworfZsu8aap1GHQuBsjPv4VyUWGpMq/KA01PdxRTELmrJnfSyr0nPKwxlI5KsbA1GOe+Mk3tp5HJ42DZqLtKSGPirf6E+6lRJeB0H7EpotN4wD3yZDsw6AgRb2C/ay/3T3Oz7CN+45mwuujV9Cxx5zs1EeOgZcqgA/hXMcwlQyvQDMrWpO8ytSBm6MhOuFOTB3HnUxfsnfSocLJsbNwGWKceAzACcXSqapveVAz/7h+InFgl/8Qce28UJdnX5wro5gP6UWt+xrvc7vfmVGgI3oxbiOUrfglhkjmrxBjEiDQy4BWH7HWMZUVxnqPQRcxIE10+dv0KtM/PBkbUtnbGJ88opFBGkFweje5vQcZy/duuPEIufRkPr8EV47QjOxlvldEjlLq3+QUdJZEgCIFw1X0y7Pix4dsPFjwOmAyo4El1ePrdFzG3dXSVA3eHvMDRnYnNlue9wHvKhYbBle5xTOZBgGuMzhDVe+54JLql5JYr4WrI1pvA=
120 ea389970c08449440587712117f178d33bab3f1e 0 iQIVAwUAVociGyBXgaxoKi1yAQJx9Q//TzMypcls5CQW3DM9xY1Q+RFeIw1LcDIev6NDBjUYxULb2WIK2qPw4Th5czF622SMd+XO/kiQeWYp9IW90MZOUVT1YGgUPKlKWMjkf0lZEPzprHjHq0+z/no1kBCBQg2uUOLsb6Y7zom4hFCyPsxXOk5nnxcFEK0VDbODa9zoKb/flyQ7rtzs+Z6BljIQ0TJAJsXs+6XgrW1XJ/f6nbeqsQyPklIBJuGKiaU1Pg8wQe6QqFaO1NYgM3hBETku6r3OTpUhu/2FTUZ7yDWGGzBqmifxzdHoj7/B+2qzRpII77PlZqoe6XF+UOObSFnhKvXKLjlGY5cy3SXBMbHkPcYtHua8wYR8LqO2bYYnsDd9qD0DJ+LlqH0ZMUkB2Cdk9q/cp1PGJWGlYYecHP87DLuWKwS+a6LhVI9TGkIUosVtLaIMsUUEz83RJFb4sSGOXtjk5DDznn9QW8ltXXMTdGQwFq1vmuiXATYenhszbvagrnbAnDyNFths4IhS1jG8237SB36nGmO3zQm5V7AMHfSrISB/8VPyY4Si7uvAV2kMWxuMhYuQbBwVx/KxbKrYjowuvJvCKaV101rWxvSeU2wDih20v+dnQKPveRNnO8AAK/ICflVVsISkd7hXcfk+SnhfxcPQTr+HQIJEW9wt5Q8WbgHk9wuR8kgXQEX6tCGpT/w=
120 ea389970c08449440587712117f178d33bab3f1e 0 iQIVAwUAVociGyBXgaxoKi1yAQJx9Q//TzMypcls5CQW3DM9xY1Q+RFeIw1LcDIev6NDBjUYxULb2WIK2qPw4Th5czF622SMd+XO/kiQeWYp9IW90MZOUVT1YGgUPKlKWMjkf0lZEPzprHjHq0+z/no1kBCBQg2uUOLsb6Y7zom4hFCyPsxXOk5nnxcFEK0VDbODa9zoKb/flyQ7rtzs+Z6BljIQ0TJAJsXs+6XgrW1XJ/f6nbeqsQyPklIBJuGKiaU1Pg8wQe6QqFaO1NYgM3hBETku6r3OTpUhu/2FTUZ7yDWGGzBqmifxzdHoj7/B+2qzRpII77PlZqoe6XF+UOObSFnhKvXKLjlGY5cy3SXBMbHkPcYtHua8wYR8LqO2bYYnsDd9qD0DJ+LlqH0ZMUkB2Cdk9q/cp1PGJWGlYYecHP87DLuWKwS+a6LhVI9TGkIUosVtLaIMsUUEz83RJFb4sSGOXtjk5DDznn9QW8ltXXMTdGQwFq1vmuiXATYenhszbvagrnbAnDyNFths4IhS1jG8237SB36nGmO3zQm5V7AMHfSrISB/8VPyY4Si7uvAV2kMWxuMhYuQbBwVx/KxbKrYjowuvJvCKaV101rWxvSeU2wDih20v+dnQKPveRNnO8AAK/ICflVVsISkd7hXcfk+SnhfxcPQTr+HQIJEW9wt5Q8WbgHk9wuR8kgXQEX6tCGpT/w=
121 158bdc8965720ca4061f8f8d806563cfc7cdb62e 0 iQIVAwUAVqBhFyBXgaxoKi1yAQLJpQ//S8kdgmVlS+CI0d2hQVGYWB/eK+tcntG+bZKLto4bvVy5d0ymlDL0x7VrJMOkwzkU1u/GaYo3L6CVEiM/JGCgB32bllrpx+KwQ0AyHswMZruo/6xrjDIYymLMEJ9yonXBZsG7pf2saYTHm3C5/ZIPkrDZSlssJHJDdeWqd75hUnx3nX8dZ4jIIxYDhtdB5/EmuEGOVlbeBHVpwfDXidSJUHJRwJvDqezUlN003sQdUvOHHtRqBrhsYEhHqPMOxDidAgCvjSfWZQKOTKaPE/gQo/BP3GU++Fg55jBz+SBXpdfQJI2Gd8FZfjLkhFa9vTTTcd10YCd4CZbYLpj/4R2xWj1U4oTVEFa6d+AA5Yyu8xG53XSCCPyzfagyuyfLqsaq5r1qDZO/Mh5KZCTvc9xSF5KXj57mKvzMDpiNeQcamGmsV4yXxymKJKGMQvbnzqp+ItIdbnfk38Nuac8rqNnGmFYwMIPa50680vSZT/NhrlPJ8FVTJlfHtSUZbdjPpsqw7BgjFWaVUdwgCKIGERiK7zfR0innj9rF5oVwT8EbKiaR1uVxOKnTwZzPCbdO1euNg/HutZLVQmugiLAv5Z38L3YZf5bH7zJdUydhiTI4mGn/mgncsKXoSarnnduhoYu9OsQZc9pndhxjAEuAslEIyBsLy81fR2HOhUzw5FGNgdY=
121 158bdc8965720ca4061f8f8d806563cfc7cdb62e 0 iQIVAwUAVqBhFyBXgaxoKi1yAQLJpQ//S8kdgmVlS+CI0d2hQVGYWB/eK+tcntG+bZKLto4bvVy5d0ymlDL0x7VrJMOkwzkU1u/GaYo3L6CVEiM/JGCgB32bllrpx+KwQ0AyHswMZruo/6xrjDIYymLMEJ9yonXBZsG7pf2saYTHm3C5/ZIPkrDZSlssJHJDdeWqd75hUnx3nX8dZ4jIIxYDhtdB5/EmuEGOVlbeBHVpwfDXidSJUHJRwJvDqezUlN003sQdUvOHHtRqBrhsYEhHqPMOxDidAgCvjSfWZQKOTKaPE/gQo/BP3GU++Fg55jBz+SBXpdfQJI2Gd8FZfjLkhFa9vTTTcd10YCd4CZbYLpj/4R2xWj1U4oTVEFa6d+AA5Yyu8xG53XSCCPyzfagyuyfLqsaq5r1qDZO/Mh5KZCTvc9xSF5KXj57mKvzMDpiNeQcamGmsV4yXxymKJKGMQvbnzqp+ItIdbnfk38Nuac8rqNnGmFYwMIPa50680vSZT/NhrlPJ8FVTJlfHtSUZbdjPpsqw7BgjFWaVUdwgCKIGERiK7zfR0innj9rF5oVwT8EbKiaR1uVxOKnTwZzPCbdO1euNg/HutZLVQmugiLAv5Z38L3YZf5bH7zJdUydhiTI4mGn/mgncsKXoSarnnduhoYu9OsQZc9pndhxjAEuAslEIyBsLy81fR2HOhUzw5FGNgdY=
122 2408645de650d8a29a6ce9e7dce601d8dd0d1474 0 iQIVAwUAVq/xFSBXgaxoKi1yAQLsxhAAg+E6uJCtZZOugrrFi9S6C20SRPBwHwmw22PC5z3Ufp9Vf3vqSL/+zmWI9d/yezIVcTXgM9rKCvq58sZvo4FuO2ngPx7bL9LMJ3qx0IyHUKjwa3AwrzjSzvVhNIrRoimD+lVBI/GLmoszpMICM+Nyg3D41fNJKs6YpnwwsHNJkjMwz0n2SHAShWAgIilyANNVnwnzHE68AIkB/gBkUGtrjf6xB9mXQxAv4GPco/234FAkX9xSWsM0Rx+JLLrSBXoHmIlmu9LPjC0AKn8/DDke+fj7bFaF7hdJBUYOtlYH6f7NIvyZSpw0FHl7jPxoRCtXzIV+1dZEbbIMIXzNtzPFVDYDfMhLqpTgthkZ9x0UaMaHecCUWYYBp8G/IyVS40GJodl8xnRiXUkFejbK/NDdR1f9iZS0dtiFu66cATMdb6d+MG+zW0nDKiQmBt6bwynysqn4g3SIGQFEPyEoRy0bXiefHrlkeHbdfc4zgoejx3ywcRDMGvUbpWs5C43EPu44irKXcqC695vAny3A7nZpt/XP5meDdOF67DNQPvhFdjPPbJBpSsUi2hUlZ+599wUfr3lNVzeEzHT7XApTOf6ysuGtHH3qcVHpFqQSRL1MI0f2xL13UadgTVWYrnHEis7f+ncwlWiR0ucpJB3+dQQh3NVGVo89MfbIZPkA8iil03U=
122 2408645de650d8a29a6ce9e7dce601d8dd0d1474 0 iQIVAwUAVq/xFSBXgaxoKi1yAQLsxhAAg+E6uJCtZZOugrrFi9S6C20SRPBwHwmw22PC5z3Ufp9Vf3vqSL/+zmWI9d/yezIVcTXgM9rKCvq58sZvo4FuO2ngPx7bL9LMJ3qx0IyHUKjwa3AwrzjSzvVhNIrRoimD+lVBI/GLmoszpMICM+Nyg3D41fNJKs6YpnwwsHNJkjMwz0n2SHAShWAgIilyANNVnwnzHE68AIkB/gBkUGtrjf6xB9mXQxAv4GPco/234FAkX9xSWsM0Rx+JLLrSBXoHmIlmu9LPjC0AKn8/DDke+fj7bFaF7hdJBUYOtlYH6f7NIvyZSpw0FHl7jPxoRCtXzIV+1dZEbbIMIXzNtzPFVDYDfMhLqpTgthkZ9x0UaMaHecCUWYYBp8G/IyVS40GJodl8xnRiXUkFejbK/NDdR1f9iZS0dtiFu66cATMdb6d+MG+zW0nDKiQmBt6bwynysqn4g3SIGQFEPyEoRy0bXiefHrlkeHbdfc4zgoejx3ywcRDMGvUbpWs5C43EPu44irKXcqC695vAny3A7nZpt/XP5meDdOF67DNQPvhFdjPPbJBpSsUi2hUlZ+599wUfr3lNVzeEzHT7XApTOf6ysuGtHH3qcVHpFqQSRL1MI0f2xL13UadgTVWYrnHEis7f+ncwlWiR0ucpJB3+dQQh3NVGVo89MfbIZPkA8iil03U=
123 b698abf971e7377d9b7ec7fc8c52df45255b0329 0 iQIVAwUAVrJ4YCBXgaxoKi1yAQJsKw/+JHSR0bIyarO4/VilFwsYxCprOnPxmUdS4qc4yjvpbf7Dqqr/OnOHJA29LrMoqWqsHgREepemjqiNindwNtlZec+KgmbF08ihSBBpls96UTTYTcytKRkkbrB+FhwB0iDl/o8RgGPniyG6M7gOp6p8pXQVRCOToIY1B/G0rtpkcU1N3GbiZntO5Fm/LPAVIE74VaDsamMopQ/wEB8qiERngX/M8SjO1ZSaVNW6KjRUsarLXQB9ziVJBolK/WnQsDwEeuWU2udpjBiOHnFC6h84uBpc8rLGhr419bKMJcjgl+0sl2zHGPY2edQYuJqVjVENzf4zzZA+xPgKw3GrSTpd37PEnGU/fufdJ0X+pp3kvmO1cV3TsvVMTCn7NvS6+w8SGdHdwKQQwelYI6vmJnjuOCATbafJiHMaOQ0GVYYk6PPoGrYcQ081x6dStCMaHIPOV1Wirwd2wq+SN9Ql8H6njftBf5Sa5tVWdW/zrhsltMsdZYZagZ/oFT3t83exL0rgZ96bZFs0j3HO3APELygIVuQ6ybPsFyToMDbURNDvr7ZqPKhQkkdHIUMqEez5ReuVgpbO9CWV/yWpB1/ZCpjNBZyDvw05kG2mOoC7AbHc8aLUS/8DetAmhwyb48LW4qjfUkO7RyxVSxqdnaBOMlsg1wsP2S+SlkZKsDHjcquZJ5U=
123 b698abf971e7377d9b7ec7fc8c52df45255b0329 0 iQIVAwUAVrJ4YCBXgaxoKi1yAQJsKw/+JHSR0bIyarO4/VilFwsYxCprOnPxmUdS4qc4yjvpbf7Dqqr/OnOHJA29LrMoqWqsHgREepemjqiNindwNtlZec+KgmbF08ihSBBpls96UTTYTcytKRkkbrB+FhwB0iDl/o8RgGPniyG6M7gOp6p8pXQVRCOToIY1B/G0rtpkcU1N3GbiZntO5Fm/LPAVIE74VaDsamMopQ/wEB8qiERngX/M8SjO1ZSaVNW6KjRUsarLXQB9ziVJBolK/WnQsDwEeuWU2udpjBiOHnFC6h84uBpc8rLGhr419bKMJcjgl+0sl2zHGPY2edQYuJqVjVENzf4zzZA+xPgKw3GrSTpd37PEnGU/fufdJ0X+pp3kvmO1cV3TsvVMTCn7NvS6+w8SGdHdwKQQwelYI6vmJnjuOCATbafJiHMaOQ0GVYYk6PPoGrYcQ081x6dStCMaHIPOV1Wirwd2wq+SN9Ql8H6njftBf5Sa5tVWdW/zrhsltMsdZYZagZ/oFT3t83exL0rgZ96bZFs0j3HO3APELygIVuQ6ybPsFyToMDbURNDvr7ZqPKhQkkdHIUMqEez5ReuVgpbO9CWV/yWpB1/ZCpjNBZyDvw05kG2mOoC7AbHc8aLUS/8DetAmhwyb48LW4qjfUkO7RyxVSxqdnaBOMlsg1wsP2S+SlkZKsDHjcquZJ5U=
124 d493d64757eb45ada99fcb3693e479a51b7782da 0 iQIVAwUAVtYt4SBXgaxoKi1yAQL6TQ/9FzYE/xOSC2LYqPdPjCXNjGuZdN1WMf/8fUMYT83NNOoLEBGx37C0bAxgD4/P03FwYMuP37IjIcX8vN6fWvtG9Oo0o2n/oR3SKjpsheh2zxhAFX3vXhFD4U18wCz/DnM0O1qGJwJ49kk/99WNgDWeW4n9dMzTFpcaeZBCu1REbZQS40Z+ArXTDCr60g5TLN1XR1WKEzQJvF71rvaE6P8d3GLoGobTIJMLi5UnMwGsnsv2/EIPrWHQiAY9ZEnYq6deU/4RMh9c7afZie9I+ycIA/qVH6vXNt3/a2BP3Frmv8IvKPzqwnoWmIUamew9lLf1joD5joBy8Yu+qMW0/s6DYUGQ4Slk9qIfn6wh4ySgT/7FJUMcayx9ONDq7920RjRc+XFpD8B3Zhj2mM+0g9At1FgX2w2Gkf957oz2nlgTVh9sdPvP6UvWzhqszPMpdG5Vt0oc5vuyobW333qSkufCxi5gmH7do1DIzErMcy8b6IpZUDeQ/dakKwLQpZVVPF15IrNa/zsOW55SrGrL8/ErM/mXNQBBAqvRsOLq2njFqK2JaoG6biH21DMjHVZFw2wBRoLQxbOppfz2/e3mNkNy9HjgJTW3+0iHWvRzMSjwRbk9BlbkmH6kG5163ElHq3Ft3uuQyZBL9I5SQxlHi9s/CV0YSTYthpWR3ChKIMoqBQ0=
124 d493d64757eb45ada99fcb3693e479a51b7782da 0 iQIVAwUAVtYt4SBXgaxoKi1yAQL6TQ/9FzYE/xOSC2LYqPdPjCXNjGuZdN1WMf/8fUMYT83NNOoLEBGx37C0bAxgD4/P03FwYMuP37IjIcX8vN6fWvtG9Oo0o2n/oR3SKjpsheh2zxhAFX3vXhFD4U18wCz/DnM0O1qGJwJ49kk/99WNgDWeW4n9dMzTFpcaeZBCu1REbZQS40Z+ArXTDCr60g5TLN1XR1WKEzQJvF71rvaE6P8d3GLoGobTIJMLi5UnMwGsnsv2/EIPrWHQiAY9ZEnYq6deU/4RMh9c7afZie9I+ycIA/qVH6vXNt3/a2BP3Frmv8IvKPzqwnoWmIUamew9lLf1joD5joBy8Yu+qMW0/s6DYUGQ4Slk9qIfn6wh4ySgT/7FJUMcayx9ONDq7920RjRc+XFpD8B3Zhj2mM+0g9At1FgX2w2Gkf957oz2nlgTVh9sdPvP6UvWzhqszPMpdG5Vt0oc5vuyobW333qSkufCxi5gmH7do1DIzErMcy8b6IpZUDeQ/dakKwLQpZVVPF15IrNa/zsOW55SrGrL8/ErM/mXNQBBAqvRsOLq2njFqK2JaoG6biH21DMjHVZFw2wBRoLQxbOppfz2/e3mNkNy9HjgJTW3+0iHWvRzMSjwRbk9BlbkmH6kG5163ElHq3Ft3uuQyZBL9I5SQxlHi9s/CV0YSTYthpWR3ChKIMoqBQ0=
125 ae279d4a19e9683214cbd1fe8298cf0b50571432 0 iQIVAwUAVvqzViBXgaxoKi1yAQKUCxAAtctMD3ydbe+li3iYjhY5qT0wyHwPr9fcLqsQUJ4ZtD4sK3oxCRZFWFxNBk5bIIyiwusSEJPiPddoQ7NljSZlYDI0HR3R4vns55fmDwPG07Ykf7aSyqr+c2ppCGzn2/2ID476FNtzKqjF+LkVyadgI9vgZk5S4BgdSlfSRBL+1KtB1BlF5etIZnc5U9qs1uqzZJc06xyyF8HlrmMZkAvRUbsx/JzA5LgzZ2WzueaxZgYzYjDk0nPLgyPPBj0DVyWXnW/kdRNmKHNbaZ9aZlWmdPCEoq5iBm71d7Xoa61shmeuVZWvxHNqXdjVMHVeT61cRxjdfxTIkJwvlRGwpy7V17vTgzWFxw6QJpmr7kupRo3idsDydLDPHGUsxP3uMZFsp6+4rEe6qbafjNajkRyiw7kVGCxboOFN0rLVJPZwZGksEIkw58IHcPhZNT1bHHocWOA/uHJTAynfKsAdv/LDdGKcZWUCFOzlokw54xbPvdrBtEOnYNp15OY01IAJd2FCUki5WHvhELUggTjfank1Tc3/Rt1KrGOFhg80CWq6eMiuiWkHGvYq3fjNLbgjl3JJatUFoB+cX1ulDOGsLJEXQ4v5DNHgel0o2H395owNlStksSeW1UBVk0hUK/ADtVUYKAPEIFiboh1iDpEOl40JVnYdsGz3w5FLj2w+16/1vWs=
125 ae279d4a19e9683214cbd1fe8298cf0b50571432 0 iQIVAwUAVvqzViBXgaxoKi1yAQKUCxAAtctMD3ydbe+li3iYjhY5qT0wyHwPr9fcLqsQUJ4ZtD4sK3oxCRZFWFxNBk5bIIyiwusSEJPiPddoQ7NljSZlYDI0HR3R4vns55fmDwPG07Ykf7aSyqr+c2ppCGzn2/2ID476FNtzKqjF+LkVyadgI9vgZk5S4BgdSlfSRBL+1KtB1BlF5etIZnc5U9qs1uqzZJc06xyyF8HlrmMZkAvRUbsx/JzA5LgzZ2WzueaxZgYzYjDk0nPLgyPPBj0DVyWXnW/kdRNmKHNbaZ9aZlWmdPCEoq5iBm71d7Xoa61shmeuVZWvxHNqXdjVMHVeT61cRxjdfxTIkJwvlRGwpy7V17vTgzWFxw6QJpmr7kupRo3idsDydLDPHGUsxP3uMZFsp6+4rEe6qbafjNajkRyiw7kVGCxboOFN0rLVJPZwZGksEIkw58IHcPhZNT1bHHocWOA/uHJTAynfKsAdv/LDdGKcZWUCFOzlokw54xbPvdrBtEOnYNp15OY01IAJd2FCUki5WHvhELUggTjfank1Tc3/Rt1KrGOFhg80CWq6eMiuiWkHGvYq3fjNLbgjl3JJatUFoB+cX1ulDOGsLJEXQ4v5DNHgel0o2H395owNlStksSeW1UBVk0hUK/ADtVUYKAPEIFiboh1iDpEOl40JVnYdsGz3w5FLj2w+16/1vWs=
126 740156eedf2c450aee58b1a90b0e826f47c5da64 0 iQIVAwUAVxLGMCBXgaxoKi1yAQLhIg/8DDX+sCz7LmqO47/FfTo+OqGR+bTTqpfK3WebitL0Z6hbXPj7s45jijqIFGqKgMPqS5oom1xeuGTPHdYA0NNoc/mxSCuNLfuXYolpNWPN71HeSDRV9SnhMThG5HSxI+P0Ye4rbsCHrVV+ib1rV81QE2kZ9aZsJd0HnGd512xJ+2ML7AXweM/4lcLmMthN+oi/dv1OGLzfckrcr/fEATCLZt55eO7idx11J1Fk4ptQ6dQ/bKznlD4hneyy1HMPsGxw+bCXrMF2C/nUiRLHdKgGqZ+cDq6loQRfFlQoIhfoEnWC424qbjH4rvHgkZHqC59Oi/ti9Hi75oq9Tb79yzlCY/fGsdrlJpEzrTQdHFMHUoO9CC+JYObXHRo3ALnC5350ZBKxlkdpmucrHTgcDabfhRlx9vDxP4RDopm2hAjk2LJH7bdxnGEyZYkTOZ3hXKnVpt2hUQb4jyzzC9Kl47TFpPKNVKI+NLqRRZAIdXXiy24KD7WzzE6L0NNK0/IeqKBENLL8I1PmDQ6XmYTQVhTuad1jjm2PZDyGiXmJFZO1O/NGecVTvVynKsDT6XhEvzyEtjXqD98rrhbeMHTcmNSwwJMDvm9ws0075sLQyq2EYFG6ECWFypdA/jfumTmxOTkMtuy/V1Gyq7YJ8YaksZ7fXNY9VuJFP72grmlXc6Dvpr4=
126 740156eedf2c450aee58b1a90b0e826f47c5da64 0 iQIVAwUAVxLGMCBXgaxoKi1yAQLhIg/8DDX+sCz7LmqO47/FfTo+OqGR+bTTqpfK3WebitL0Z6hbXPj7s45jijqIFGqKgMPqS5oom1xeuGTPHdYA0NNoc/mxSCuNLfuXYolpNWPN71HeSDRV9SnhMThG5HSxI+P0Ye4rbsCHrVV+ib1rV81QE2kZ9aZsJd0HnGd512xJ+2ML7AXweM/4lcLmMthN+oi/dv1OGLzfckrcr/fEATCLZt55eO7idx11J1Fk4ptQ6dQ/bKznlD4hneyy1HMPsGxw+bCXrMF2C/nUiRLHdKgGqZ+cDq6loQRfFlQoIhfoEnWC424qbjH4rvHgkZHqC59Oi/ti9Hi75oq9Tb79yzlCY/fGsdrlJpEzrTQdHFMHUoO9CC+JYObXHRo3ALnC5350ZBKxlkdpmucrHTgcDabfhRlx9vDxP4RDopm2hAjk2LJH7bdxnGEyZYkTOZ3hXKnVpt2hUQb4jyzzC9Kl47TFpPKNVKI+NLqRRZAIdXXiy24KD7WzzE6L0NNK0/IeqKBENLL8I1PmDQ6XmYTQVhTuad1jjm2PZDyGiXmJFZO1O/NGecVTvVynKsDT6XhEvzyEtjXqD98rrhbeMHTcmNSwwJMDvm9ws0075sLQyq2EYFG6ECWFypdA/jfumTmxOTkMtuy/V1Gyq7YJ8YaksZ7fXNY9VuJFP72grmlXc6Dvpr4=
127 f85de28eae32e7d3064b1a1321309071bbaaa069 0 iQIVAwUAVyZQaiBXgaxoKi1yAQJhCQ//WrRZ55k3VI/OgY+I/HvgFHOC0sbhe207Kedxvy00a3AtXM6wa5E95GNX04QxUfTWUf5ZHDfEgj0/mQywNrH1oJG47iPZSs+qXNLqtgAaXtrih6r4/ruUwFCRFxqK9mkhjG61SKicw3Q7uGva950g6ZUE5BsZ7XJWgoDcJzWKR+AH992G6H//Fhi4zFQAmB34++sm80wV6wMxVKA/qhQzetooTR2x9qrHpvCKMzKllleJe48yzPLJjQoaaVgXCDav0eIePFNw0WvVSldOEp/ADDdTGa65qsC1rO2BB1Cu5+frJ/vUoo0PwIgqgD6p2i41hfIKvkp6130TxmRVxUx+ma8gBYEpPIabV0flLU72gq8lMlGBBSnQ+fcZsfs/Ug0xRN0tzkEScmZFiDxRGk0y7IalXzv6irwOyC2fZCajXGJDzkROQXWMgy9eKkwuFhZBmPVYtrATSq3jHLVmJg5vfdeiVzA6NKxAgGm2z8AsRrijKK8WRqFYiH6xcWKG5u+FroPQdKa0nGCkPSTH3tvC6fAHTVm7JeXch5QE/LiS9Y575pM2PeIP+k+Fr1ugK0AEvYJAXa5UIIcdszPyI+TwPTtWaQ83X99qGAdmRWLvSYjqevOVr7F/fhO3XKFXRCcHA3EzVYnG7nWiVACYF3H2UgN4PWjStbx/Qhhdi9xAuks=
127 f85de28eae32e7d3064b1a1321309071bbaaa069 0 iQIVAwUAVyZQaiBXgaxoKi1yAQJhCQ//WrRZ55k3VI/OgY+I/HvgFHOC0sbhe207Kedxvy00a3AtXM6wa5E95GNX04QxUfTWUf5ZHDfEgj0/mQywNrH1oJG47iPZSs+qXNLqtgAaXtrih6r4/ruUwFCRFxqK9mkhjG61SKicw3Q7uGva950g6ZUE5BsZ7XJWgoDcJzWKR+AH992G6H//Fhi4zFQAmB34++sm80wV6wMxVKA/qhQzetooTR2x9qrHpvCKMzKllleJe48yzPLJjQoaaVgXCDav0eIePFNw0WvVSldOEp/ADDdTGa65qsC1rO2BB1Cu5+frJ/vUoo0PwIgqgD6p2i41hfIKvkp6130TxmRVxUx+ma8gBYEpPIabV0flLU72gq8lMlGBBSnQ+fcZsfs/Ug0xRN0tzkEScmZFiDxRGk0y7IalXzv6irwOyC2fZCajXGJDzkROQXWMgy9eKkwuFhZBmPVYtrATSq3jHLVmJg5vfdeiVzA6NKxAgGm2z8AsRrijKK8WRqFYiH6xcWKG5u+FroPQdKa0nGCkPSTH3tvC6fAHTVm7JeXch5QE/LiS9Y575pM2PeIP+k+Fr1ugK0AEvYJAXa5UIIcdszPyI+TwPTtWaQ83X99qGAdmRWLvSYjqevOVr7F/fhO3XKFXRCcHA3EzVYnG7nWiVACYF3H2UgN4PWjStbx/Qhhdi9xAuks=
128 a56296f55a5e1038ea5016dace2076b693c28a56 0 iQIVAwUAVyZarCBXgaxoKi1yAQL87g/8D7whM3e08HVGDHHEkVUgqLIfueVy1mx0AkRvelmZmwaocFNGpZTd3AjSwy6qXbRNZFXrWU85JJvQCi3PSo/8bK43kwqLJ4lv+Hv2zVTvz30vbLWTSndH3oVRu38lIA7b5K9J4y50pMCwjKLG9iyp+aQG4RBz76fJMlhXy0gu38A8JZVKEeAnQCbtzxKXBzsC8k0/ku/bEQEoo9D4AAGlVTbl5AsHMp3Z6NWu7kEHAX/52/VKU2I0LxYqRxoL1tjTVGkAQfkOHz1gOhLXUgGSYmA9Fb265AYj9cnGWCfyNonlE0Rrk2kAsrjBTGiLyb8WvK/TZmRo4ZpNukzenS9UuAOKxA22Kf9+oN9kKBu1HnwqusYDH9pto1WInCZKV1al7DMBXbGFcnyTXk2xuiTGhVRG5LzCO2QMByBLXiYl77WqqJnzxK3v5lAc/immJl5qa3ATUlTnVBjAs+6cbsbCoY6sjXCT0ClndA9+iZZ1TjPnmLrSeFh5AoE8WHmnFV6oqGN4caX6wiIW5vO+x5Q2ruSsDrwXosXIYzm+0KYKRq9O+MaTwR44Dvq3/RyeIu/cif/Nc7B8bR5Kf7OiRf2T5u97MYAomwGcQfXqgUfm6y7D3Yg+IdAdAJKitxhRPsqqdxIuteXMvOvwukXNDiWP1zsKoYLI37EcwzvbGLUlZvg=
128 a56296f55a5e1038ea5016dace2076b693c28a56 0 iQIVAwUAVyZarCBXgaxoKi1yAQL87g/8D7whM3e08HVGDHHEkVUgqLIfueVy1mx0AkRvelmZmwaocFNGpZTd3AjSwy6qXbRNZFXrWU85JJvQCi3PSo/8bK43kwqLJ4lv+Hv2zVTvz30vbLWTSndH3oVRu38lIA7b5K9J4y50pMCwjKLG9iyp+aQG4RBz76fJMlhXy0gu38A8JZVKEeAnQCbtzxKXBzsC8k0/ku/bEQEoo9D4AAGlVTbl5AsHMp3Z6NWu7kEHAX/52/VKU2I0LxYqRxoL1tjTVGkAQfkOHz1gOhLXUgGSYmA9Fb265AYj9cnGWCfyNonlE0Rrk2kAsrjBTGiLyb8WvK/TZmRo4ZpNukzenS9UuAOKxA22Kf9+oN9kKBu1HnwqusYDH9pto1WInCZKV1al7DMBXbGFcnyTXk2xuiTGhVRG5LzCO2QMByBLXiYl77WqqJnzxK3v5lAc/immJl5qa3ATUlTnVBjAs+6cbsbCoY6sjXCT0ClndA9+iZZ1TjPnmLrSeFh5AoE8WHmnFV6oqGN4caX6wiIW5vO+x5Q2ruSsDrwXosXIYzm+0KYKRq9O+MaTwR44Dvq3/RyeIu/cif/Nc7B8bR5Kf7OiRf2T5u97MYAomwGcQfXqgUfm6y7D3Yg+IdAdAJKitxhRPsqqdxIuteXMvOvwukXNDiWP1zsKoYLI37EcwzvbGLUlZvg=
129 aaabed77791a75968a12b8c43ad263631a23ee81 0 iQIVAwUAVzpH4CBXgaxoKi1yAQLm5A/9GUYv9CeIepjcdWSBAtNhCBJcqgk2cBcV0XaeQomfxqYWfbW2fze6eE+TrXPKTX1ajycgqquMyo3asQolhHXwasv8+5CQxowjGfyVg7N/kyyjgmJljI+rCi74VfnsEhvG/J4GNr8JLVQmSICfALqQjw7XN8doKthYhwOfIY2vY419613v4oeBQXSsItKC/tfKw9lYvlk4qJKDffJQFyAekgv43ovWqHNkl4LaR6ubtjOsxCnxHfr7OtpX3muM9MLT/obBax5I3EsmiDTQBOjbvI6TcLczs5tVCnTa1opQsPUcEmdA4WpUEiTnLl9lk9le/BIImfYfEP33oVYmubRlKhJYnUiu89ao9L+48FBoqCY88HqbjQI1GO6icfRJN/+NLVeE9wubltbWFETH6e2Q+Ex4+lkul1tQMLPcPt10suMHnEo3/FcOTPt6/DKeMpsYgckHSJq5KzTg632xifyySmb9qkpdGGpY9lRal6FHw3rAhRBqucMgxso4BwC51h04RImtCUQPoA3wpb4BvCHba/thpsUFnHefOvsu3ei4JyHXZK84LPwOj31PcucNFdGDTW6jvKrF1vVUIVS9uMJkJXPu0V4i/oEQSUKifJZivROlpvj1eHy3KeMtjq2kjGyXY2KdzxpT8wX/oYJhCtm1XWMui5f24XBjE6xOcjjm8k4=
129 aaabed77791a75968a12b8c43ad263631a23ee81 0 iQIVAwUAVzpH4CBXgaxoKi1yAQLm5A/9GUYv9CeIepjcdWSBAtNhCBJcqgk2cBcV0XaeQomfxqYWfbW2fze6eE+TrXPKTX1ajycgqquMyo3asQolhHXwasv8+5CQxowjGfyVg7N/kyyjgmJljI+rCi74VfnsEhvG/J4GNr8JLVQmSICfALqQjw7XN8doKthYhwOfIY2vY419613v4oeBQXSsItKC/tfKw9lYvlk4qJKDffJQFyAekgv43ovWqHNkl4LaR6ubtjOsxCnxHfr7OtpX3muM9MLT/obBax5I3EsmiDTQBOjbvI6TcLczs5tVCnTa1opQsPUcEmdA4WpUEiTnLl9lk9le/BIImfYfEP33oVYmubRlKhJYnUiu89ao9L+48FBoqCY88HqbjQI1GO6icfRJN/+NLVeE9wubltbWFETH6e2Q+Ex4+lkul1tQMLPcPt10suMHnEo3/FcOTPt6/DKeMpsYgckHSJq5KzTg632xifyySmb9qkpdGGpY9lRal6FHw3rAhRBqucMgxso4BwC51h04RImtCUQPoA3wpb4BvCHba/thpsUFnHefOvsu3ei4JyHXZK84LPwOj31PcucNFdGDTW6jvKrF1vVUIVS9uMJkJXPu0V4i/oEQSUKifJZivROlpvj1eHy3KeMtjq2kjGyXY2KdzxpT8wX/oYJhCtm1XWMui5f24XBjE6xOcjjm8k4=
130 a9764ab80e11bcf6a37255db7dd079011f767c6c 0 iQIVAwUAV09KHyBXgaxoKi1yAQJBWg/+OywRrqU+zvnL1tHJ95PgatsF7S4ZAHZFR098+oCjUDtKpvnm71o2TKiY4D5cckyD2KNwLWg/qW6V+5+2EYU0Y/ViwPVcngib/ZeJP+Nr44TK3YZMRmfFuUEEzA7sZ2r2Gm8eswv//W79I0hXJeFd/o6FgLnn7AbOjcOn3IhWdGAP6jUHv9zyJigQv6K9wgyvAnK1RQE+2CgMcoyeqao/zs23IPXI6XUHOwfrQ7XrQ83+ciMqN7XNRx+TKsUQoYeUew4AanoDSMPAQ4kIudsP5tOgKeLRPmHX9zg6Y5S1nTpLRNdyAxuNuyZtkQxDYcG5Hft/SIx27tZUo3gywHL2U+9RYD2nvXqaWzT3sYB2sPBOiq7kjHRgvothkXemAFsbq2nKFrN0PRua9WG4l3ny0xYmDFPlJ/s0E9XhmQaqy+uXtVbA2XdLEvE6pQ0YWbHEKMniW26w6LJkx4IV6RX/7Kpq7byw/bW65tu/BzgISKau5FYLY4CqZJH7f8QBg3XWpzB91AR494tdsD+ugM45wrY/6awGQx9CY5SAzGqTyFuSFQxgB2rBurb01seZPf8nqG8V13UYXfX/O3/WMOBMr7U/RVqmAA0ZMYOyEwfVUmHqrFjkxpXX+JdNKRiA1GJp5sdRpCxSeXdQ/Ni6AAGZV2IyRb4G4Y++1vP4yPBalas=
130 a9764ab80e11bcf6a37255db7dd079011f767c6c 0 iQIVAwUAV09KHyBXgaxoKi1yAQJBWg/+OywRrqU+zvnL1tHJ95PgatsF7S4ZAHZFR098+oCjUDtKpvnm71o2TKiY4D5cckyD2KNwLWg/qW6V+5+2EYU0Y/ViwPVcngib/ZeJP+Nr44TK3YZMRmfFuUEEzA7sZ2r2Gm8eswv//W79I0hXJeFd/o6FgLnn7AbOjcOn3IhWdGAP6jUHv9zyJigQv6K9wgyvAnK1RQE+2CgMcoyeqao/zs23IPXI6XUHOwfrQ7XrQ83+ciMqN7XNRx+TKsUQoYeUew4AanoDSMPAQ4kIudsP5tOgKeLRPmHX9zg6Y5S1nTpLRNdyAxuNuyZtkQxDYcG5Hft/SIx27tZUo3gywHL2U+9RYD2nvXqaWzT3sYB2sPBOiq7kjHRgvothkXemAFsbq2nKFrN0PRua9WG4l3ny0xYmDFPlJ/s0E9XhmQaqy+uXtVbA2XdLEvE6pQ0YWbHEKMniW26w6LJkx4IV6RX/7Kpq7byw/bW65tu/BzgISKau5FYLY4CqZJH7f8QBg3XWpzB91AR494tdsD+ugM45wrY/6awGQx9CY5SAzGqTyFuSFQxgB2rBurb01seZPf8nqG8V13UYXfX/O3/WMOBMr7U/RVqmAA0ZMYOyEwfVUmHqrFjkxpXX+JdNKRiA1GJp5sdRpCxSeXdQ/Ni6AAGZV2IyRb4G4Y++1vP4yPBalas=
131 26a5d605b8683a292bb89aea11f37a81b06ac016 0 iQIVAwUAV3bOsSBXgaxoKi1yAQLiDg//fxmcNpTUedsXqEwNdGFJsJ2E25OANgyv1saZHNfbYFWXIR8g4nyjNaj2SjtXF0wzOq5aHlMWXjMZPOT6pQBdTnOYDdgv+O8DGpgHs5x/f+uuxtpVkdxR6uRP0/ImlTEtDix8VQiN3nTu5A0N3C7E2y+D1JIIyTp6vyjzxvGQTY0MD/qgB55Dn6khx8c3phDtMkzmVEwL4ItJxVRVNw1m+2FOXHu++hJEruJdeMV0CKOV6LVbXHho+yt3jQDKhlIgJ65EPLKrf+yRalQtSWpu7y/vUMcEUde9XeQ5x05ebCiI4MkJ0ULQro/Bdx9vBHkAstUC7D+L5y45ZnhHjOwxz9c3GQMZQt1HuyORqbBhf9hvOkUQ2GhlDHc5U04nBe0VhEoCw9ra54n+AgUyqWr4CWimSW6pMTdquCzAAbcJWgdNMwDHrMalCYHhJksKFARKq3uSTR1Noz7sOCSIEQvOozawKSQfOwGxn/5bNepKh4uIRelC1uEDoqculqCLgAruzcMNIMndNVYaJ09IohJzA9jVApa+SZVPAeREg71lnS3d8jaWh1Lu5JFlAAKQeKGVJmNm40Y3HBjtHQDrI67TT59oDAhjo420Wf9VFCaj2k0weYBLWSeJhfUZ5x3PVpAHUvP/rnHPwNYyY0wVoQEvM/bnQdcpICmKhqcK+vKjDrM=
131 26a5d605b8683a292bb89aea11f37a81b06ac016 0 iQIVAwUAV3bOsSBXgaxoKi1yAQLiDg//fxmcNpTUedsXqEwNdGFJsJ2E25OANgyv1saZHNfbYFWXIR8g4nyjNaj2SjtXF0wzOq5aHlMWXjMZPOT6pQBdTnOYDdgv+O8DGpgHs5x/f+uuxtpVkdxR6uRP0/ImlTEtDix8VQiN3nTu5A0N3C7E2y+D1JIIyTp6vyjzxvGQTY0MD/qgB55Dn6khx8c3phDtMkzmVEwL4ItJxVRVNw1m+2FOXHu++hJEruJdeMV0CKOV6LVbXHho+yt3jQDKhlIgJ65EPLKrf+yRalQtSWpu7y/vUMcEUde9XeQ5x05ebCiI4MkJ0ULQro/Bdx9vBHkAstUC7D+L5y45ZnhHjOwxz9c3GQMZQt1HuyORqbBhf9hvOkUQ2GhlDHc5U04nBe0VhEoCw9ra54n+AgUyqWr4CWimSW6pMTdquCzAAbcJWgdNMwDHrMalCYHhJksKFARKq3uSTR1Noz7sOCSIEQvOozawKSQfOwGxn/5bNepKh4uIRelC1uEDoqculqCLgAruzcMNIMndNVYaJ09IohJzA9jVApa+SZVPAeREg71lnS3d8jaWh1Lu5JFlAAKQeKGVJmNm40Y3HBjtHQDrI67TT59oDAhjo420Wf9VFCaj2k0weYBLWSeJhfUZ5x3PVpAHUvP/rnHPwNYyY0wVoQEvM/bnQdcpICmKhqcK+vKjDrM=
132 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 0 iQIVAwUAV42tNyBXgaxoKi1yAQI/Iw//V0NtxpVD4sClotAwffBVW42Uv+SG+07CJoOuFYnmHZv/plOzXuuJlmm95L00/qyRCCTUyAGxK/eP5cAKP2V99ln6rNhh8gpgvmZlnYjU3gqFv8tCQ+fkwgRiWmgKjRL6/bK9FY5cO7ATLVu3kCkFd8CEgzlAaUqBfkNFxZxLDLvKqRlhXxVXhKjvkKg5DZ6eJqRQY7w3UqqR+sF1rMLtVyt490Wqv7YQKwcvY7MEKTyH4twGLx/RhBpBi+GccVKvWC011ffjSjxqAfQqrrSVt0Ld1Khj2/p1bDDYpTgtdDgCzclSXWEQpmSdFRBF5wYs/pDMUreI/E6mlWkB4hfZZk1NBRPRWYikXwnhU3ziubCGesZDyBYLrK1vT+tf6giseo22YQmDnOftbS999Pcn04cyCafeFuOjkubYaINB25T20GS5Wb4a0nHPRAOOVxzk/m/arwYgF0ZZZDDvJ48TRMDf3XOc1jc5qZ7AN/OQKbvh2B08vObnnPm3lmBY1qOnhwzJxpNiq+Z/ypokGXQkGBfKUo7rWHJy5iXLb3Biv9AhxY9d5pSTjBmTAYJEic3q03ztzlnfMyi+C13+YxFAbSSNGBP8Hejkkz0NvmB1TBuCKpnZA8spxY5rhZ/zMx+cCw8hQvWHHDUURps7SQvZEfrJSCGJFPDHL3vbfK+LNwI=
132 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 0 iQIVAwUAV42tNyBXgaxoKi1yAQI/Iw//V0NtxpVD4sClotAwffBVW42Uv+SG+07CJoOuFYnmHZv/plOzXuuJlmm95L00/qyRCCTUyAGxK/eP5cAKP2V99ln6rNhh8gpgvmZlnYjU3gqFv8tCQ+fkwgRiWmgKjRL6/bK9FY5cO7ATLVu3kCkFd8CEgzlAaUqBfkNFxZxLDLvKqRlhXxVXhKjvkKg5DZ6eJqRQY7w3UqqR+sF1rMLtVyt490Wqv7YQKwcvY7MEKTyH4twGLx/RhBpBi+GccVKvWC011ffjSjxqAfQqrrSVt0Ld1Khj2/p1bDDYpTgtdDgCzclSXWEQpmSdFRBF5wYs/pDMUreI/E6mlWkB4hfZZk1NBRPRWYikXwnhU3ziubCGesZDyBYLrK1vT+tf6giseo22YQmDnOftbS999Pcn04cyCafeFuOjkubYaINB25T20GS5Wb4a0nHPRAOOVxzk/m/arwYgF0ZZZDDvJ48TRMDf3XOc1jc5qZ7AN/OQKbvh2B08vObnnPm3lmBY1qOnhwzJxpNiq+Z/ypokGXQkGBfKUo7rWHJy5iXLb3Biv9AhxY9d5pSTjBmTAYJEic3q03ztzlnfMyi+C13+YxFAbSSNGBP8Hejkkz0NvmB1TBuCKpnZA8spxY5rhZ/zMx+cCw8hQvWHHDUURps7SQvZEfrJSCGJFPDHL3vbfK+LNwI=
133 299546f84e68dbb9bd026f0f3a974ce4bdb93686 0 iQIcBAABCAAGBQJXn3rFAAoJELnJ3IJKpb3VmZoQAK0cdOfi/OURglnN0vYYGwdvSXTPpZauPEYEpwML3dW1j6HRnl5L+H8D8vlYzahK95X4+NNBhqtyyB6wmIVI0NkYfXfd6ACntJE/EnTdLIHIP2NAAoVsggIjiNr26ubRegaD5ya63Ofxz+Yq5iRsUUfHet7o+CyFhExyzdu+Vcz1/E9GztxNfTDVpC/mf+RMLwQTfHOhoTVbaamLCmGAIjw39w72X+vRMJoYNF44te6PvsfI67+6uuC0+9DjMnp5eL/hquSQ1qfks71rnWwxuiPcUDZloIueowVmt0z0sO4loSP1nZ5IP/6ZOoAzSjspqsxeay9sKP0kzSYLGsmCi29otyVSnXiKtyMCW5z5iM6k8XQcMi5mWy9RcpqlNYD7RUTn3g0+a8u7F6UEtske3/qoweJLPhtTmBNOfDNw4JXwOBSZea0QnIIjCeCc4ZGqfojPpbvcA4rkRpxI23YoMrT2v/kp4wgwrqK9fi8ctt8WbXpmGoAQDXWj2bWcuzj94HsAhLduFKv6sxoDz871hqjmjjnjQSU7TSNNnVzdzwqYkMB+BvhcNYxk6lcx3Aif3AayGdrWDubtU/ZRNoLzBwe6gm0udRMXBj4D/60GD6TIkYeL7HjJwfBb6Bf7qvQ6y7g0zbYG9uwBmMeduU7XchErGqQGSEyyJH3DG9OLaFOj
133 299546f84e68dbb9bd026f0f3a974ce4bdb93686 0 iQIcBAABCAAGBQJXn3rFAAoJELnJ3IJKpb3VmZoQAK0cdOfi/OURglnN0vYYGwdvSXTPpZauPEYEpwML3dW1j6HRnl5L+H8D8vlYzahK95X4+NNBhqtyyB6wmIVI0NkYfXfd6ACntJE/EnTdLIHIP2NAAoVsggIjiNr26ubRegaD5ya63Ofxz+Yq5iRsUUfHet7o+CyFhExyzdu+Vcz1/E9GztxNfTDVpC/mf+RMLwQTfHOhoTVbaamLCmGAIjw39w72X+vRMJoYNF44te6PvsfI67+6uuC0+9DjMnp5eL/hquSQ1qfks71rnWwxuiPcUDZloIueowVmt0z0sO4loSP1nZ5IP/6ZOoAzSjspqsxeay9sKP0kzSYLGsmCi29otyVSnXiKtyMCW5z5iM6k8XQcMi5mWy9RcpqlNYD7RUTn3g0+a8u7F6UEtske3/qoweJLPhtTmBNOfDNw4JXwOBSZea0QnIIjCeCc4ZGqfojPpbvcA4rkRpxI23YoMrT2v/kp4wgwrqK9fi8ctt8WbXpmGoAQDXWj2bWcuzj94HsAhLduFKv6sxoDz871hqjmjjnjQSU7TSNNnVzdzwqYkMB+BvhcNYxk6lcx3Aif3AayGdrWDubtU/ZRNoLzBwe6gm0udRMXBj4D/60GD6TIkYeL7HjJwfBb6Bf7qvQ6y7g0zbYG9uwBmMeduU7XchErGqQGSEyyJH3DG9OLaFOj
134 ccd436f7db6d5d7b9af89715179b911d031d44f1 0 iQIVAwUAV8h7F0emf/qjRqrOAQjmdhAAgYhom8fzL/YHeVLddm71ZB+pKDviKASKGSrBHY4D5Szrh/pYTedmG9IptYue5vzXpspHAaGvZN5xkwrz1/5nmnCsLA8DFaYT9qCkize6EYzxSBtA/W1S9Mv5tObinr1EX9rCSyI4HEJYE8i1IQM5h07SqUsMKDoasd4e29t6gRWg5pfOYq1kc2MTck35W9ff1Fii8S28dqbO3cLU6g5K0pT0JLCZIq7hyTNQdxHAYfebxkVl7PZrZR383IrnyotXVKFFc44qinv94T50uR4yUNYPQ8Gu0TgoGQQjBjk1Lrxot2xpgPQAy8vx+EOJgpg/yNZnYkmJZMxjDkTGVrwvXtOXZzmy2jti7PniET9hUBCU7aNHnoJJLzIf+Vb1CIRP0ypJl8GYCZx6HIYwOQH6EtcaeUqq3r+WXWv74ijIE7OApotmutM9buTvdOLdZddBzFPIjykc6cXO+W4E0kl6u9/OHtaZ3Nynh0ejBRafRWAVw2yU3T9SgQyICsmYWJCThkj14WqCJr2b7jfGlg9MkQOUG6/3f4xz2R3SgyUD8KiGsq/vdBE53zh0YA9gppLoum6AY+z61G1NhVGlrtps90txZBehuARUUz2dJC0pBMRy8XFwXMewDSIe6ATg25pHZsxHfhcalBpJncBl8pORs7oQl+GKBVxlnV4jm1pCzLU=
134 ccd436f7db6d5d7b9af89715179b911d031d44f1 0 iQIVAwUAV8h7F0emf/qjRqrOAQjmdhAAgYhom8fzL/YHeVLddm71ZB+pKDviKASKGSrBHY4D5Szrh/pYTedmG9IptYue5vzXpspHAaGvZN5xkwrz1/5nmnCsLA8DFaYT9qCkize6EYzxSBtA/W1S9Mv5tObinr1EX9rCSyI4HEJYE8i1IQM5h07SqUsMKDoasd4e29t6gRWg5pfOYq1kc2MTck35W9ff1Fii8S28dqbO3cLU6g5K0pT0JLCZIq7hyTNQdxHAYfebxkVl7PZrZR383IrnyotXVKFFc44qinv94T50uR4yUNYPQ8Gu0TgoGQQjBjk1Lrxot2xpgPQAy8vx+EOJgpg/yNZnYkmJZMxjDkTGVrwvXtOXZzmy2jti7PniET9hUBCU7aNHnoJJLzIf+Vb1CIRP0ypJl8GYCZx6HIYwOQH6EtcaeUqq3r+WXWv74ijIE7OApotmutM9buTvdOLdZddBzFPIjykc6cXO+W4E0kl6u9/OHtaZ3Nynh0ejBRafRWAVw2yU3T9SgQyICsmYWJCThkj14WqCJr2b7jfGlg9MkQOUG6/3f4xz2R3SgyUD8KiGsq/vdBE53zh0YA9gppLoum6AY+z61G1NhVGlrtps90txZBehuARUUz2dJC0pBMRy8XFwXMewDSIe6ATg25pHZsxHfhcalBpJncBl8pORs7oQl+GKBVxlnV4jm1pCzLU=
135 149433e68974eb5c63ccb03f794d8b57339a80c4 0 iQIcBAABAgAGBQJX8AfCAAoJELnJ3IJKpb3VnNAP/3umS8tohcZTr4m6DJm9u4XGr2m3FWQmjTEfimGpsOuBC8oCgsq0eAlORYcV68zDax+vQHQu3pqfPXaX+y4ZFDuz0ForNRiPJn+Q+tj1+NrOT1e8h4gH0nSK4rDxEGaa6x01fyC/xQMqN6iNfzbLLB7+WadZlyBRbHaUeZFDlPxPDf1rjDpu1vqwtOrVzSxMasRGEceiUegwsFdFMAefCq0ya/pKe9oV+GgGfR4qNrP7BfpOBcN/Po/ctkFCbLOhHbu6M7HpBSiD57BUy5lfhQQtSjzCKEVTyrWEH0ApjjXKuJzLSyq7xsHKQSOPMgGQprGehyzdCETlZOdauGrC0t9vBCr7kXEhXtycqxBC03vknA2eNeV610VX+HgO9VpCVZWHtENiArhALCcpoEsJvT29xCBYpSii/wnTpYJFT9yW8tjQCxH0zrmEZJvO1/nMINEBQFScB/nzUELn9asnghNf6vMpSGy0fSM27j87VAXCzJ5lqa6WCL/RrKgvYflow/m5AzUfMQhpqpH1vmh4ba1zZ4123lgnW4pNZDV9kmwXrEagGbWe1rnmsMzHugsECiYQyIngjWzHfpHgyEr49Uc5bMM1MlTypeHYYL4kV1jJ8Ou0SC4aV+49p8Onmb2NlVY7JKV7hqDCuZPI164YXMxhPNst4XK0/ENhoOE+8iB6
135 149433e68974eb5c63ccb03f794d8b57339a80c4 0 iQIcBAABAgAGBQJX8AfCAAoJELnJ3IJKpb3VnNAP/3umS8tohcZTr4m6DJm9u4XGr2m3FWQmjTEfimGpsOuBC8oCgsq0eAlORYcV68zDax+vQHQu3pqfPXaX+y4ZFDuz0ForNRiPJn+Q+tj1+NrOT1e8h4gH0nSK4rDxEGaa6x01fyC/xQMqN6iNfzbLLB7+WadZlyBRbHaUeZFDlPxPDf1rjDpu1vqwtOrVzSxMasRGEceiUegwsFdFMAefCq0ya/pKe9oV+GgGfR4qNrP7BfpOBcN/Po/ctkFCbLOhHbu6M7HpBSiD57BUy5lfhQQtSjzCKEVTyrWEH0ApjjXKuJzLSyq7xsHKQSOPMgGQprGehyzdCETlZOdauGrC0t9vBCr7kXEhXtycqxBC03vknA2eNeV610VX+HgO9VpCVZWHtENiArhALCcpoEsJvT29xCBYpSii/wnTpYJFT9yW8tjQCxH0zrmEZJvO1/nMINEBQFScB/nzUELn9asnghNf6vMpSGy0fSM27j87VAXCzJ5lqa6WCL/RrKgvYflow/m5AzUfMQhpqpH1vmh4ba1zZ4123lgnW4pNZDV9kmwXrEagGbWe1rnmsMzHugsECiYQyIngjWzHfpHgyEr49Uc5bMM1MlTypeHYYL4kV1jJ8Ou0SC4aV+49p8Onmb2NlVY7JKV7hqDCuZPI164YXMxhPNst4XK0/ENhoOE+8iB6
136 438173c415874f6ac653efc1099dec9c9150e90f 0 iQIVAwUAWAZ3okemf/qjRqrOAQj89xAAw/6QZ07yqvH+aZHeGQfgJ/X1Nze/hSMzkqbwGkuUOWD5ztN8+c39EXCn8JlqyLUPD7uGzhTV0299k5fGRihLIseXr0hy/cvVW16uqfeKJ/4/qL9zLS3rwSAgWbaHd1s6UQZVfGCb8V6oC1dkJxfrE9h6kugBqV97wStIRxmCpMDjsFv/zdNwsv6eEdxbiMilLn2/IbWXFOVKJzzv9iEY5Pu5McFR+nnrMyUZQhyGtVPLSkoEPsOysorfCZaVLJ6MnVaJunp9XEv94Pqx9+k+shsQvJHWkc0Nnb6uDHZYkLR5v2AbFsbJ9jDHsdr9A7qeQTiZay7PGI0uPoIrkmLya3cYbU1ADhwloAeQ/3gZLaJaKEjrXcFSsz7AZ9yq74rTwiPulF8uqZxJUodk2m/zy83HBrxxp/vgxWJ5JP2WXPtB8qKY+05umAt4rQS+fd2H/xOu2V2d5Mq1WmgknLBLC0ItaNaf91sSHtgEy22GtcvWQE7S6VWU1PoSYmOLITdJKAsmb7Eq+yKDW9nt0lOpUu2wUhBGctlgXgcWOmJP6gL6edIg66czAkVBp/fpKNl8Z/A0hhpuH7nW7GW/mzLVQnc+JW4wqUVkwlur3NRfvSt5ZyTY/SaR++nRf62h7PHIjU+f0kWQRdCcEQ0X38b8iAjeXcsOW8NCOPpm0zcz3i8=
136 438173c415874f6ac653efc1099dec9c9150e90f 0 iQIVAwUAWAZ3okemf/qjRqrOAQj89xAAw/6QZ07yqvH+aZHeGQfgJ/X1Nze/hSMzkqbwGkuUOWD5ztN8+c39EXCn8JlqyLUPD7uGzhTV0299k5fGRihLIseXr0hy/cvVW16uqfeKJ/4/qL9zLS3rwSAgWbaHd1s6UQZVfGCb8V6oC1dkJxfrE9h6kugBqV97wStIRxmCpMDjsFv/zdNwsv6eEdxbiMilLn2/IbWXFOVKJzzv9iEY5Pu5McFR+nnrMyUZQhyGtVPLSkoEPsOysorfCZaVLJ6MnVaJunp9XEv94Pqx9+k+shsQvJHWkc0Nnb6uDHZYkLR5v2AbFsbJ9jDHsdr9A7qeQTiZay7PGI0uPoIrkmLya3cYbU1ADhwloAeQ/3gZLaJaKEjrXcFSsz7AZ9yq74rTwiPulF8uqZxJUodk2m/zy83HBrxxp/vgxWJ5JP2WXPtB8qKY+05umAt4rQS+fd2H/xOu2V2d5Mq1WmgknLBLC0ItaNaf91sSHtgEy22GtcvWQE7S6VWU1PoSYmOLITdJKAsmb7Eq+yKDW9nt0lOpUu2wUhBGctlgXgcWOmJP6gL6edIg66czAkVBp/fpKNl8Z/A0hhpuH7nW7GW/mzLVQnc+JW4wqUVkwlur3NRfvSt5ZyTY/SaR++nRf62h7PHIjU+f0kWQRdCcEQ0X38b8iAjeXcsOW8NCOPpm0zcz3i8=
137 eab27446995210c334c3d06f1a659e3b9b5da769 0 iQIcBAABCAAGBQJYGNsXAAoJELnJ3IJKpb3Vf30QAK/dq5vEHEkufLGiYxxkvIyiRaswS+8jamXeHMQrdK8CuokcQYhEv9xiUI6FMIoX4Zc0xfoFCBc+X4qE+Ed9SFYWgQkDs/roJq1C1mTYA+KANMqJkDt00QZq536snFQvjCXAA5fwR/DpgGOOuGMRfvbjh7x8mPyVoPr4HDQCGFXnTYdn193HpTOqUsipzIV5OJqQ9p0sfJjwKP4ZfD0tqqdjTkNwMyJuwuRaReXFvGGCjH2PqkZE/FwQG0NJJjt0xaMUmv5U5tXHC9tEVobVV/qEslqfbH2v1YPF5d8Jmdn7F76FU5J0nTd+3rIVjYGYSt01cR6wtGnzvr/7kw9kbChw4wYhXxnmIALSd48FpA1qWjlPcAdHfUUwObxOxfqmlnBGtAQFK+p5VXCsxDZEIT9MSxscfCjyDQZpkY5S5B3PFIRg6V9bdl5a4rEt27aucuKTHj1Ok2vip4WfaIKk28YMjjzuOQRbr6Pp7mJcCC1/ERHUJdLsaQP+dy18z6XbDjX3O2JDRNYbCBexQyV/Kfrt5EOS5fXiByQUHv+PyR+9Ju6QWkkcFBfgsxq25kFl+eos4V9lxPOY5jDpw2BWu9TyHtTWkjL/YxDUGwUO9WA/WzrcT4skr9FYrFV/oEgi8MkwydC0cFICDfd6tr9upqkkr1W025Im1UBXXJ89bTVj
137 eab27446995210c334c3d06f1a659e3b9b5da769 0 iQIcBAABCAAGBQJYGNsXAAoJELnJ3IJKpb3Vf30QAK/dq5vEHEkufLGiYxxkvIyiRaswS+8jamXeHMQrdK8CuokcQYhEv9xiUI6FMIoX4Zc0xfoFCBc+X4qE+Ed9SFYWgQkDs/roJq1C1mTYA+KANMqJkDt00QZq536snFQvjCXAA5fwR/DpgGOOuGMRfvbjh7x8mPyVoPr4HDQCGFXnTYdn193HpTOqUsipzIV5OJqQ9p0sfJjwKP4ZfD0tqqdjTkNwMyJuwuRaReXFvGGCjH2PqkZE/FwQG0NJJjt0xaMUmv5U5tXHC9tEVobVV/qEslqfbH2v1YPF5d8Jmdn7F76FU5J0nTd+3rIVjYGYSt01cR6wtGnzvr/7kw9kbChw4wYhXxnmIALSd48FpA1qWjlPcAdHfUUwObxOxfqmlnBGtAQFK+p5VXCsxDZEIT9MSxscfCjyDQZpkY5S5B3PFIRg6V9bdl5a4rEt27aucuKTHj1Ok2vip4WfaIKk28YMjjzuOQRbr6Pp7mJcCC1/ERHUJdLsaQP+dy18z6XbDjX3O2JDRNYbCBexQyV/Kfrt5EOS5fXiByQUHv+PyR+9Ju6QWkkcFBfgsxq25kFl+eos4V9lxPOY5jDpw2BWu9TyHtTWkjL/YxDUGwUO9WA/WzrcT4skr9FYrFV/oEgi8MkwydC0cFICDfd6tr9upqkkr1W025Im1UBXXJ89bTVj
138 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs=
138 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs=
139 e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8=
139 e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8=
140 a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7
140 a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7
141 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E=
141 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E=
142 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq
142 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq
143 ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1
143 ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1
144 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY
144 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY
145 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk=
145 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk=
146 bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y=
146 bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y=
147 c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo=
147 c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo=
148 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5
148 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5
149 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0=
149 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0=
150 5544af8622863796a0027566f6b646e10d522c4c 0 iQIcBAABCAAGBQJZjJflAAoJELnJ3IJKpb3V19kQALCvTdPrpce5+rBNbFtLGNFxTMDol1dUy87EUAWiArnfOzW3rKBdYxvxDL23BpgUfjRm1fAXdayVvlj6VC6Dyb195OLmc/I9z7SjFxsfmxWilF6U0GIa3W0x37i05EjfcccrBIuSLrvR6AWyJhjLOBCcyAqD/HcEom00/L+o2ry9CDQNLEeVuNewJiupcUqsTIG2yS26lWbtLZuoqS2T4Nlg8wjJhiSXlsZSuAF55iUJKlTQP6KyWReiaYuEVfm/Bybp0A2bFcZCYpWPwnwKBdSCHhIalH8PO57gh9J7xJVnyyBg5PU6n4l6PrGOmKhNiU/xyNe36tEAdMW6svcVvt8hiY0dnwWqR6wgnFFDu0lnTMUcjsy5M5FBY6wSw9Fph8zcNRzYyaeUbasNonPvrIrk21nT3ET3RzVR3ri2nJDVF+0GlpogGfk9k7wY3808091BMsyV3448ZPKQeWiK4Yy4UOUwbKV7YAsS5MdDnC1uKjl4GwLn9UCY/+Q2/2R0CBZ13Tox+Nbo6hBRuRGtFIbLK9j7IIUhhZrIZFSh8cDNkC+UMaS52L5z7ECvoYIUpw+MJ7NkMLHIVGZ2Nxn0C7IbGO6uHyR7D6bdNpxilU+WZStHk0ppZItRTm/htar4jifnaCI8F8OQNYmZ3cQhxx6qV2Tyow8arvWb1NYXrocG
150 5544af8622863796a0027566f6b646e10d522c4c 0 iQIcBAABCAAGBQJZjJflAAoJELnJ3IJKpb3V19kQALCvTdPrpce5+rBNbFtLGNFxTMDol1dUy87EUAWiArnfOzW3rKBdYxvxDL23BpgUfjRm1fAXdayVvlj6VC6Dyb195OLmc/I9z7SjFxsfmxWilF6U0GIa3W0x37i05EjfcccrBIuSLrvR6AWyJhjLOBCcyAqD/HcEom00/L+o2ry9CDQNLEeVuNewJiupcUqsTIG2yS26lWbtLZuoqS2T4Nlg8wjJhiSXlsZSuAF55iUJKlTQP6KyWReiaYuEVfm/Bybp0A2bFcZCYpWPwnwKBdSCHhIalH8PO57gh9J7xJVnyyBg5PU6n4l6PrGOmKhNiU/xyNe36tEAdMW6svcVvt8hiY0dnwWqR6wgnFFDu0lnTMUcjsy5M5FBY6wSw9Fph8zcNRzYyaeUbasNonPvrIrk21nT3ET3RzVR3ri2nJDVF+0GlpogGfk9k7wY3808091BMsyV3448ZPKQeWiK4Yy4UOUwbKV7YAsS5MdDnC1uKjl4GwLn9UCY/+Q2/2R0CBZ13Tox+Nbo6hBRuRGtFIbLK9j7IIUhhZrIZFSh8cDNkC+UMaS52L5z7ECvoYIUpw+MJ7NkMLHIVGZ2Nxn0C7IbGO6uHyR7D6bdNpxilU+WZStHk0ppZItRTm/htar4jifnaCI8F8OQNYmZ3cQhxx6qV2Tyow8arvWb1NYXrocG
151 943c91326b23954e6e1c6960d0239511f9530258 0 iQIcBAABCAAGBQJZjKKZAAoJELnJ3IJKpb3VGQkP/0iF6Khef0lBaRhbSAPwa7RUBb3iaBeuwmeic/hUjMoU1E5NR36bDDaF3u2di5mIYPBONFIeCPf9/DKyFkidueX1UnlAQa3mjh/QfKTb4/yO2Nrk7eH+QtrYxVUUYYjwgp4rS0Nd/++I1IUOor54vqJzJ7ZnM5O1RsE7VI1esAC/BTlUuO354bbm08B0owsZBwVvcVvpV4zeTvq5qyPxBJ3M0kw83Pgwh3JZB9IYhOabhSUBcA2fIPHgYGYnJVC+bLOeMWI1HJkJeoYfClNUiQUjAmi0cdTC733eQnHkDw7xyyFi+zkKu6JmU1opxkHSuj4Hrjul7Gtw3vVWWUPufz3AK7oymNp2Xr5y1HQLDtNJP3jicTTG1ae2TdX5Az3ze0I8VGbpR81/6ShAvY2cSKttV3I+2k4epxTTTf0xaZS1eUdnFOox6acElG2reNzx7EYYxpHj17K8N2qNzyY78iPgbJ+L39PBFoiGXMZJqWCxxIHoK1MxlXa8WwSnsXAU768dJvEn2N1x3fl+aeaWzeM4/5Qd83YjFuCeycuRnIo3rejSX3rWFAwZE0qQHKI5YWdKDLxIfdHTjdfMP7np+zLcHt0DV/dHmj2hKQgU0OK04fx7BrmdS1tw67Y9bL3H3TDohn7khU1FrqrKVuqSLbLsxnNyWRbZQF+DCoYrHlIW
151 943c91326b23954e6e1c6960d0239511f9530258 0 iQIcBAABCAAGBQJZjKKZAAoJELnJ3IJKpb3VGQkP/0iF6Khef0lBaRhbSAPwa7RUBb3iaBeuwmeic/hUjMoU1E5NR36bDDaF3u2di5mIYPBONFIeCPf9/DKyFkidueX1UnlAQa3mjh/QfKTb4/yO2Nrk7eH+QtrYxVUUYYjwgp4rS0Nd/++I1IUOor54vqJzJ7ZnM5O1RsE7VI1esAC/BTlUuO354bbm08B0owsZBwVvcVvpV4zeTvq5qyPxBJ3M0kw83Pgwh3JZB9IYhOabhSUBcA2fIPHgYGYnJVC+bLOeMWI1HJkJeoYfClNUiQUjAmi0cdTC733eQnHkDw7xyyFi+zkKu6JmU1opxkHSuj4Hrjul7Gtw3vVWWUPufz3AK7oymNp2Xr5y1HQLDtNJP3jicTTG1ae2TdX5Az3ze0I8VGbpR81/6ShAvY2cSKttV3I+2k4epxTTTf0xaZS1eUdnFOox6acElG2reNzx7EYYxpHj17K8N2qNzyY78iPgbJ+L39PBFoiGXMZJqWCxxIHoK1MxlXa8WwSnsXAU768dJvEn2N1x3fl+aeaWzeM4/5Qd83YjFuCeycuRnIo3rejSX3rWFAwZE0qQHKI5YWdKDLxIfdHTjdfMP7np+zLcHt0DV/dHmj2hKQgU0OK04fx7BrmdS1tw67Y9bL3H3TDohn7khU1FrqrKVuqSLbLsxnNyWRbZQF+DCoYrHlIW
152 3fee7f7d2da04226914c2258cc2884dc27384fd7 0 iQIcBAABCAAGBQJZjOJfAAoJELnJ3IJKpb3VvikP/iGjfahwkl2BDZYGq6Ia64a0bhEh0iltoWTCCDKMbHuuO+7h07fHpBl/XX5XPnS7imBUVWLOARhVL7aDPb0tu5NZzMKN57XUC/0FWFyf7lXXAVaOapR4kP8RtQvnoxfNSLRgiZQL88KIRBgFc8pbl8hLA6UbcHPsOk4dXKvmfPfHBHnzdUEDcSXDdyOBhuyOSzRs8egXVi3WeX6OaXG3twkw/uCF3pgOMOSyWVDwD+KvK+IBmSxCTKXzsb+pqpc7pPOFWhSXjpbuYUcI5Qy7mpd0bFL3qNqgvUNq2gX5mT6zH/TsVD10oSUjYYqKMO+gi34OgTVWRRoQfWBwrQwxsC/MxH6ZeOetl2YkS13OxdmYpNAFNQ8ye0vZigJRA+wHoC9dn0h8c5X4VJt/dufHeXc887EGJpLg6GDXi5Emr2ydAUhBJKlpi2yss22AmiQ4G9NE1hAjxqhPvkgBK/hpbr3FurV4hjTG6XKsF8I0WdbYz2CW/FEbp1+4T49ChhrwW0orZdEQX7IEjXr45Hs5sTInT90Hy2XG3Kovi0uVMt15cKsSEYDoFHkR4NgCZX2Y+qS5ryH8yqor3xtel3KsBIy6Ywn8pAo2f8flW3nro/O6x+0NKGV+ZZ0uo/FctuQLBrQVs025T1ai/6MbscQXvFVZVPKrUzlQaNPf/IwNOaRa
@@ -1,164 +1,165 b''
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
1 d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
2 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
3 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
4 b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
5 f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
6 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
7 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
8 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
9 eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
10 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
11 3a56574f329a368d645853e0f9e09472aee62349 0.8
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
12 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
13 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
14 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
15 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
16 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
17 fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
18 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
19 bae2e9c838e90a393bae3973a7850280413e091a 1.0
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
20 d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
21 d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
22 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
23 3773e510d433969e277b1863c317b674cbee2065 1.1.1
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
24 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
25 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
26 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
27 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
28 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
29 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
30 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
31 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
32 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
33 ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
34 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
35 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
36 fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
37 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
38 f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
39 bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
40 c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
41 ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
42 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
43 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
44 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
45 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
46 e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
47 a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
48 2b2155623ee2559caf288fd333f30475966c4525 1.7.5
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
49 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
50 aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
51 b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
52 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
53 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
54 de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
55 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
56 d629f1e89021103f1753addcef6b310e4435b184 1.9.2
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
57 351a9292e430e35766c552066ed3e87c557b803b 1.9.3
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
58 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
59 41453d55b481ddfcc1dacb445179649e24ca861d 2.0
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
60 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
61 6344043924497cd06d781d9014c66802285072e4 2.0.2
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
62 db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
63 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
64 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
65 b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
66 d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
67 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
68 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
69 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
70 b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
71 a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
72 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
73 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
74 b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
75 d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
76 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
77 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
78 a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
79 f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
80 a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
81 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
82 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
83 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
84 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
85 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
86 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
87 ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
88 cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
89 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
90 f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
91 f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
92 335a558f81dc73afeab4d7be63617392b130117f 2.7.1
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
93 e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
94 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
95 d825e4025e39d1c39db943cdc89818abd0a87c27 2.8
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
96 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
97 ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
98 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
99 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
100 b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
101 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
102 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
103 2195ac506c6ababe86985b932f4948837c0891b5 3.0
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
104 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
105 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
106 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
107 3178e49892020336491cdc6945885c4de26ffa8b 3.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
108 5dc91146f35369949ea56b40172308158b59063a 3.1.1
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
109 f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
110 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
111 ced632394371a36953ce4d394f86278ae51a2aae 3.2
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
112 643c58303fb0ec020907af28b9e486be299ba043 3.2.1
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
113 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2
114 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3
114 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3
115 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4
115 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4
116 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc
116 db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc
117 fbdd5195528fae4f41feebc1838215c110b25d6a 3.3
117 fbdd5195528fae4f41feebc1838215c110b25d6a 3.3
118 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1
118 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1
119 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2
119 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2
120 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3
120 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3
121 e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc
121 e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc
122 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4
122 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4
123 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1
123 ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1
124 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2
124 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2
125 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc
125 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc
126 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5
126 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5
127 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1
127 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1
128 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2
128 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2
129 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc
129 b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc
130 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6
130 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6
131 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1
131 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1
132 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2
132 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2
133 ea389970c08449440587712117f178d33bab3f1e 3.6.3
133 ea389970c08449440587712117f178d33bab3f1e 3.6.3
134 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc
134 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc
135 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7
135 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7
136 b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1
136 b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1
137 d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2
137 d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2
138 ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3
138 ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3
139 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc
139 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc
140 f85de28eae32e7d3064b1a1321309071bbaaa069 3.8
140 f85de28eae32e7d3064b1a1321309071bbaaa069 3.8
141 a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1
141 a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1
142 aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2
142 aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2
143 a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3
143 a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3
144 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4
144 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4
145 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc
145 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc
146 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9
146 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9
147 ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1
147 ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1
148 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2
148 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2
149 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc
149 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc
150 eab27446995210c334c3d06f1a659e3b9b5da769 4.0
150 eab27446995210c334c3d06f1a659e3b9b5da769 4.0
151 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1
151 b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1
152 e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2
152 e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2
153 a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc
153 a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc
154 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1
154 e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1
155 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1
155 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1
156 ed5b25874d998ababb181a939dd37a16ea644435 4.1.2
156 ed5b25874d998ababb181a939dd37a16ea644435 4.1.2
157 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3
157 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3
158 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc
158 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc
159 bb96d4a497432722623ae60d9bc734a1e360179e 4.2
159 bb96d4a497432722623ae60d9bc734a1e360179e 4.2
160 c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1
160 c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1
161 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2
161 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2
162 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc
162 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc
163 5544af8622863796a0027566f6b646e10d522c4c 4.3
163 5544af8622863796a0027566f6b646e10d522c4c 4.3
164 943c91326b23954e6e1c6960d0239511f9530258 4.2.3
164 943c91326b23954e6e1c6960d0239511f9530258 4.2.3
165 3fee7f7d2da04226914c2258cc2884dc27384fd7 4.3.1
@@ -1,712 +1,712 b''
1 # __init__.py - fsmonitor initialization and overrides
1 # __init__.py - fsmonitor initialization and overrides
2 #
2 #
3 # Copyright 2013-2016 Facebook, Inc.
3 # Copyright 2013-2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''Faster status operations with the Watchman file monitor (EXPERIMENTAL)
8 '''Faster status operations with the Watchman file monitor (EXPERIMENTAL)
9
9
10 Integrates the file-watching program Watchman with Mercurial to produce faster
10 Integrates the file-watching program Watchman with Mercurial to produce faster
11 status results.
11 status results.
12
12
13 On a particular Linux system, for a real-world repository with over 400,000
13 On a particular Linux system, for a real-world repository with over 400,000
14 files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same
14 files hosted on ext4, vanilla `hg status` takes 1.3 seconds. On the same
15 system, with fsmonitor it takes about 0.3 seconds.
15 system, with fsmonitor it takes about 0.3 seconds.
16
16
17 fsmonitor requires no configuration -- it will tell Watchman about your
17 fsmonitor requires no configuration -- it will tell Watchman about your
18 repository as necessary. You'll need to install Watchman from
18 repository as necessary. You'll need to install Watchman from
19 https://facebook.github.io/watchman/ and make sure it is in your PATH.
19 https://facebook.github.io/watchman/ and make sure it is in your PATH.
20
20
21 The following configuration options exist:
21 The following configuration options exist:
22
22
23 ::
23 ::
24
24
25 [fsmonitor]
25 [fsmonitor]
26 mode = {off, on, paranoid}
26 mode = {off, on, paranoid}
27
27
28 When `mode = off`, fsmonitor will disable itself (similar to not loading the
28 When `mode = off`, fsmonitor will disable itself (similar to not loading the
29 extension at all). When `mode = on`, fsmonitor will be enabled (the default).
29 extension at all). When `mode = on`, fsmonitor will be enabled (the default).
30 When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem,
30 When `mode = paranoid`, fsmonitor will query both Watchman and the filesystem,
31 and ensure that the results are consistent.
31 and ensure that the results are consistent.
32
32
33 ::
33 ::
34
34
35 [fsmonitor]
35 [fsmonitor]
36 timeout = (float)
36 timeout = (float)
37
37
38 A value, in seconds, that determines how long fsmonitor will wait for Watchman
38 A value, in seconds, that determines how long fsmonitor will wait for Watchman
39 to return results. Defaults to `2.0`.
39 to return results. Defaults to `2.0`.
40
40
41 ::
41 ::
42
42
43 [fsmonitor]
43 [fsmonitor]
44 blacklistusers = (list of userids)
44 blacklistusers = (list of userids)
45
45
46 A list of usernames for which fsmonitor will disable itself altogether.
46 A list of usernames for which fsmonitor will disable itself altogether.
47
47
48 ::
48 ::
49
49
50 [fsmonitor]
50 [fsmonitor]
51 walk_on_invalidate = (boolean)
51 walk_on_invalidate = (boolean)
52
52
53 Whether or not to walk the whole repo ourselves when our cached state has been
53 Whether or not to walk the whole repo ourselves when our cached state has been
54 invalidated, for example when Watchman has been restarted or .hgignore rules
54 invalidated, for example when Watchman has been restarted or .hgignore rules
55 have been changed. Walking the repo in that case can result in competing for
55 have been changed. Walking the repo in that case can result in competing for
56 I/O with Watchman. For large repos it is recommended to set this value to
56 I/O with Watchman. For large repos it is recommended to set this value to
57 false. You may wish to set this to true if you have a very fast filesystem
57 false. You may wish to set this to true if you have a very fast filesystem
58 that can outpace the IPC overhead of getting the result data for the full repo
58 that can outpace the IPC overhead of getting the result data for the full repo
59 from Watchman. Defaults to false.
59 from Watchman. Defaults to false.
60
60
61 fsmonitor is incompatible with the largefiles and eol extensions, and
61 fsmonitor is incompatible with the largefiles and eol extensions, and
62 will disable itself if any of those are active.
62 will disable itself if any of those are active.
63
63
64 '''
64 '''
65
65
66 # Platforms Supported
66 # Platforms Supported
67 # ===================
67 # ===================
68 #
68 #
69 # **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably,
69 # **Linux:** *Stable*. Watchman and fsmonitor are both known to work reliably,
70 # even under severe loads.
70 # even under severe loads.
71 #
71 #
72 # **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor
72 # **Mac OS X:** *Stable*. The Mercurial test suite passes with fsmonitor
73 # turned on, on case-insensitive HFS+. There has been a reasonable amount of
73 # turned on, on case-insensitive HFS+. There has been a reasonable amount of
74 # user testing under normal loads.
74 # user testing under normal loads.
75 #
75 #
76 # **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but
76 # **Solaris, BSD:** *Alpha*. watchman and fsmonitor are believed to work, but
77 # very little testing has been done.
77 # very little testing has been done.
78 #
78 #
79 # **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet.
79 # **Windows:** *Alpha*. Not in a release version of watchman or fsmonitor yet.
80 #
80 #
81 # Known Issues
81 # Known Issues
82 # ============
82 # ============
83 #
83 #
84 # * fsmonitor will disable itself if any of the following extensions are
84 # * fsmonitor will disable itself if any of the following extensions are
85 # enabled: largefiles, inotify, eol; or if the repository has subrepos.
85 # enabled: largefiles, inotify, eol; or if the repository has subrepos.
86 # * fsmonitor will produce incorrect results if nested repos that are not
86 # * fsmonitor will produce incorrect results if nested repos that are not
87 # subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`.
87 # subrepos exist. *Workaround*: add nested repo paths to your `.hgignore`.
88 #
88 #
89 # The issues related to nested repos and subrepos are probably not fundamental
89 # The issues related to nested repos and subrepos are probably not fundamental
90 # ones. Patches to fix them are welcome.
90 # ones. Patches to fix them are welcome.
91
91
92 from __future__ import absolute_import
92 from __future__ import absolute_import
93
93
94 import codecs
94 import codecs
95 import hashlib
95 import hashlib
96 import os
96 import os
97 import stat
97 import stat
98 import sys
98 import sys
99
99
100 from mercurial.i18n import _
100 from mercurial.i18n import _
101 from mercurial import (
101 from mercurial import (
102 context,
102 context,
103 encoding,
103 encoding,
104 error,
104 error,
105 extensions,
105 extensions,
106 localrepo,
106 localrepo,
107 merge,
107 merge,
108 pathutil,
108 pathutil,
109 pycompat,
109 pycompat,
110 scmutil,
110 scmutil,
111 util,
111 util,
112 )
112 )
113 from mercurial import match as matchmod
113 from mercurial import match as matchmod
114
114
115 from . import (
115 from . import (
116 pywatchman,
116 pywatchman,
117 state,
117 state,
118 watchmanclient,
118 watchmanclient,
119 )
119 )
120
120
121 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
121 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
122 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
122 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
123 # be specifying the version(s) of Mercurial they are tested with, or
123 # be specifying the version(s) of Mercurial they are tested with, or
124 # leave the attribute unspecified.
124 # leave the attribute unspecified.
125 testedwith = 'ships-with-hg-core'
125 testedwith = 'ships-with-hg-core'
126
126
127 # This extension is incompatible with the following blacklisted extensions
127 # This extension is incompatible with the following blacklisted extensions
128 # and will disable itself when encountering one of these:
128 # and will disable itself when encountering one of these:
129 _blacklist = ['largefiles', 'eol']
129 _blacklist = ['largefiles', 'eol']
130
130
131 def _handleunavailable(ui, state, ex):
131 def _handleunavailable(ui, state, ex):
132 """Exception handler for Watchman interaction exceptions"""
132 """Exception handler for Watchman interaction exceptions"""
133 if isinstance(ex, watchmanclient.Unavailable):
133 if isinstance(ex, watchmanclient.Unavailable):
134 if ex.warn:
134 if ex.warn:
135 ui.warn(str(ex) + '\n')
135 ui.warn(str(ex) + '\n')
136 if ex.invalidate:
136 if ex.invalidate:
137 state.invalidate()
137 state.invalidate()
138 ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
138 ui.log('fsmonitor', 'Watchman unavailable: %s\n', ex.msg)
139 else:
139 else:
140 ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
140 ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
141
141
142 def _hashignore(ignore):
142 def _hashignore(ignore):
143 """Calculate hash for ignore patterns and filenames
143 """Calculate hash for ignore patterns and filenames
144
144
145 If this information changes between Mercurial invocations, we can't
145 If this information changes between Mercurial invocations, we can't
146 rely on Watchman information anymore and have to re-scan the working
146 rely on Watchman information anymore and have to re-scan the working
147 copy.
147 copy.
148
148
149 """
149 """
150 sha1 = hashlib.sha1()
150 sha1 = hashlib.sha1()
151 sha1.update(repr(ignore))
151 sha1.update(repr(ignore))
152 return sha1.hexdigest()
152 return sha1.hexdigest()
153
153
# Watchman's path encoding can differ from the local filesystem encoding
# (for example, on Windows watchman always speaks utf-8).  Remember both
# encodings and precompute whether any conversion is needed at all.
_watchmanencoding = pywatchman.encoding.get_local_encoding()
_fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
_fixencoding = codecs.lookup(_watchmanencoding) != codecs.lookup(_fsencoding)
157
157
def _watchmantofsencoding(path):
    """Fix path to match watchman and local filesystem encoding

    watchman's paths encoding can differ from filesystem encoding. For
    example, on Windows, it's always utf-8.
    """
    try:
        decoded = path.decode(_watchmanencoding)
    except UnicodeDecodeError as e:
        raise error.Abort(str(e), hint='watchman encoding error')

    try:
        return decoded.encode(_fsencoding, 'strict')
    except UnicodeEncodeError as e:
        raise error.Abort(str(e))
175
175
def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
    '''Replacement for dirstate.walk, hooking into Watchman.

    Whenever full is False, ignored is False, and the Watchman client is
    available, use Watchman combined with saved state to possibly return only a
    subset of files.'''
    def bail():
        # Fall back to the original (full) walk.
        return orig(match, subrepos, unknown, ignored, full=True)

    if full or ignored or not self._watchmanclient.available():
        return bail()
    state = self._fsmonitorstate
    clock, ignorehash, notefiles = state.get()
    if not clock:
        if state.walk_on_invalidate:
            return bail()
        # Initial NULL clock value, see
        # https://facebook.github.io/watchman/docs/clockspec.html
        clock = 'c:0:0'
        notefiles = []

    def fwarn(f, msg):
        self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
        return False

    def badtype(mode):
        kind = _('unknown')
        if stat.S_ISCHR(mode):
            kind = _('character device')
        elif stat.S_ISBLK(mode):
            kind = _('block device')
        elif stat.S_ISFIFO(mode):
            kind = _('fifo')
        elif stat.S_ISSOCK(mode):
            kind = _('socket')
        elif stat.S_ISDIR(mode):
            kind = _('directory')
        return _('unsupported file type (type is %s)') % kind

    ignore = self._ignore
    dirignore = self._dirignore
    if unknown:
        if _hashignore(ignore) != ignorehash and clock != 'c:0:0':
            # ignore list changed -- can't rely on Watchman state any more
            if state.walk_on_invalidate:
                return bail()
            notefiles = []
            clock = 'c:0:0'
    else:
        # always ignore
        ignore = util.always
        dirignore = util.always

    matchfn = match.matchfn
    matchalways = match.always()
    dmap = self._map
    nonnormalset = getattr(self, '_nonnormalset', None)

    copymap = self._copymap
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    normcase = util.normcase
    fresh_instance = False

    exact = skipstep3 = False
    if match.isexact():  # match.exact
        exact = True
        dirignore = util.always  # skip step 2
    elif match.prefix():  # match.match, no patterns
        skipstep3 = True

    if not exact and self._checkcase:
        # note that even though we could receive directory entries, we're only
        # interested in checking if a file with the same name exists. So only
        # normalize files if possible.
        normalize = self._normalizefile
        skipstep3 = False
    else:
        normalize = None

    # step 1: find all explicit files
    results, work, dirsnotfound = self._walkexplicit(match, subrepos)

    skipstep3 = skipstep3 and not (work or dirsnotfound)
    work = [d for d in work if not dirignore(d[0])]

    if not work and (exact or skipstep3):
        for s in subrepos:
            del results[s]
        del results['.hg']
        return results

    # step 2: query Watchman
    try:
        # Use the user-configured timeout for the query.
        # Add a little slack over the top of the user query to allow for
        # overheads while transferring the data
        self._watchmanclient.settimeout(state.timeout + 0.1)
        result = self._watchmanclient.command('query', {
            'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
            'since': clock,
            'expression': [
                'not', [
                    'anyof', ['dirname', '.hg'],
                    ['name', '.hg', 'wholename']
                ]
            ],
            'sync_timeout': int(state.timeout * 1000),
            'empty_on_fresh_instance': state.walk_on_invalidate,
        })
    except Exception as ex:
        _handleunavailable(self._ui, state, ex)
        self._watchmanclient.clearconnection()
        return bail()
    else:
        # We need to propagate the last observed clock up so that we
        # can use it for our next query
        state.setlastclock(result['clock'])
        if result['is_fresh_instance']:
            if state.walk_on_invalidate:
                state.invalidate()
                return bail()
            fresh_instance = True
            # Ignore any prior notable files from the state info
            notefiles = []

    # for file paths which require normalization and we encounter a case
    # collision, we store our own foldmap
    if normalize:
        foldmap = dict((normcase(k), k) for k in results)

    switch_slashes = pycompat.ossep == '\\'
    # The order of the results is, strictly speaking, undefined.
    # For case changes on a case insensitive filesystem we may receive
    # two entries, one with exists=True and another with exists=False.
    # The exists=True entries in the same response should be interpreted
    # as being happens-after the exists=False entries due to the way that
    # Watchman tracks files. We use this property to reconcile deletes
    # for name case changes.
    for entry in result['files']:
        fname = entry['name']
        if _fixencoding:
            fname = _watchmantofsencoding(fname)
        if switch_slashes:
            fname = fname.replace('\\', '/')
        if normalize:
            normed = normcase(fname)
            fname = normalize(fname, True, True)
            foldmap[normed] = fname
        fmode = entry['mode']
        fexists = entry['exists']
        kind = getkind(fmode)

        if not fexists:
            # if marked as deleted and we don't already have a change
            # record, mark it as deleted. If we already have an entry
            # for fname then it was either part of walkexplicit or was
            # an earlier result that was a case change
            if fname not in results and fname in dmap and (
                    matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == dirkind:
            if fname in dmap and (matchalways or matchfn(fname)):
                results[fname] = None
        elif kind == regkind or kind == lnkkind:
            if fname in dmap:
                if matchalways or matchfn(fname):
                    results[fname] = entry
            elif (matchalways or matchfn(fname)) and not ignore(fname):
                results[fname] = entry
        elif fname in dmap and (matchalways or matchfn(fname)):
            results[fname] = None

    # step 3: query notable files we don't already know about
    # XXX try not to iterate over the entire dmap
    if normalize:
        # any notable files that have changed case will already be handled
        # above, so just check membership in the foldmap
        notefiles = set((normalize(f, True, True) for f in notefiles
                         if normcase(f) not in foldmap))
    visit = set((f for f in notefiles if (f not in results and matchfn(f)
                                          and (f in dmap or not ignore(f)))))

    if nonnormalset is not None and not fresh_instance:
        if matchalways:
            visit.update(f for f in nonnormalset if f not in results)
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f in nonnormalset
                         if f not in results and matchfn(f))
            visit.update(f for f in copymap
                         if f not in results and matchfn(f))
    else:
        if matchalways:
            visit.update(f for f, st in dmap.iteritems()
                         if (f not in results and
                             (st[2] < 0 or st[0] != 'n' or fresh_instance)))
            visit.update(f for f in copymap if f not in results)
        else:
            visit.update(f for f, st in dmap.iteritems()
                         if (f not in results and
                             (st[2] < 0 or st[0] != 'n' or fresh_instance)
                             and matchfn(f)))
            visit.update(f for f in copymap
                         if f not in results and matchfn(f))

    audit = pathutil.pathauditor(self._root, cached=True).check
    auditpass = [f for f in visit if audit(f)]
    auditpass.sort()
    auditfail = visit.difference(auditpass)
    for f in auditfail:
        results[f] = None

    nf = iter(auditpass).next
    for st in util.statfiles([join(f) for f in auditpass]):
        f = nf()
        if st or f in dmap:
            results[f] = st

    for s in subrepos:
        del results[s]
    del results['.hg']
    return results
402
402
def overridestatus(
        orig, self, node1='.', node2=None, match=None, ignored=False,
        clean=False, unknown=False, listsubrepos=False):
    """Replacement for localrepository.status, hooking into Watchman.

    Delegates to the original status implementation, but brackets it with
    fsmonitor state maintenance and (in 'paranoid' mode) a verification
    pass against the non-Watchman walk.
    """
    listignored = ignored
    listclean = clean
    listunknown = unknown

    def _cmpsets(l1, l2):
        # Log any discrepancy between the Watchman-assisted status (l1) and
        # the plain walk (l2) for paranoid-mode debugging.
        try:
            if 'FSMONITOR_LOG_FILE' in encoding.environ:
                fn = encoding.environ['FSMONITOR_LOG_FILE']
                f = open(fn, 'wb')
            else:
                fn = 'fsmonitorfail.log'
                f = self.opener(fn, 'wb')
        except (IOError, OSError):
            self.ui.warn(_('warning: unable to write to %s\n') % fn)
            return

        try:
            for i, (s1, s2) in enumerate(zip(l1, l2)):
                if set(s1) != set(s2):
                    f.write('sets at position %d are unequal\n' % i)
                    f.write('watchman returned: %s\n' % s1)
                    f.write('stat returned: %s\n' % s2)
        finally:
            f.close()

    if isinstance(node1, context.changectx):
        ctx1 = node1
    else:
        ctx1 = self[node1]
    if isinstance(node2, context.changectx):
        ctx2 = node2
    else:
        ctx2 = self[node2]

    working = ctx2.rev() is None
    parentworking = working and ctx1 == self['.']
    match = match or matchmod.always(self.root, self.getcwd())

    # Maybe we can use this opportunity to update Watchman's state.
    # Mercurial uses workingcommitctx and/or memctx to represent the part of
    # the workingctx that is to be committed. So don't update the state in
    # that case.
    # HG_PENDING is set in the environment when the dirstate is being updated
    # in the middle of a transaction; we must not update our state in that
    # case, or we risk forgetting about changes in the working copy.
    updatestate = (parentworking and match.always() and
                   not isinstance(ctx2, (context.workingcommitctx,
                                         context.memctx)) and
                   'HG_PENDING' not in encoding.environ)

    try:
        if self._fsmonitorstate.walk_on_invalidate:
            # Use a short timeout to query the current clock. If that
            # takes too long then we assume that the service will be slow
            # to answer our query.
            # walk_on_invalidate indicates that we prefer to walk the
            # tree ourselves because we can ignore portions that Watchman
            # cannot and we tend to be faster in the warmer buffer cache
            # cases.
            self._watchmanclient.settimeout(0.1)
        else:
            # Give Watchman more time to potentially complete its walk
            # and return the initial clock. In this mode we assume that
            # the filesystem will be slower than parsing a potentially
            # very large Watchman result set.
            self._watchmanclient.settimeout(
                self._fsmonitorstate.timeout + 0.1)
        startclock = self._watchmanclient.getcurrentclock()
    except Exception as ex:
        self._watchmanclient.clearconnection()
        _handleunavailable(self.ui, self._fsmonitorstate, ex)
        # boo, Watchman failed. bail
        return orig(node1, node2, match, listignored, listclean,
                    listunknown, listsubrepos)

    if updatestate:
        # We need info about unknown files. This may make things slower the
        # first time, but whatever.
        stateunknown = True
    else:
        stateunknown = listunknown

    if updatestate:
        ps = poststatus(startclock)
        self.addpostdsstatus(ps)

    r = orig(node1, node2, match, listignored, listclean, stateunknown,
             listsubrepos)
    modified, added, removed, deleted, unknown, ignored, clean = r

    if not listunknown:
        unknown = []

    # don't do paranoid checks if we're not going to query Watchman anyway
    full = listclean or match.traversedir is not None
    if self._fsmonitorstate.mode == 'paranoid' and not full:
        # run status again and fall back to the old walk this time
        self.dirstate._fsmonitordisable = True

        # shut the UI up
        quiet = self.ui.quiet
        self.ui.quiet = True
        fout, ferr = self.ui.fout, self.ui.ferr
        self.ui.fout = self.ui.ferr = open(os.devnull, 'wb')

        try:
            rv2 = orig(
                node1, node2, match, listignored, listclean, listunknown,
                listsubrepos)
        finally:
            self.dirstate._fsmonitordisable = False
            self.ui.quiet = quiet
            self.ui.fout, self.ui.ferr = fout, ferr

        # clean isn't tested since it's set to True above
        _cmpsets([modified, added, removed, deleted, unknown, ignored, clean],
                 rv2)
        modified, added, removed, deleted, unknown, ignored, clean = rv2

    return scmutil.status(
        modified, added, removed, deleted, unknown, ignored, clean)
527
527
class poststatus(object):
    """Post-dirstate-status hook that persists fsmonitor state.

    Records the last observed Watchman clock (falling back to the clock
    sampled before the status run), the hash of the current ignore
    configuration, and the set of files status found notable.
    """

    def __init__(self, startclock):
        # Clock sampled before the status run; used when no later clock
        # was observed.
        self._startclock = startclock

    def __call__(self, wctx, status):
        repo = wctx.repo()
        clock = repo._fsmonitorstate.getlastclock() or self._startclock
        hashignore = _hashignore(repo.dirstate._ignore)
        notefiles = (status.modified + status.added + status.removed +
                     status.deleted + status.unknown)
        repo._fsmonitorstate.set(clock, hashignore, notefiles)
538
538
def makedirstate(repo, dirstate):
    """Swap ``dirstate``'s class for a subclass wired up to fsmonitor."""
    class fsmonitordirstate(dirstate.__class__):
        def _fsmonitorinit(self, fsmonitorstate, watchmanclient):
            # _fsmonitordisable is used in paranoid mode
            self._fsmonitordisable = False
            self._fsmonitorstate = fsmonitorstate
            self._watchmanclient = watchmanclient

        def walk(self, *args, **kwargs):
            # Route walks through Watchman unless paranoid mode disabled it.
            orig = super(fsmonitordirstate, self).walk
            if self._fsmonitordisable:
                return orig(*args, **kwargs)
            return overridewalk(orig, self, *args, **kwargs)

        def rebuild(self, *args, **kwargs):
            # A rebuilt dirstate invalidates any saved fsmonitor state.
            self._fsmonitorstate.invalidate()
            return super(fsmonitordirstate, self).rebuild(*args, **kwargs)

        def invalidate(self, *args, **kwargs):
            self._fsmonitorstate.invalidate()
            return super(fsmonitordirstate, self).invalidate(*args, **kwargs)

    dirstate.__class__ = fsmonitordirstate
    dirstate._fsmonitorinit(repo._fsmonitorstate, repo._watchmanclient)
563
563
def wrapdirstate(orig, self):
    """Filecache wrapper: upgrade newly-created dirstates to fsmonitor."""
    ds = orig(self)
    # only override the dirstate when Watchman is available for the repo
    if util.safehasattr(self, '_fsmonitorstate'):
        makedirstate(self, ds)
    return ds
570
570
def extsetup(ui):
    """Install the fsmonitor wrappers at extension-setup time."""
    extensions.wrapfilecache(
        localrepo.localrepository, 'dirstate', wrapdirstate)
    if pycompat.sysplatform == 'darwin':
        # An assist for avoiding the dangling-symlink fsevents bug
        extensions.wrapfunction(os, 'symlink', wrapsymlink)

    extensions.wrapfunction(merge, 'update', wrapupdate)
579
579
def wrapsymlink(orig, source, link_name):
    '''if we create a dangling symlink, also touch the parent dir
    to encourage fsevents notifications to work more correctly'''
    try:
        return orig(source, link_name)
    finally:
        # Best-effort touch of the parent directory; never mask the
        # result (or exception) of the wrapped symlink call.
        try:
            os.utime(os.path.dirname(link_name), None)
        except OSError:
            pass
590
590
class state_update(object):
    '''This context manager is responsible for dispatching the state-enter
    and state-leave signals to the watchman service'''

    def __init__(self, repo, node, distance, partial):
        self.repo = repo
        self.node = node              # target revision of the update
        self.distance = distance      # approx. commits between current/target
        self.partial = partial        # whether the wc parent is changing
        self._lock = None
        self.need_leave = False

    def __enter__(self):
        # We explicitly need to take a lock here, before we proceed to update
        # watchman about the update operation, so that we don't race with
        # some other actor. merge.update is going to take the wlock almost
        # immediately anyway, so this is effectively extending the lock
        # around a couple of short sanity checks.
        self._lock = self.repo.wlock()
        self.need_leave = self._state('state-enter')
        return self

    def __exit__(self, type_, value, tb):
        try:
            if self.need_leave:
                status = 'ok' if type_ is None else 'failed'
                self._state('state-leave', status=status)
        finally:
            # Release the wlock even if the state-leave notification fails.
            if self._lock:
                self._lock.release()

    def _state(self, cmd, status='ok'):
        # Notify watchman; returns True iff the command was delivered.
        if not util.safehasattr(self.repo, '_watchmanclient'):
            return False
        try:
            commithash = self.repo[self.node].hex()
            self.repo._watchmanclient.command(cmd, {
                'name': 'hg.update',
                'metadata': {
                    # the target revision
                    'rev': commithash,
                    # approximate number of commits between current and target
                    'distance': self.distance,
                    # success/failure (only really meaningful for state-leave)
                    'status': status,
                    # whether the working copy parent is changing
                    'partial': self.partial,
                }})
            return True
        except Exception as e:
            # Swallow any errors; fire and forget
            self.repo.ui.log(
                'watchman', 'Exception %s while running %s\n', e, cmd)
            return False
645
645
646 # Bracket working copy updates with calls to the watchman state-enter
646 # Bracket working copy updates with calls to the watchman state-enter
647 # and state-leave commands. This allows clients to perform more intelligent
647 # and state-leave commands. This allows clients to perform more intelligent
648 # settling during bulk file change scenarios
648 # settling during bulk file change scenarios
649 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
649 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
650 def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None,
650 def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None,
651 mergeancestor=False, labels=None, matcher=None, **kwargs):
651 mergeancestor=False, labels=None, matcher=None, **kwargs):
652
652
653 distance = 0
653 distance = 0
654 partial = True
654 partial = True
655 if matcher is None or matcher.always():
655 if matcher is None or matcher.always():
656 partial = False
656 partial = False
657 wc = repo[None]
657 wc = repo[None]
658 parents = wc.parents()
658 parents = wc.parents()
659 if len(parents) == 2:
659 if len(parents) == 2:
660 anc = repo.changelog.ancestor(parents[0].node(), parents[1].node())
660 anc = repo.changelog.ancestor(parents[0].node(), parents[1].node())
661 ancrev = repo[anc].rev()
661 ancrev = repo[anc].rev()
662 distance = abs(repo[node].rev() - ancrev)
662 distance = abs(repo[node].rev() - ancrev)
663 elif len(parents) == 1:
663 elif len(parents) == 1:
664 distance = abs(repo[node].rev() - parents[0].rev())
664 distance = abs(repo[node].rev() - parents[0].rev())
665
665
666 with state_update(repo, node, distance, partial):
666 with state_update(repo, node, distance, partial):
667 return orig(
667 return orig(
668 repo, node, branchmerge, force, ancestor, mergeancestor,
668 repo, node, branchmerge, force, ancestor, mergeancestor,
669 labels, matcher, **kwargs)
669 labels, matcher, **kwargs)
670
670
671 def reposetup(ui, repo):
671 def reposetup(ui, repo):
672 # We don't work with largefiles or inotify
672 # We don't work with largefiles or inotify
673 exts = extensions.enabled()
673 exts = extensions.enabled()
674 for ext in _blacklist:
674 for ext in _blacklist:
675 if ext in exts:
675 if ext in exts:
676 ui.warn(_('The fsmonitor extension is incompatible with the %s '
676 ui.warn(_('The fsmonitor extension is incompatible with the %s '
677 'extension and has been disabled.\n') % ext)
677 'extension and has been disabled.\n') % ext)
678 return
678 return
679
679
680 if repo.local():
680 if repo.local():
681 # We don't work with subrepos either.
681 # We don't work with subrepos either.
682 #
682 #
683 # if repo[None].substate can cause a dirstate parse, which is too
683 # if repo[None].substate can cause a dirstate parse, which is too
684 # slow. Instead, look for a file called hgsubstate,
684 # slow. Instead, look for a file called hgsubstate,
685 if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
685 if repo.wvfs.exists('.hgsubstate') or repo.wvfs.exists('.hgsub'):
686 return
686 return
687
687
688 fsmonitorstate = state.state(repo)
688 fsmonitorstate = state.state(repo)
689 if fsmonitorstate.mode == 'off':
689 if fsmonitorstate.mode == 'off':
690 return
690 return
691
691
692 try:
692 try:
693 client = watchmanclient.client(repo)
693 client = watchmanclient.client(repo)
694 except Exception as ex:
694 except Exception as ex:
695 _handleunavailable(ui, fsmonitorstate, ex)
695 _handleunavailable(ui, fsmonitorstate, ex)
696 return
696 return
697
697
698 repo._fsmonitorstate = fsmonitorstate
698 repo._fsmonitorstate = fsmonitorstate
699 repo._watchmanclient = client
699 repo._watchmanclient = client
700
700
701 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
701 dirstate, cached = localrepo.isfilecached(repo, 'dirstate')
702 if cached:
702 if cached:
703 # at this point since fsmonitorstate wasn't present,
703 # at this point since fsmonitorstate wasn't present,
704 # repo.dirstate is not a fsmonitordirstate
704 # repo.dirstate is not a fsmonitordirstate
705 makedirstate(repo, dirstate)
705 makedirstate(repo, dirstate)
706
706
707 class fsmonitorrepo(repo.__class__):
707 class fsmonitorrepo(repo.__class__):
708 def status(self, *args, **kwargs):
708 def status(self, *args, **kwargs):
709 orig = super(fsmonitorrepo, self).status
709 orig = super(fsmonitorrepo, self).status
710 return overridestatus(orig, self, *args, **kwargs)
710 return overridestatus(orig, self, *args, **kwargs)
711
711
712 repo.__class__ = fsmonitorrepo
712 repo.__class__ = fsmonitorrepo
@@ -1,3762 +1,3762 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dirstateguard,
29 dirstateguard,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 registrar,
40 registrar,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 vfs as vfsmod,
48 vfs as vfsmod,
49 )
49 )
50 stringio = util.stringio
50 stringio = util.stringio
51
51
52 # templates of common command options
52 # templates of common command options
53
53
54 dryrunopts = [
54 dryrunopts = [
55 ('n', 'dry-run', None,
55 ('n', 'dry-run', None,
56 _('do not perform actions, just print output')),
56 _('do not perform actions, just print output')),
57 ]
57 ]
58
58
59 remoteopts = [
59 remoteopts = [
60 ('e', 'ssh', '',
60 ('e', 'ssh', '',
61 _('specify ssh command to use'), _('CMD')),
61 _('specify ssh command to use'), _('CMD')),
62 ('', 'remotecmd', '',
62 ('', 'remotecmd', '',
63 _('specify hg command to run on the remote side'), _('CMD')),
63 _('specify hg command to run on the remote side'), _('CMD')),
64 ('', 'insecure', None,
64 ('', 'insecure', None,
65 _('do not verify server certificate (ignoring web.cacerts config)')),
65 _('do not verify server certificate (ignoring web.cacerts config)')),
66 ]
66 ]
67
67
68 walkopts = [
68 walkopts = [
69 ('I', 'include', [],
69 ('I', 'include', [],
70 _('include names matching the given patterns'), _('PATTERN')),
70 _('include names matching the given patterns'), _('PATTERN')),
71 ('X', 'exclude', [],
71 ('X', 'exclude', [],
72 _('exclude names matching the given patterns'), _('PATTERN')),
72 _('exclude names matching the given patterns'), _('PATTERN')),
73 ]
73 ]
74
74
75 commitopts = [
75 commitopts = [
76 ('m', 'message', '',
76 ('m', 'message', '',
77 _('use text as commit message'), _('TEXT')),
77 _('use text as commit message'), _('TEXT')),
78 ('l', 'logfile', '',
78 ('l', 'logfile', '',
79 _('read commit message from file'), _('FILE')),
79 _('read commit message from file'), _('FILE')),
80 ]
80 ]
81
81
82 commitopts2 = [
82 commitopts2 = [
83 ('d', 'date', '',
83 ('d', 'date', '',
84 _('record the specified date as commit date'), _('DATE')),
84 _('record the specified date as commit date'), _('DATE')),
85 ('u', 'user', '',
85 ('u', 'user', '',
86 _('record the specified user as committer'), _('USER')),
86 _('record the specified user as committer'), _('USER')),
87 ]
87 ]
88
88
89 # hidden for now
89 # hidden for now
90 formatteropts = [
90 formatteropts = [
91 ('T', 'template', '',
91 ('T', 'template', '',
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 ]
93 ]
94
94
95 templateopts = [
95 templateopts = [
96 ('', 'style', '',
96 ('', 'style', '',
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 ('T', 'template', '',
98 ('T', 'template', '',
99 _('display with template'), _('TEMPLATE')),
99 _('display with template'), _('TEMPLATE')),
100 ]
100 ]
101
101
102 logopts = [
102 logopts = [
103 ('p', 'patch', None, _('show patch')),
103 ('p', 'patch', None, _('show patch')),
104 ('g', 'git', None, _('use git extended diff format')),
104 ('g', 'git', None, _('use git extended diff format')),
105 ('l', 'limit', '',
105 ('l', 'limit', '',
106 _('limit number of changes displayed'), _('NUM')),
106 _('limit number of changes displayed'), _('NUM')),
107 ('M', 'no-merges', None, _('do not show merges')),
107 ('M', 'no-merges', None, _('do not show merges')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('G', 'graph', None, _("show the revision DAG")),
109 ('G', 'graph', None, _("show the revision DAG")),
110 ] + templateopts
110 ] + templateopts
111
111
112 diffopts = [
112 diffopts = [
113 ('a', 'text', None, _('treat all files as text')),
113 ('a', 'text', None, _('treat all files as text')),
114 ('g', 'git', None, _('use git extended diff format')),
114 ('g', 'git', None, _('use git extended diff format')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'nodates', None, _('omit dates from diff headers'))
116 ('', 'nodates', None, _('omit dates from diff headers'))
117 ]
117 ]
118
118
119 diffwsopts = [
119 diffwsopts = [
120 ('w', 'ignore-all-space', None,
120 ('w', 'ignore-all-space', None,
121 _('ignore white space when comparing lines')),
121 _('ignore white space when comparing lines')),
122 ('b', 'ignore-space-change', None,
122 ('b', 'ignore-space-change', None,
123 _('ignore changes in the amount of white space')),
123 _('ignore changes in the amount of white space')),
124 ('B', 'ignore-blank-lines', None,
124 ('B', 'ignore-blank-lines', None,
125 _('ignore changes whose lines are all blank')),
125 _('ignore changes whose lines are all blank')),
126 ]
126 ]
127
127
128 diffopts2 = [
128 diffopts2 = [
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('p', 'show-function', None, _('show which function each change is in')),
130 ('p', 'show-function', None, _('show which function each change is in')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ] + diffwsopts + [
132 ] + diffwsopts + [
133 ('U', 'unified', '',
133 ('U', 'unified', '',
134 _('number of lines of context to show'), _('NUM')),
134 _('number of lines of context to show'), _('NUM')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ]
137 ]
138
138
139 mergetoolopts = [
139 mergetoolopts = [
140 ('t', 'tool', '', _('specify merge tool')),
140 ('t', 'tool', '', _('specify merge tool')),
141 ]
141 ]
142
142
143 similarityopts = [
143 similarityopts = [
144 ('s', 'similarity', '',
144 ('s', 'similarity', '',
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 ]
146 ]
147
147
148 subrepoopts = [
148 subrepoopts = [
149 ('S', 'subrepos', None,
149 ('S', 'subrepos', None,
150 _('recurse into subrepositories'))
150 _('recurse into subrepositories'))
151 ]
151 ]
152
152
153 debugrevlogopts = [
153 debugrevlogopts = [
154 ('c', 'changelog', False, _('open changelog')),
154 ('c', 'changelog', False, _('open changelog')),
155 ('m', 'manifest', False, _('open manifest')),
155 ('m', 'manifest', False, _('open manifest')),
156 ('', 'dir', '', _('open directory manifest')),
156 ('', 'dir', '', _('open directory manifest')),
157 ]
157 ]
158
158
159 # special string such that everything below this line will be ingored in the
159 # special string such that everything below this line will be ingored in the
160 # editor text
160 # editor text
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162
162
163 def ishunk(x):
163 def ishunk(x):
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 return isinstance(x, hunkclasses)
165 return isinstance(x, hunkclasses)
166
166
167 def newandmodified(chunks, originalchunks):
167 def newandmodified(chunks, originalchunks):
168 newlyaddedandmodifiedfiles = set()
168 newlyaddedandmodifiedfiles = set()
169 for chunk in chunks:
169 for chunk in chunks:
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 originalchunks:
171 originalchunks:
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 return newlyaddedandmodifiedfiles
173 return newlyaddedandmodifiedfiles
174
174
175 def parsealiases(cmd):
175 def parsealiases(cmd):
176 return cmd.lstrip("^").split("|")
176 return cmd.lstrip("^").split("|")
177
177
178 def setupwrapcolorwrite(ui):
178 def setupwrapcolorwrite(ui):
179 # wrap ui.write so diff output can be labeled/colorized
179 # wrap ui.write so diff output can be labeled/colorized
180 def wrapwrite(orig, *args, **kw):
180 def wrapwrite(orig, *args, **kw):
181 label = kw.pop('label', '')
181 label = kw.pop('label', '')
182 for chunk, l in patch.difflabel(lambda: args):
182 for chunk, l in patch.difflabel(lambda: args):
183 orig(chunk, label=label + l)
183 orig(chunk, label=label + l)
184
184
185 oldwrite = ui.write
185 oldwrite = ui.write
186 def wrap(*args, **kwargs):
186 def wrap(*args, **kwargs):
187 return wrapwrite(oldwrite, *args, **kwargs)
187 return wrapwrite(oldwrite, *args, **kwargs)
188 setattr(ui, 'write', wrap)
188 setattr(ui, 'write', wrap)
189 return oldwrite
189 return oldwrite
190
190
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 if usecurses:
192 if usecurses:
193 if testfile:
193 if testfile:
194 recordfn = crecordmod.testdecorator(testfile,
194 recordfn = crecordmod.testdecorator(testfile,
195 crecordmod.testchunkselector)
195 crecordmod.testchunkselector)
196 else:
196 else:
197 recordfn = crecordmod.chunkselector
197 recordfn = crecordmod.chunkselector
198
198
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200
200
201 else:
201 else:
202 return patch.filterpatch(ui, originalhunks, operation)
202 return patch.filterpatch(ui, originalhunks, operation)
203
203
204 def recordfilter(ui, originalhunks, operation=None):
204 def recordfilter(ui, originalhunks, operation=None):
205 """ Prompts the user to filter the originalhunks and return a list of
205 """ Prompts the user to filter the originalhunks and return a list of
206 selected hunks.
206 selected hunks.
207 *operation* is used for to build ui messages to indicate the user what
207 *operation* is used for to build ui messages to indicate the user what
208 kind of filtering they are doing: reverting, committing, shelving, etc.
208 kind of filtering they are doing: reverting, committing, shelving, etc.
209 (see patch.filterpatch).
209 (see patch.filterpatch).
210 """
210 """
211 usecurses = crecordmod.checkcurses(ui)
211 usecurses = crecordmod.checkcurses(ui)
212 testfile = ui.config('experimental', 'crecordtest')
212 testfile = ui.config('experimental', 'crecordtest')
213 oldwrite = setupwrapcolorwrite(ui)
213 oldwrite = setupwrapcolorwrite(ui)
214 try:
214 try:
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 testfile, operation)
216 testfile, operation)
217 finally:
217 finally:
218 ui.write = oldwrite
218 ui.write = oldwrite
219 return newchunks, newopts
219 return newchunks, newopts
220
220
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 filterfn, *pats, **opts):
222 filterfn, *pats, **opts):
223 from . import merge as mergemod
223 from . import merge as mergemod
224 opts = pycompat.byteskwargs(opts)
224 opts = pycompat.byteskwargs(opts)
225 if not ui.interactive():
225 if not ui.interactive():
226 if cmdsuggest:
226 if cmdsuggest:
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 else:
228 else:
229 msg = _('running non-interactively')
229 msg = _('running non-interactively')
230 raise error.Abort(msg)
230 raise error.Abort(msg)
231
231
232 # make sure username is set before going interactive
232 # make sure username is set before going interactive
233 if not opts.get('user'):
233 if not opts.get('user'):
234 ui.username() # raise exception, username not provided
234 ui.username() # raise exception, username not provided
235
235
236 def recordfunc(ui, repo, message, match, opts):
236 def recordfunc(ui, repo, message, match, opts):
237 """This is generic record driver.
237 """This is generic record driver.
238
238
239 Its job is to interactively filter local changes, and
239 Its job is to interactively filter local changes, and
240 accordingly prepare working directory into a state in which the
240 accordingly prepare working directory into a state in which the
241 job can be delegated to a non-interactive commit command such as
241 job can be delegated to a non-interactive commit command such as
242 'commit' or 'qrefresh'.
242 'commit' or 'qrefresh'.
243
243
244 After the actual job is done by non-interactive command, the
244 After the actual job is done by non-interactive command, the
245 working directory is restored to its original state.
245 working directory is restored to its original state.
246
246
247 In the end we'll record interesting changes, and everything else
247 In the end we'll record interesting changes, and everything else
248 will be left in place, so the user can continue working.
248 will be left in place, so the user can continue working.
249 """
249 """
250
250
251 checkunfinished(repo, commit=True)
251 checkunfinished(repo, commit=True)
252 wctx = repo[None]
252 wctx = repo[None]
253 merge = len(wctx.parents()) > 1
253 merge = len(wctx.parents()) > 1
254 if merge:
254 if merge:
255 raise error.Abort(_('cannot partially commit a merge '
255 raise error.Abort(_('cannot partially commit a merge '
256 '(use "hg commit" instead)'))
256 '(use "hg commit" instead)'))
257
257
258 def fail(f, msg):
258 def fail(f, msg):
259 raise error.Abort('%s: %s' % (f, msg))
259 raise error.Abort('%s: %s' % (f, msg))
260
260
261 force = opts.get('force')
261 force = opts.get('force')
262 if not force:
262 if not force:
263 vdirs = []
263 vdirs = []
264 match.explicitdir = vdirs.append
264 match.explicitdir = vdirs.append
265 match.bad = fail
265 match.bad = fail
266
266
267 status = repo.status(match=match)
267 status = repo.status(match=match)
268 if not force:
268 if not force:
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts.nodates = True
271 diffopts.nodates = True
272 diffopts.git = True
272 diffopts.git = True
273 diffopts.showfunc = True
273 diffopts.showfunc = True
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originalchunks = patch.parsepatch(originaldiff)
275 originalchunks = patch.parsepatch(originaldiff)
276
276
277 # 1. filter patch, since we are intending to apply subset of it
277 # 1. filter patch, since we are intending to apply subset of it
278 try:
278 try:
279 chunks, newopts = filterfn(ui, originalchunks)
279 chunks, newopts = filterfn(ui, originalchunks)
280 except patch.PatchError as err:
280 except patch.PatchError as err:
281 raise error.Abort(_('error parsing patch: %s') % err)
281 raise error.Abort(_('error parsing patch: %s') % err)
282 opts.update(newopts)
282 opts.update(newopts)
283
283
284 # We need to keep a backup of files that have been newly added and
284 # We need to keep a backup of files that have been newly added and
285 # modified during the recording process because there is a previous
285 # modified during the recording process because there is a previous
286 # version without the edit in the workdir
286 # version without the edit in the workdir
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 contenders = set()
288 contenders = set()
289 for h in chunks:
289 for h in chunks:
290 try:
290 try:
291 contenders.update(set(h.files()))
291 contenders.update(set(h.files()))
292 except AttributeError:
292 except AttributeError:
293 pass
293 pass
294
294
295 changed = status.modified + status.added + status.removed
295 changed = status.modified + status.added + status.removed
296 newfiles = [f for f in changed if f in contenders]
296 newfiles = [f for f in changed if f in contenders]
297 if not newfiles:
297 if not newfiles:
298 ui.status(_('no changes to record\n'))
298 ui.status(_('no changes to record\n'))
299 return 0
299 return 0
300
300
301 modified = set(status.modified)
301 modified = set(status.modified)
302
302
303 # 2. backup changed files, so we can restore them in the end
303 # 2. backup changed files, so we can restore them in the end
304
304
305 if backupall:
305 if backupall:
306 tobackup = changed
306 tobackup = changed
307 else:
307 else:
308 tobackup = [f for f in newfiles if f in modified or f in \
308 tobackup = [f for f in newfiles if f in modified or f in \
309 newlyaddedandmodifiedfiles]
309 newlyaddedandmodifiedfiles]
310 backups = {}
310 backups = {}
311 if tobackup:
311 if tobackup:
312 backupdir = repo.vfs.join('record-backups')
312 backupdir = repo.vfs.join('record-backups')
313 try:
313 try:
314 os.mkdir(backupdir)
314 os.mkdir(backupdir)
315 except OSError as err:
315 except OSError as err:
316 if err.errno != errno.EEXIST:
316 if err.errno != errno.EEXIST:
317 raise
317 raise
318 try:
318 try:
319 # backup continues
319 # backup continues
320 for f in tobackup:
320 for f in tobackup:
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 dir=backupdir)
322 dir=backupdir)
323 os.close(fd)
323 os.close(fd)
324 ui.debug('backup %r as %r\n' % (f, tmpname))
324 ui.debug('backup %r as %r\n' % (f, tmpname))
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 backups[f] = tmpname
326 backups[f] = tmpname
327
327
328 fp = stringio()
328 fp = stringio()
329 for c in chunks:
329 for c in chunks:
330 fname = c.filename()
330 fname = c.filename()
331 if fname in backups:
331 if fname in backups:
332 c.write(fp)
332 c.write(fp)
333 dopatch = fp.tell()
333 dopatch = fp.tell()
334 fp.seek(0)
334 fp.seek(0)
335
335
336 # 2.5 optionally review / modify patch in text editor
336 # 2.5 optionally review / modify patch in text editor
337 if opts.get('review', False):
337 if opts.get('review', False):
338 patchtext = (crecordmod.diffhelptext
338 patchtext = (crecordmod.diffhelptext
339 + crecordmod.patchhelptext
339 + crecordmod.patchhelptext
340 + fp.read())
340 + fp.read())
341 reviewedpatch = ui.edit(patchtext, "",
341 reviewedpatch = ui.edit(patchtext, "",
342 extra={"suffix": ".diff"},
342 extra={"suffix": ".diff"},
343 repopath=repo.path)
343 repopath=repo.path)
344 fp.truncate(0)
344 fp.truncate(0)
345 fp.write(reviewedpatch)
345 fp.write(reviewedpatch)
346 fp.seek(0)
346 fp.seek(0)
347
347
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 # 3a. apply filtered patch to clean repo (clean)
349 # 3a. apply filtered patch to clean repo (clean)
350 if backups:
350 if backups:
351 # Equivalent to hg.revert
351 # Equivalent to hg.revert
352 m = scmutil.matchfiles(repo, backups.keys())
352 m = scmutil.matchfiles(repo, backups.keys())
353 mergemod.update(repo, repo.dirstate.p1(),
353 mergemod.update(repo, repo.dirstate.p1(),
354 False, True, matcher=m)
354 False, True, matcher=m)
355
355
356 # 3b. (apply)
356 # 3b. (apply)
357 if dopatch:
357 if dopatch:
358 try:
358 try:
359 ui.debug('applying patch\n')
359 ui.debug('applying patch\n')
360 ui.debug(fp.getvalue())
360 ui.debug(fp.getvalue())
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 except patch.PatchError as err:
362 except patch.PatchError as err:
363 raise error.Abort(str(err))
363 raise error.Abort(str(err))
364 del fp
364 del fp
365
365
366 # 4. We prepared working directory according to filtered
366 # 4. We prepared working directory according to filtered
367 # patch. Now is the time to delegate the job to
367 # patch. Now is the time to delegate the job to
368 # commit/qrefresh or the like!
368 # commit/qrefresh or the like!
369
369
370 # Make all of the pathnames absolute.
370 # Make all of the pathnames absolute.
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 return commitfunc(ui, repo, *newfiles, **opts)
372 return commitfunc(ui, repo, *newfiles, **opts)
373 finally:
373 finally:
374 # 5. finally restore backed-up files
374 # 5. finally restore backed-up files
375 try:
375 try:
376 dirstate = repo.dirstate
376 dirstate = repo.dirstate
377 for realname, tmpname in backups.iteritems():
377 for realname, tmpname in backups.iteritems():
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379
379
380 if dirstate[realname] == 'n':
380 if dirstate[realname] == 'n':
381 # without normallookup, restoring timestamp
381 # without normallookup, restoring timestamp
382 # may cause partially committed files
382 # may cause partially committed files
383 # to be treated as unmodified
383 # to be treated as unmodified
384 dirstate.normallookup(realname)
384 dirstate.normallookup(realname)
385
385
386 # copystat=True here and above are a hack to trick any
386 # copystat=True here and above are a hack to trick any
387 # editors that have f open that we haven't modified them.
387 # editors that have f open that we haven't modified them.
388 #
388 #
389 # Also note that this racy as an editor could notice the
389 # Also note that this racy as an editor could notice the
390 # file's mtime before we've finished writing it.
390 # file's mtime before we've finished writing it.
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 os.unlink(tmpname)
392 os.unlink(tmpname)
393 if tobackup:
393 if tobackup:
394 os.rmdir(backupdir)
394 os.rmdir(backupdir)
395 except OSError:
395 except OSError:
396 pass
396 pass
397
397
398 def recordinwlock(ui, repo, message, match, opts):
398 def recordinwlock(ui, repo, message, match, opts):
399 with repo.wlock():
399 with repo.wlock():
400 return recordfunc(ui, repo, message, match, opts)
400 return recordfunc(ui, repo, message, match, opts)
401
401
402 return commit(ui, repo, recordinwlock, pats, opts)
402 return commit(ui, repo, recordinwlock, pats, opts)
403
403
404 def tersestatus(root, statlist, status, ignorefn, ignore):
404 def tersestatus(root, statlist, status, ignorefn, ignore):
405 """
405 """
406 Returns a list of statuses with directory collapsed if all the files in the
406 Returns a list of statuses with directory collapsed if all the files in the
407 directory has the same status.
407 directory has the same status.
408 """
408 """
409
409
410 def numfiles(dirname):
410 def numfiles(dirname):
411 """
411 """
412 Calculates the number of tracked files in a given directory which also
412 Calculates the number of tracked files in a given directory which also
413 includes files which were removed or deleted. Considers ignored files
413 includes files which were removed or deleted. Considers ignored files
414 if ignore argument is True or 'i' is present in status argument.
414 if ignore argument is True or 'i' is present in status argument.
415 """
415 """
416 if lencache.get(dirname):
416 if lencache.get(dirname):
417 return lencache[dirname]
417 return lencache[dirname]
418 if 'i' in status or ignore:
418 if 'i' in status or ignore:
419 def match(localpath):
419 def match(localpath):
420 absolutepath = os.path.join(root, localpath)
420 absolutepath = os.path.join(root, localpath)
421 if os.path.isdir(absolutepath) and isemptydir(absolutepath):
421 if os.path.isdir(absolutepath) and isemptydir(absolutepath):
422 return True
422 return True
423 return False
423 return False
424 else:
424 else:
425 def match(localpath):
425 def match(localpath):
426 # there can be directory whose all the files are ignored and
426 # there can be directory whose all the files are ignored and
427 # hence the drectory should also be ignored while counting
427 # hence the drectory should also be ignored while counting
428 # number of files or subdirs in it's parent directory. This
428 # number of files or subdirs in it's parent directory. This
429 # checks the same.
429 # checks the same.
430 # XXX: We need a better logic here.
430 # XXX: We need a better logic here.
431 if os.path.isdir(os.path.join(root, localpath)):
431 if os.path.isdir(os.path.join(root, localpath)):
432 return isignoreddir(localpath)
432 return isignoreddir(localpath)
433 else:
433 else:
434 # XXX: there can be files which have the ignored pattern but
434 # XXX: there can be files which have the ignored pattern but
435 # are not ignored. That leads to bug in counting number of
435 # are not ignored. That leads to bug in counting number of
436 # tracked files in the directory.
436 # tracked files in the directory.
437 return ignorefn(localpath)
437 return ignorefn(localpath)
438 lendir = 0
438 lendir = 0
439 abspath = os.path.join(root, dirname)
439 abspath = os.path.join(root, dirname)
440 # There might be cases when a directory does not exists as the whole
440 # There might be cases when a directory does not exists as the whole
441 # directory can be removed and/or deleted.
441 # directory can be removed and/or deleted.
442 try:
442 try:
443 for f in os.listdir(abspath):
443 for f in os.listdir(abspath):
444 localpath = os.path.join(dirname, f)
444 localpath = os.path.join(dirname, f)
445 if not match(localpath):
445 if not match(localpath):
446 lendir += 1
446 lendir += 1
447 except OSError:
447 except OSError:
448 pass
448 pass
449 lendir += len(absentdir.get(dirname, []))
449 lendir += len(absentdir.get(dirname, []))
450 lencache[dirname] = lendir
450 lencache[dirname] = lendir
451 return lendir
451 return lendir
452
452
453 def isemptydir(abspath):
453 def isemptydir(abspath):
454 """
454 """
455 Check whether a directory is empty or not, i.e. there is no files in the
455 Check whether a directory is empty or not, i.e. there is no files in the
456 directory and all its subdirectories.
456 directory and all its subdirectories.
457 """
457 """
458 for f in os.listdir(abspath):
458 for f in os.listdir(abspath):
459 fullpath = os.path.join(abspath, f)
459 fullpath = os.path.join(abspath, f)
460 if os.path.isdir(fullpath):
460 if os.path.isdir(fullpath):
461 # recursion here
461 # recursion here
462 ret = isemptydir(fullpath)
462 ret = isemptydir(fullpath)
463 if not ret:
463 if not ret:
464 return False
464 return False
465 else:
465 else:
466 return False
466 return False
467 return True
467 return True
468
468
469 def isignoreddir(localpath):
469 def isignoreddir(localpath):
470 """Return True if `localpath` directory is ignored or contains only
470 """Return True if `localpath` directory is ignored or contains only
471 ignored files and should hence be considered ignored.
471 ignored files and should hence be considered ignored.
472 """
472 """
473 dirpath = os.path.join(root, localpath)
473 dirpath = os.path.join(root, localpath)
474 if ignorefn(dirpath):
474 if ignorefn(dirpath):
475 return True
475 return True
476 for f in os.listdir(dirpath):
476 for f in os.listdir(dirpath):
477 filepath = os.path.join(dirpath, f)
477 filepath = os.path.join(dirpath, f)
478 if os.path.isdir(filepath):
478 if os.path.isdir(filepath):
479 # recursion here
479 # recursion here
480 ret = isignoreddir(os.path.join(localpath, f))
480 ret = isignoreddir(os.path.join(localpath, f))
481 if not ret:
481 if not ret:
482 return False
482 return False
483 else:
483 else:
484 if not ignorefn(os.path.join(localpath, f)):
484 if not ignorefn(os.path.join(localpath, f)):
485 return False
485 return False
486 return True
486 return True
487
487
488 def absentones(removedfiles, missingfiles):
488 def absentones(removedfiles, missingfiles):
489 """
489 """
490 Returns a dictionary of directories with files in it which are either
490 Returns a dictionary of directories with files in it which are either
491 removed or missing (deleted) in them.
491 removed or missing (deleted) in them.
492 """
492 """
493 absentdir = {}
493 absentdir = {}
494 absentfiles = removedfiles + missingfiles
494 absentfiles = removedfiles + missingfiles
495 while absentfiles:
495 while absentfiles:
496 f = absentfiles.pop()
496 f = absentfiles.pop()
497 par = os.path.dirname(f)
497 par = os.path.dirname(f)
498 if par == '':
498 if par == '':
499 continue
499 continue
500 # we need to store files rather than number of files as some files
500 # we need to store files rather than number of files as some files
501 # or subdirectories in a directory can be counted twice. This is
501 # or subdirectories in a directory can be counted twice. This is
502 # also we have used sets here.
502 # also we have used sets here.
503 try:
503 try:
504 absentdir[par].add(f)
504 absentdir[par].add(f)
505 except KeyError:
505 except KeyError:
506 absentdir[par] = set([f])
506 absentdir[par] = set([f])
507 absentfiles.append(par)
507 absentfiles.append(par)
508 return absentdir
508 return absentdir
509
509
510 indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
510 indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
511 # get a dictonary of directories and files which are missing as os.listdir()
511 # get a dictonary of directories and files which are missing as os.listdir()
512 # won't be able to list them.
512 # won't be able to list them.
513 absentdir = absentones(statlist[2], statlist[3])
513 absentdir = absentones(statlist[2], statlist[3])
514 finalrs = [[]] * len(indexes)
514 finalrs = [[]] * len(indexes)
515 didsomethingchanged = False
515 didsomethingchanged = False
516 # dictionary to store number of files and subdir in a directory so that we
516 # dictionary to store number of files and subdir in a directory so that we
517 # don't compute that again.
517 # don't compute that again.
518 lencache = {}
518 lencache = {}
519
519
520 for st in pycompat.bytestr(status):
520 for st in pycompat.bytestr(status):
521
521
522 try:
522 try:
523 ind = indexes[st]
523 ind = indexes[st]
524 except KeyError:
524 except KeyError:
525 # TODO: Need a better error message here
525 # TODO: Need a better error message here
526 raise error.Abort("'%s' not recognized" % st)
526 raise error.Abort("'%s' not recognized" % st)
527
527
528 sfiles = statlist[ind]
528 sfiles = statlist[ind]
529 if not sfiles:
529 if not sfiles:
530 continue
530 continue
531 pardict = {}
531 pardict = {}
532 for a in sfiles:
532 for a in sfiles:
533 par = os.path.dirname(a)
533 par = os.path.dirname(a)
534 pardict.setdefault(par, []).append(a)
534 pardict.setdefault(par, []).append(a)
535
535
536 rs = []
536 rs = []
537 newls = []
537 newls = []
538 for par, files in pardict.iteritems():
538 for par, files in pardict.iteritems():
539 lenpar = numfiles(par)
539 lenpar = numfiles(par)
540 if lenpar == len(files):
540 if lenpar == len(files):
541 newls.append(par)
541 newls.append(par)
542
542
543 if not newls:
543 if not newls:
544 continue
544 continue
545
545
546 while newls:
546 while newls:
547 newel = newls.pop()
547 newel = newls.pop()
548 if newel == '':
548 if newel == '':
549 continue
549 continue
550 parn = os.path.dirname(newel)
550 parn = os.path.dirname(newel)
551 pardict[newel] = []
551 pardict[newel] = []
552 # Adding pycompat.ossep as newel is a directory.
552 # Adding pycompat.ossep as newel is a directory.
553 pardict.setdefault(parn, []).append(newel + pycompat.ossep)
553 pardict.setdefault(parn, []).append(newel + pycompat.ossep)
554 lenpar = numfiles(parn)
554 lenpar = numfiles(parn)
555 if lenpar == len(pardict[parn]):
555 if lenpar == len(pardict[parn]):
556 newls.append(parn)
556 newls.append(parn)
557
557
558 # dict.values() for Py3 compatibility
558 # dict.values() for Py3 compatibility
559 for files in pardict.values():
559 for files in pardict.values():
560 rs.extend(files)
560 rs.extend(files)
561
561
562 rs.sort()
562 rs.sort()
563 finalrs[ind] = rs
563 finalrs[ind] = rs
564 didsomethingchanged = True
564 didsomethingchanged = True
565
565
566 # If nothing is changed, make sure the order of files is preserved.
566 # If nothing is changed, make sure the order of files is preserved.
567 if not didsomethingchanged:
567 if not didsomethingchanged:
568 return statlist
568 return statlist
569
569
570 for x in xrange(len(indexes)):
570 for x in xrange(len(indexes)):
571 if not finalrs[x]:
571 if not finalrs[x]:
572 finalrs[x] = statlist[x]
572 finalrs[x] = statlist[x]
573
573
574 return finalrs
574 return finalrs
575
575
576 def findpossible(cmd, table, strict=False):
576 def findpossible(cmd, table, strict=False):
577 """
577 """
578 Return cmd -> (aliases, command table entry)
578 Return cmd -> (aliases, command table entry)
579 for each matching command.
579 for each matching command.
580 Return debug commands (or their aliases) only if no normal command matches.
580 Return debug commands (or their aliases) only if no normal command matches.
581 """
581 """
582 choice = {}
582 choice = {}
583 debugchoice = {}
583 debugchoice = {}
584
584
585 if cmd in table:
585 if cmd in table:
586 # short-circuit exact matches, "log" alias beats "^log|history"
586 # short-circuit exact matches, "log" alias beats "^log|history"
587 keys = [cmd]
587 keys = [cmd]
588 else:
588 else:
589 keys = table.keys()
589 keys = table.keys()
590
590
591 allcmds = []
591 allcmds = []
592 for e in keys:
592 for e in keys:
593 aliases = parsealiases(e)
593 aliases = parsealiases(e)
594 allcmds.extend(aliases)
594 allcmds.extend(aliases)
595 found = None
595 found = None
596 if cmd in aliases:
596 if cmd in aliases:
597 found = cmd
597 found = cmd
598 elif not strict:
598 elif not strict:
599 for a in aliases:
599 for a in aliases:
600 if a.startswith(cmd):
600 if a.startswith(cmd):
601 found = a
601 found = a
602 break
602 break
603 if found is not None:
603 if found is not None:
604 if aliases[0].startswith("debug") or found.startswith("debug"):
604 if aliases[0].startswith("debug") or found.startswith("debug"):
605 debugchoice[found] = (aliases, table[e])
605 debugchoice[found] = (aliases, table[e])
606 else:
606 else:
607 choice[found] = (aliases, table[e])
607 choice[found] = (aliases, table[e])
608
608
609 if not choice and debugchoice:
609 if not choice and debugchoice:
610 choice = debugchoice
610 choice = debugchoice
611
611
612 return choice, allcmds
612 return choice, allcmds
613
613
614 def findcmd(cmd, table, strict=True):
614 def findcmd(cmd, table, strict=True):
615 """Return (aliases, command table entry) for command string."""
615 """Return (aliases, command table entry) for command string."""
616 choice, allcmds = findpossible(cmd, table, strict)
616 choice, allcmds = findpossible(cmd, table, strict)
617
617
618 if cmd in choice:
618 if cmd in choice:
619 return choice[cmd]
619 return choice[cmd]
620
620
621 if len(choice) > 1:
621 if len(choice) > 1:
622 clist = sorted(choice)
622 clist = sorted(choice)
623 raise error.AmbiguousCommand(cmd, clist)
623 raise error.AmbiguousCommand(cmd, clist)
624
624
625 if choice:
625 if choice:
626 return list(choice.values())[0]
626 return list(choice.values())[0]
627
627
628 raise error.UnknownCommand(cmd, allcmds)
628 raise error.UnknownCommand(cmd, allcmds)
629
629
630 def findrepo(p):
630 def findrepo(p):
631 while not os.path.isdir(os.path.join(p, ".hg")):
631 while not os.path.isdir(os.path.join(p, ".hg")):
632 oldp, p = p, os.path.dirname(p)
632 oldp, p = p, os.path.dirname(p)
633 if p == oldp:
633 if p == oldp:
634 return None
634 return None
635
635
636 return p
636 return p
637
637
638 def bailifchanged(repo, merge=True, hint=None):
638 def bailifchanged(repo, merge=True, hint=None):
639 """ enforce the precondition that working directory must be clean.
639 """ enforce the precondition that working directory must be clean.
640
640
641 'merge' can be set to false if a pending uncommitted merge should be
641 'merge' can be set to false if a pending uncommitted merge should be
642 ignored (such as when 'update --check' runs).
642 ignored (such as when 'update --check' runs).
643
643
644 'hint' is the usual hint given to Abort exception.
644 'hint' is the usual hint given to Abort exception.
645 """
645 """
646
646
647 if merge and repo.dirstate.p2() != nullid:
647 if merge and repo.dirstate.p2() != nullid:
648 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
648 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
649 modified, added, removed, deleted = repo.status()[:4]
649 modified, added, removed, deleted = repo.status()[:4]
650 if modified or added or removed or deleted:
650 if modified or added or removed or deleted:
651 raise error.Abort(_('uncommitted changes'), hint=hint)
651 raise error.Abort(_('uncommitted changes'), hint=hint)
652 ctx = repo[None]
652 ctx = repo[None]
653 for s in sorted(ctx.substate):
653 for s in sorted(ctx.substate):
654 ctx.sub(s).bailifchanged(hint=hint)
654 ctx.sub(s).bailifchanged(hint=hint)
655
655
656 def logmessage(ui, opts):
656 def logmessage(ui, opts):
657 """ get the log message according to -m and -l option """
657 """ get the log message according to -m and -l option """
658 message = opts.get('message')
658 message = opts.get('message')
659 logfile = opts.get('logfile')
659 logfile = opts.get('logfile')
660
660
661 if message and logfile:
661 if message and logfile:
662 raise error.Abort(_('options --message and --logfile are mutually '
662 raise error.Abort(_('options --message and --logfile are mutually '
663 'exclusive'))
663 'exclusive'))
664 if not message and logfile:
664 if not message and logfile:
665 try:
665 try:
666 if isstdiofilename(logfile):
666 if isstdiofilename(logfile):
667 message = ui.fin.read()
667 message = ui.fin.read()
668 else:
668 else:
669 message = '\n'.join(util.readfile(logfile).splitlines())
669 message = '\n'.join(util.readfile(logfile).splitlines())
670 except IOError as inst:
670 except IOError as inst:
671 raise error.Abort(_("can't read commit message '%s': %s") %
671 raise error.Abort(_("can't read commit message '%s': %s") %
672 (logfile, inst.strerror))
672 (logfile, inst.strerror))
673 return message
673 return message
674
674
675 def mergeeditform(ctxorbool, baseformname):
675 def mergeeditform(ctxorbool, baseformname):
676 """return appropriate editform name (referencing a committemplate)
676 """return appropriate editform name (referencing a committemplate)
677
677
678 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
678 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
679 merging is committed.
679 merging is committed.
680
680
681 This returns baseformname with '.merge' appended if it is a merge,
681 This returns baseformname with '.merge' appended if it is a merge,
682 otherwise '.normal' is appended.
682 otherwise '.normal' is appended.
683 """
683 """
684 if isinstance(ctxorbool, bool):
684 if isinstance(ctxorbool, bool):
685 if ctxorbool:
685 if ctxorbool:
686 return baseformname + ".merge"
686 return baseformname + ".merge"
687 elif 1 < len(ctxorbool.parents()):
687 elif 1 < len(ctxorbool.parents()):
688 return baseformname + ".merge"
688 return baseformname + ".merge"
689
689
690 return baseformname + ".normal"
690 return baseformname + ".normal"
691
691
692 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
692 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
693 editform='', **opts):
693 editform='', **opts):
694 """get appropriate commit message editor according to '--edit' option
694 """get appropriate commit message editor according to '--edit' option
695
695
696 'finishdesc' is a function to be called with edited commit message
696 'finishdesc' is a function to be called with edited commit message
697 (= 'description' of the new changeset) just after editing, but
697 (= 'description' of the new changeset) just after editing, but
698 before checking empty-ness. It should return actual text to be
698 before checking empty-ness. It should return actual text to be
699 stored into history. This allows to change description before
699 stored into history. This allows to change description before
700 storing.
700 storing.
701
701
702 'extramsg' is a extra message to be shown in the editor instead of
702 'extramsg' is a extra message to be shown in the editor instead of
703 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
703 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
704 is automatically added.
704 is automatically added.
705
705
706 'editform' is a dot-separated list of names, to distinguish
706 'editform' is a dot-separated list of names, to distinguish
707 the purpose of commit text editing.
707 the purpose of commit text editing.
708
708
709 'getcommiteditor' returns 'commitforceeditor' regardless of
709 'getcommiteditor' returns 'commitforceeditor' regardless of
710 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
710 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
711 they are specific for usage in MQ.
711 they are specific for usage in MQ.
712 """
712 """
713 if edit or finishdesc or extramsg:
713 if edit or finishdesc or extramsg:
714 return lambda r, c, s: commitforceeditor(r, c, s,
714 return lambda r, c, s: commitforceeditor(r, c, s,
715 finishdesc=finishdesc,
715 finishdesc=finishdesc,
716 extramsg=extramsg,
716 extramsg=extramsg,
717 editform=editform)
717 editform=editform)
718 elif editform:
718 elif editform:
719 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
719 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
720 else:
720 else:
721 return commiteditor
721 return commiteditor
722
722
723 def loglimit(opts):
723 def loglimit(opts):
724 """get the log limit according to option -l/--limit"""
724 """get the log limit according to option -l/--limit"""
725 limit = opts.get('limit')
725 limit = opts.get('limit')
726 if limit:
726 if limit:
727 try:
727 try:
728 limit = int(limit)
728 limit = int(limit)
729 except ValueError:
729 except ValueError:
730 raise error.Abort(_('limit must be a positive integer'))
730 raise error.Abort(_('limit must be a positive integer'))
731 if limit <= 0:
731 if limit <= 0:
732 raise error.Abort(_('limit must be positive'))
732 raise error.Abort(_('limit must be positive'))
733 else:
733 else:
734 limit = None
734 limit = None
735 return limit
735 return limit
736
736
737 def makefilename(repo, pat, node, desc=None,
737 def makefilename(repo, pat, node, desc=None,
738 total=None, seqno=None, revwidth=None, pathname=None):
738 total=None, seqno=None, revwidth=None, pathname=None):
739 node_expander = {
739 node_expander = {
740 'H': lambda: hex(node),
740 'H': lambda: hex(node),
741 'R': lambda: str(repo.changelog.rev(node)),
741 'R': lambda: str(repo.changelog.rev(node)),
742 'h': lambda: short(node),
742 'h': lambda: short(node),
743 'm': lambda: re.sub('[^\w]', '_', str(desc))
743 'm': lambda: re.sub('[^\w]', '_', str(desc))
744 }
744 }
745 expander = {
745 expander = {
746 '%': lambda: '%',
746 '%': lambda: '%',
747 'b': lambda: os.path.basename(repo.root),
747 'b': lambda: os.path.basename(repo.root),
748 }
748 }
749
749
750 try:
750 try:
751 if node:
751 if node:
752 expander.update(node_expander)
752 expander.update(node_expander)
753 if node:
753 if node:
754 expander['r'] = (lambda:
754 expander['r'] = (lambda:
755 str(repo.changelog.rev(node)).zfill(revwidth or 0))
755 str(repo.changelog.rev(node)).zfill(revwidth or 0))
756 if total is not None:
756 if total is not None:
757 expander['N'] = lambda: str(total)
757 expander['N'] = lambda: str(total)
758 if seqno is not None:
758 if seqno is not None:
759 expander['n'] = lambda: str(seqno)
759 expander['n'] = lambda: str(seqno)
760 if total is not None and seqno is not None:
760 if total is not None and seqno is not None:
761 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
761 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
762 if pathname is not None:
762 if pathname is not None:
763 expander['s'] = lambda: os.path.basename(pathname)
763 expander['s'] = lambda: os.path.basename(pathname)
764 expander['d'] = lambda: os.path.dirname(pathname) or '.'
764 expander['d'] = lambda: os.path.dirname(pathname) or '.'
765 expander['p'] = lambda: pathname
765 expander['p'] = lambda: pathname
766
766
767 newname = []
767 newname = []
768 patlen = len(pat)
768 patlen = len(pat)
769 i = 0
769 i = 0
770 while i < patlen:
770 while i < patlen:
771 c = pat[i:i + 1]
771 c = pat[i:i + 1]
772 if c == '%':
772 if c == '%':
773 i += 1
773 i += 1
774 c = pat[i:i + 1]
774 c = pat[i:i + 1]
775 c = expander[c]()
775 c = expander[c]()
776 newname.append(c)
776 newname.append(c)
777 i += 1
777 i += 1
778 return ''.join(newname)
778 return ''.join(newname)
779 except KeyError as inst:
779 except KeyError as inst:
780 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
780 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
781 inst.args[0])
781 inst.args[0])
782
782
783 def isstdiofilename(pat):
783 def isstdiofilename(pat):
784 """True if the given pat looks like a filename denoting stdin/stdout"""
784 """True if the given pat looks like a filename denoting stdin/stdout"""
785 return not pat or pat == '-'
785 return not pat or pat == '-'
786
786
787 class _unclosablefile(object):
787 class _unclosablefile(object):
788 def __init__(self, fp):
788 def __init__(self, fp):
789 self._fp = fp
789 self._fp = fp
790
790
791 def close(self):
791 def close(self):
792 pass
792 pass
793
793
794 def __iter__(self):
794 def __iter__(self):
795 return iter(self._fp)
795 return iter(self._fp)
796
796
797 def __getattr__(self, attr):
797 def __getattr__(self, attr):
798 return getattr(self._fp, attr)
798 return getattr(self._fp, attr)
799
799
800 def __enter__(self):
800 def __enter__(self):
801 return self
801 return self
802
802
803 def __exit__(self, exc_type, exc_value, exc_tb):
803 def __exit__(self, exc_type, exc_value, exc_tb):
804 pass
804 pass
805
805
806 def makefileobj(repo, pat, node=None, desc=None, total=None,
806 def makefileobj(repo, pat, node=None, desc=None, total=None,
807 seqno=None, revwidth=None, mode='wb', modemap=None,
807 seqno=None, revwidth=None, mode='wb', modemap=None,
808 pathname=None):
808 pathname=None):
809
809
810 writable = mode not in ('r', 'rb')
810 writable = mode not in ('r', 'rb')
811
811
812 if isstdiofilename(pat):
812 if isstdiofilename(pat):
813 if writable:
813 if writable:
814 fp = repo.ui.fout
814 fp = repo.ui.fout
815 else:
815 else:
816 fp = repo.ui.fin
816 fp = repo.ui.fin
817 return _unclosablefile(fp)
817 return _unclosablefile(fp)
818 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
818 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
819 if modemap is not None:
819 if modemap is not None:
820 mode = modemap.get(fn, mode)
820 mode = modemap.get(fn, mode)
821 if mode == 'wb':
821 if mode == 'wb':
822 modemap[fn] = 'ab'
822 modemap[fn] = 'ab'
823 return open(fn, mode)
823 return open(fn, mode)
824
824
825 def openrevlog(repo, cmd, file_, opts):
825 def openrevlog(repo, cmd, file_, opts):
826 """opens the changelog, manifest, a filelog or a given revlog"""
826 """opens the changelog, manifest, a filelog or a given revlog"""
827 cl = opts['changelog']
827 cl = opts['changelog']
828 mf = opts['manifest']
828 mf = opts['manifest']
829 dir = opts['dir']
829 dir = opts['dir']
830 msg = None
830 msg = None
831 if cl and mf:
831 if cl and mf:
832 msg = _('cannot specify --changelog and --manifest at the same time')
832 msg = _('cannot specify --changelog and --manifest at the same time')
833 elif cl and dir:
833 elif cl and dir:
834 msg = _('cannot specify --changelog and --dir at the same time')
834 msg = _('cannot specify --changelog and --dir at the same time')
835 elif cl or mf or dir:
835 elif cl or mf or dir:
836 if file_:
836 if file_:
837 msg = _('cannot specify filename with --changelog or --manifest')
837 msg = _('cannot specify filename with --changelog or --manifest')
838 elif not repo:
838 elif not repo:
839 msg = _('cannot specify --changelog or --manifest or --dir '
839 msg = _('cannot specify --changelog or --manifest or --dir '
840 'without a repository')
840 'without a repository')
841 if msg:
841 if msg:
842 raise error.Abort(msg)
842 raise error.Abort(msg)
843
843
844 r = None
844 r = None
845 if repo:
845 if repo:
846 if cl:
846 if cl:
847 r = repo.unfiltered().changelog
847 r = repo.unfiltered().changelog
848 elif dir:
848 elif dir:
849 if 'treemanifest' not in repo.requirements:
849 if 'treemanifest' not in repo.requirements:
850 raise error.Abort(_("--dir can only be used on repos with "
850 raise error.Abort(_("--dir can only be used on repos with "
851 "treemanifest enabled"))
851 "treemanifest enabled"))
852 dirlog = repo.manifestlog._revlog.dirlog(dir)
852 dirlog = repo.manifestlog._revlog.dirlog(dir)
853 if len(dirlog):
853 if len(dirlog):
854 r = dirlog
854 r = dirlog
855 elif mf:
855 elif mf:
856 r = repo.manifestlog._revlog
856 r = repo.manifestlog._revlog
857 elif file_:
857 elif file_:
858 filelog = repo.file(file_)
858 filelog = repo.file(file_)
859 if len(filelog):
859 if len(filelog):
860 r = filelog
860 r = filelog
861 if not r:
861 if not r:
862 if not file_:
862 if not file_:
863 raise error.CommandError(cmd, _('invalid arguments'))
863 raise error.CommandError(cmd, _('invalid arguments'))
864 if not os.path.isfile(file_):
864 if not os.path.isfile(file_):
865 raise error.Abort(_("revlog '%s' not found") % file_)
865 raise error.Abort(_("revlog '%s' not found") % file_)
866 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
866 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
867 file_[:-2] + ".i")
867 file_[:-2] + ".i")
868 return r
868 return r
869
869
870 def copy(ui, repo, pats, opts, rename=False):
870 def copy(ui, repo, pats, opts, rename=False):
871 # called with the repo lock held
871 # called with the repo lock held
872 #
872 #
873 # hgsep => pathname that uses "/" to separate directories
873 # hgsep => pathname that uses "/" to separate directories
874 # ossep => pathname that uses os.sep to separate directories
874 # ossep => pathname that uses os.sep to separate directories
875 cwd = repo.getcwd()
875 cwd = repo.getcwd()
876 targets = {}
876 targets = {}
877 after = opts.get("after")
877 after = opts.get("after")
878 dryrun = opts.get("dry_run")
878 dryrun = opts.get("dry_run")
879 wctx = repo[None]
879 wctx = repo[None]
880
880
881 def walkpat(pat):
881 def walkpat(pat):
882 srcs = []
882 srcs = []
883 if after:
883 if after:
884 badstates = '?'
884 badstates = '?'
885 else:
885 else:
886 badstates = '?r'
886 badstates = '?r'
887 m = scmutil.match(wctx, [pat], opts, globbed=True)
887 m = scmutil.match(wctx, [pat], opts, globbed=True)
888 for abs in wctx.walk(m):
888 for abs in wctx.walk(m):
889 state = repo.dirstate[abs]
889 state = repo.dirstate[abs]
890 rel = m.rel(abs)
890 rel = m.rel(abs)
891 exact = m.exact(abs)
891 exact = m.exact(abs)
892 if state in badstates:
892 if state in badstates:
893 if exact and state == '?':
893 if exact and state == '?':
894 ui.warn(_('%s: not copying - file is not managed\n') % rel)
894 ui.warn(_('%s: not copying - file is not managed\n') % rel)
895 if exact and state == 'r':
895 if exact and state == 'r':
896 ui.warn(_('%s: not copying - file has been marked for'
896 ui.warn(_('%s: not copying - file has been marked for'
897 ' remove\n') % rel)
897 ' remove\n') % rel)
898 continue
898 continue
899 # abs: hgsep
899 # abs: hgsep
900 # rel: ossep
900 # rel: ossep
901 srcs.append((abs, rel, exact))
901 srcs.append((abs, rel, exact))
902 return srcs
902 return srcs
903
903
904 # abssrc: hgsep
904 # abssrc: hgsep
905 # relsrc: ossep
905 # relsrc: ossep
906 # otarget: ossep
906 # otarget: ossep
907 def copyfile(abssrc, relsrc, otarget, exact):
907 def copyfile(abssrc, relsrc, otarget, exact):
908 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
908 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
909 if '/' in abstarget:
909 if '/' in abstarget:
910 # We cannot normalize abstarget itself, this would prevent
910 # We cannot normalize abstarget itself, this would prevent
911 # case only renames, like a => A.
911 # case only renames, like a => A.
912 abspath, absname = abstarget.rsplit('/', 1)
912 abspath, absname = abstarget.rsplit('/', 1)
913 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
913 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
914 reltarget = repo.pathto(abstarget, cwd)
914 reltarget = repo.pathto(abstarget, cwd)
915 target = repo.wjoin(abstarget)
915 target = repo.wjoin(abstarget)
916 src = repo.wjoin(abssrc)
916 src = repo.wjoin(abssrc)
917 state = repo.dirstate[abstarget]
917 state = repo.dirstate[abstarget]
918
918
919 scmutil.checkportable(ui, abstarget)
919 scmutil.checkportable(ui, abstarget)
920
920
921 # check for collisions
921 # check for collisions
922 prevsrc = targets.get(abstarget)
922 prevsrc = targets.get(abstarget)
923 if prevsrc is not None:
923 if prevsrc is not None:
924 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
924 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
925 (reltarget, repo.pathto(abssrc, cwd),
925 (reltarget, repo.pathto(abssrc, cwd),
926 repo.pathto(prevsrc, cwd)))
926 repo.pathto(prevsrc, cwd)))
927 return
927 return
928
928
929 # check for overwrites
929 # check for overwrites
930 exists = os.path.lexists(target)
930 exists = os.path.lexists(target)
931 samefile = False
931 samefile = False
932 if exists and abssrc != abstarget:
932 if exists and abssrc != abstarget:
933 if (repo.dirstate.normalize(abssrc) ==
933 if (repo.dirstate.normalize(abssrc) ==
934 repo.dirstate.normalize(abstarget)):
934 repo.dirstate.normalize(abstarget)):
935 if not rename:
935 if not rename:
936 ui.warn(_("%s: can't copy - same file\n") % reltarget)
936 ui.warn(_("%s: can't copy - same file\n") % reltarget)
937 return
937 return
938 exists = False
938 exists = False
939 samefile = True
939 samefile = True
940
940
941 if not after and exists or after and state in 'mn':
941 if not after and exists or after and state in 'mn':
942 if not opts['force']:
942 if not opts['force']:
943 if state in 'mn':
943 if state in 'mn':
944 msg = _('%s: not overwriting - file already committed\n')
944 msg = _('%s: not overwriting - file already committed\n')
945 if after:
945 if after:
946 flags = '--after --force'
946 flags = '--after --force'
947 else:
947 else:
948 flags = '--force'
948 flags = '--force'
949 if rename:
949 if rename:
950 hint = _('(hg rename %s to replace the file by '
950 hint = _('(hg rename %s to replace the file by '
951 'recording a rename)\n') % flags
951 'recording a rename)\n') % flags
952 else:
952 else:
953 hint = _('(hg copy %s to replace the file by '
953 hint = _('(hg copy %s to replace the file by '
954 'recording a copy)\n') % flags
954 'recording a copy)\n') % flags
955 else:
955 else:
956 msg = _('%s: not overwriting - file exists\n')
956 msg = _('%s: not overwriting - file exists\n')
957 if rename:
957 if rename:
958 hint = _('(hg rename --after to record the rename)\n')
958 hint = _('(hg rename --after to record the rename)\n')
959 else:
959 else:
960 hint = _('(hg copy --after to record the copy)\n')
960 hint = _('(hg copy --after to record the copy)\n')
961 ui.warn(msg % reltarget)
961 ui.warn(msg % reltarget)
962 ui.warn(hint)
962 ui.warn(hint)
963 return
963 return
964
964
965 if after:
965 if after:
966 if not exists:
966 if not exists:
967 if rename:
967 if rename:
968 ui.warn(_('%s: not recording move - %s does not exist\n') %
968 ui.warn(_('%s: not recording move - %s does not exist\n') %
969 (relsrc, reltarget))
969 (relsrc, reltarget))
970 else:
970 else:
971 ui.warn(_('%s: not recording copy - %s does not exist\n') %
971 ui.warn(_('%s: not recording copy - %s does not exist\n') %
972 (relsrc, reltarget))
972 (relsrc, reltarget))
973 return
973 return
974 elif not dryrun:
974 elif not dryrun:
975 try:
975 try:
976 if exists:
976 if exists:
977 os.unlink(target)
977 os.unlink(target)
978 targetdir = os.path.dirname(target) or '.'
978 targetdir = os.path.dirname(target) or '.'
979 if not os.path.isdir(targetdir):
979 if not os.path.isdir(targetdir):
980 os.makedirs(targetdir)
980 os.makedirs(targetdir)
981 if samefile:
981 if samefile:
982 tmp = target + "~hgrename"
982 tmp = target + "~hgrename"
983 os.rename(src, tmp)
983 os.rename(src, tmp)
984 os.rename(tmp, target)
984 os.rename(tmp, target)
985 else:
985 else:
986 util.copyfile(src, target)
986 util.copyfile(src, target)
987 srcexists = True
987 srcexists = True
988 except IOError as inst:
988 except IOError as inst:
989 if inst.errno == errno.ENOENT:
989 if inst.errno == errno.ENOENT:
990 ui.warn(_('%s: deleted in working directory\n') % relsrc)
990 ui.warn(_('%s: deleted in working directory\n') % relsrc)
991 srcexists = False
991 srcexists = False
992 else:
992 else:
993 ui.warn(_('%s: cannot copy - %s\n') %
993 ui.warn(_('%s: cannot copy - %s\n') %
994 (relsrc, inst.strerror))
994 (relsrc, inst.strerror))
995 return True # report a failure
995 return True # report a failure
996
996
997 if ui.verbose or not exact:
997 if ui.verbose or not exact:
998 if rename:
998 if rename:
999 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
999 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1000 else:
1000 else:
1001 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1001 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1002
1002
1003 targets[abstarget] = abssrc
1003 targets[abstarget] = abssrc
1004
1004
1005 # fix up dirstate
1005 # fix up dirstate
1006 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1006 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1007 dryrun=dryrun, cwd=cwd)
1007 dryrun=dryrun, cwd=cwd)
1008 if rename and not dryrun:
1008 if rename and not dryrun:
1009 if not after and srcexists and not samefile:
1009 if not after and srcexists and not samefile:
1010 repo.wvfs.unlinkpath(abssrc)
1010 repo.wvfs.unlinkpath(abssrc)
1011 wctx.forget([abssrc])
1011 wctx.forget([abssrc])
1012
1012
1013 # pat: ossep
1013 # pat: ossep
1014 # dest ossep
1014 # dest ossep
1015 # srcs: list of (hgsep, hgsep, ossep, bool)
1015 # srcs: list of (hgsep, hgsep, ossep, bool)
1016 # return: function that takes hgsep and returns ossep
1016 # return: function that takes hgsep and returns ossep
1017 def targetpathfn(pat, dest, srcs):
1017 def targetpathfn(pat, dest, srcs):
1018 if os.path.isdir(pat):
1018 if os.path.isdir(pat):
1019 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1019 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1020 abspfx = util.localpath(abspfx)
1020 abspfx = util.localpath(abspfx)
1021 if destdirexists:
1021 if destdirexists:
1022 striplen = len(os.path.split(abspfx)[0])
1022 striplen = len(os.path.split(abspfx)[0])
1023 else:
1023 else:
1024 striplen = len(abspfx)
1024 striplen = len(abspfx)
1025 if striplen:
1025 if striplen:
1026 striplen += len(pycompat.ossep)
1026 striplen += len(pycompat.ossep)
1027 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1027 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1028 elif destdirexists:
1028 elif destdirexists:
1029 res = lambda p: os.path.join(dest,
1029 res = lambda p: os.path.join(dest,
1030 os.path.basename(util.localpath(p)))
1030 os.path.basename(util.localpath(p)))
1031 else:
1031 else:
1032 res = lambda p: dest
1032 res = lambda p: dest
1033 return res
1033 return res
1034
1034
1035 # pat: ossep
1035 # pat: ossep
1036 # dest ossep
1036 # dest ossep
1037 # srcs: list of (hgsep, hgsep, ossep, bool)
1037 # srcs: list of (hgsep, hgsep, ossep, bool)
1038 # return: function that takes hgsep and returns ossep
1038 # return: function that takes hgsep and returns ossep
1039 def targetpathafterfn(pat, dest, srcs):
1039 def targetpathafterfn(pat, dest, srcs):
1040 if matchmod.patkind(pat):
1040 if matchmod.patkind(pat):
1041 # a mercurial pattern
1041 # a mercurial pattern
1042 res = lambda p: os.path.join(dest,
1042 res = lambda p: os.path.join(dest,
1043 os.path.basename(util.localpath(p)))
1043 os.path.basename(util.localpath(p)))
1044 else:
1044 else:
1045 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1045 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1046 if len(abspfx) < len(srcs[0][0]):
1046 if len(abspfx) < len(srcs[0][0]):
1047 # A directory. Either the target path contains the last
1047 # A directory. Either the target path contains the last
1048 # component of the source path or it does not.
1048 # component of the source path or it does not.
1049 def evalpath(striplen):
1049 def evalpath(striplen):
1050 score = 0
1050 score = 0
1051 for s in srcs:
1051 for s in srcs:
1052 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1052 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1053 if os.path.lexists(t):
1053 if os.path.lexists(t):
1054 score += 1
1054 score += 1
1055 return score
1055 return score
1056
1056
1057 abspfx = util.localpath(abspfx)
1057 abspfx = util.localpath(abspfx)
1058 striplen = len(abspfx)
1058 striplen = len(abspfx)
1059 if striplen:
1059 if striplen:
1060 striplen += len(pycompat.ossep)
1060 striplen += len(pycompat.ossep)
1061 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1061 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1062 score = evalpath(striplen)
1062 score = evalpath(striplen)
1063 striplen1 = len(os.path.split(abspfx)[0])
1063 striplen1 = len(os.path.split(abspfx)[0])
1064 if striplen1:
1064 if striplen1:
1065 striplen1 += len(pycompat.ossep)
1065 striplen1 += len(pycompat.ossep)
1066 if evalpath(striplen1) > score:
1066 if evalpath(striplen1) > score:
1067 striplen = striplen1
1067 striplen = striplen1
1068 res = lambda p: os.path.join(dest,
1068 res = lambda p: os.path.join(dest,
1069 util.localpath(p)[striplen:])
1069 util.localpath(p)[striplen:])
1070 else:
1070 else:
1071 # a file
1071 # a file
1072 if destdirexists:
1072 if destdirexists:
1073 res = lambda p: os.path.join(dest,
1073 res = lambda p: os.path.join(dest,
1074 os.path.basename(util.localpath(p)))
1074 os.path.basename(util.localpath(p)))
1075 else:
1075 else:
1076 res = lambda p: dest
1076 res = lambda p: dest
1077 return res
1077 return res
1078
1078
1079 pats = scmutil.expandpats(pats)
1079 pats = scmutil.expandpats(pats)
1080 if not pats:
1080 if not pats:
1081 raise error.Abort(_('no source or destination specified'))
1081 raise error.Abort(_('no source or destination specified'))
1082 if len(pats) == 1:
1082 if len(pats) == 1:
1083 raise error.Abort(_('no destination specified'))
1083 raise error.Abort(_('no destination specified'))
1084 dest = pats.pop()
1084 dest = pats.pop()
1085 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1085 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1086 if not destdirexists:
1086 if not destdirexists:
1087 if len(pats) > 1 or matchmod.patkind(pats[0]):
1087 if len(pats) > 1 or matchmod.patkind(pats[0]):
1088 raise error.Abort(_('with multiple sources, destination must be an '
1088 raise error.Abort(_('with multiple sources, destination must be an '
1089 'existing directory'))
1089 'existing directory'))
1090 if util.endswithsep(dest):
1090 if util.endswithsep(dest):
1091 raise error.Abort(_('destination %s is not a directory') % dest)
1091 raise error.Abort(_('destination %s is not a directory') % dest)
1092
1092
1093 tfn = targetpathfn
1093 tfn = targetpathfn
1094 if after:
1094 if after:
1095 tfn = targetpathafterfn
1095 tfn = targetpathafterfn
1096 copylist = []
1096 copylist = []
1097 for pat in pats:
1097 for pat in pats:
1098 srcs = walkpat(pat)
1098 srcs = walkpat(pat)
1099 if not srcs:
1099 if not srcs:
1100 continue
1100 continue
1101 copylist.append((tfn(pat, dest, srcs), srcs))
1101 copylist.append((tfn(pat, dest, srcs), srcs))
1102 if not copylist:
1102 if not copylist:
1103 raise error.Abort(_('no files to copy'))
1103 raise error.Abort(_('no files to copy'))
1104
1104
1105 errors = 0
1105 errors = 0
1106 for targetpath, srcs in copylist:
1106 for targetpath, srcs in copylist:
1107 for abssrc, relsrc, exact in srcs:
1107 for abssrc, relsrc, exact in srcs:
1108 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1108 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1109 errors += 1
1109 errors += 1
1110
1110
1111 if errors:
1111 if errors:
1112 ui.warn(_('(consider using --after)\n'))
1112 ui.warn(_('(consider using --after)\n'))
1113
1113
1114 return errors != 0
1114 return errors != 0
1115
1115
1116 ## facility to let extension process additional data into an import patch
1116 ## facility to let extension process additional data into an import patch
1117 # list of identifier to be executed in order
1117 # list of identifier to be executed in order
1118 extrapreimport = [] # run before commit
1118 extrapreimport = [] # run before commit
1119 extrapostimport = [] # run after commit
1119 extrapostimport = [] # run after commit
1120 # mapping from identifier to actual import function
1120 # mapping from identifier to actual import function
1121 #
1121 #
1122 # 'preimport' are run before the commit is made and are provided the following
1122 # 'preimport' are run before the commit is made and are provided the following
1123 # arguments:
1123 # arguments:
1124 # - repo: the localrepository instance,
1124 # - repo: the localrepository instance,
1125 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1125 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1126 # - extra: the future extra dictionary of the changeset, please mutate it,
1126 # - extra: the future extra dictionary of the changeset, please mutate it,
1127 # - opts: the import options.
1127 # - opts: the import options.
1128 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1128 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
1129 # mutation of in memory commit and more. Feel free to rework the code to get
1129 # mutation of in memory commit and more. Feel free to rework the code to get
1130 # there.
1130 # there.
1131 extrapreimportmap = {}
1131 extrapreimportmap = {}
1132 # 'postimport' are run after the commit is made and are provided the following
1132 # 'postimport' are run after the commit is made and are provided the following
1133 # argument:
1133 # argument:
1134 # - ctx: the changectx created by import.
1134 # - ctx: the changectx created by import.
1135 extrapostimportmap = {}
1135 extrapostimportmap = {}
1136
1136
1137 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1137 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1138 """Utility function used by commands.import to import a single patch
1138 """Utility function used by commands.import to import a single patch
1139
1139
1140 This function is explicitly defined here to help the evolve extension to
1140 This function is explicitly defined here to help the evolve extension to
1141 wrap this part of the import logic.
1141 wrap this part of the import logic.
1142
1142
1143 The API is currently a bit ugly because it a simple code translation from
1143 The API is currently a bit ugly because it a simple code translation from
1144 the import command. Feel free to make it better.
1144 the import command. Feel free to make it better.
1145
1145
1146 :hunk: a patch (as a binary string)
1146 :hunk: a patch (as a binary string)
1147 :parents: nodes that will be parent of the created commit
1147 :parents: nodes that will be parent of the created commit
1148 :opts: the full dict of option passed to the import command
1148 :opts: the full dict of option passed to the import command
1149 :msgs: list to save commit message to.
1149 :msgs: list to save commit message to.
1150 (used in case we need to save it when failing)
1150 (used in case we need to save it when failing)
1151 :updatefunc: a function that update a repo to a given node
1151 :updatefunc: a function that update a repo to a given node
1152 updatefunc(<repo>, <node>)
1152 updatefunc(<repo>, <node>)
1153 """
1153 """
1154 # avoid cycle context -> subrepo -> cmdutil
1154 # avoid cycle context -> subrepo -> cmdutil
1155 from . import context
1155 from . import context
1156 extractdata = patch.extract(ui, hunk)
1156 extractdata = patch.extract(ui, hunk)
1157 tmpname = extractdata.get('filename')
1157 tmpname = extractdata.get('filename')
1158 message = extractdata.get('message')
1158 message = extractdata.get('message')
1159 user = opts.get('user') or extractdata.get('user')
1159 user = opts.get('user') or extractdata.get('user')
1160 date = opts.get('date') or extractdata.get('date')
1160 date = opts.get('date') or extractdata.get('date')
1161 branch = extractdata.get('branch')
1161 branch = extractdata.get('branch')
1162 nodeid = extractdata.get('nodeid')
1162 nodeid = extractdata.get('nodeid')
1163 p1 = extractdata.get('p1')
1163 p1 = extractdata.get('p1')
1164 p2 = extractdata.get('p2')
1164 p2 = extractdata.get('p2')
1165
1165
1166 nocommit = opts.get('no_commit')
1166 nocommit = opts.get('no_commit')
1167 importbranch = opts.get('import_branch')
1167 importbranch = opts.get('import_branch')
1168 update = not opts.get('bypass')
1168 update = not opts.get('bypass')
1169 strip = opts["strip"]
1169 strip = opts["strip"]
1170 prefix = opts["prefix"]
1170 prefix = opts["prefix"]
1171 sim = float(opts.get('similarity') or 0)
1171 sim = float(opts.get('similarity') or 0)
1172 if not tmpname:
1172 if not tmpname:
1173 return (None, None, False)
1173 return (None, None, False)
1174
1174
1175 rejects = False
1175 rejects = False
1176
1176
1177 try:
1177 try:
1178 cmdline_message = logmessage(ui, opts)
1178 cmdline_message = logmessage(ui, opts)
1179 if cmdline_message:
1179 if cmdline_message:
1180 # pickup the cmdline msg
1180 # pickup the cmdline msg
1181 message = cmdline_message
1181 message = cmdline_message
1182 elif message:
1182 elif message:
1183 # pickup the patch msg
1183 # pickup the patch msg
1184 message = message.strip()
1184 message = message.strip()
1185 else:
1185 else:
1186 # launch the editor
1186 # launch the editor
1187 message = None
1187 message = None
1188 ui.debug('message:\n%s\n' % message)
1188 ui.debug('message:\n%s\n' % message)
1189
1189
1190 if len(parents) == 1:
1190 if len(parents) == 1:
1191 parents.append(repo[nullid])
1191 parents.append(repo[nullid])
1192 if opts.get('exact'):
1192 if opts.get('exact'):
1193 if not nodeid or not p1:
1193 if not nodeid or not p1:
1194 raise error.Abort(_('not a Mercurial patch'))
1194 raise error.Abort(_('not a Mercurial patch'))
1195 p1 = repo[p1]
1195 p1 = repo[p1]
1196 p2 = repo[p2 or nullid]
1196 p2 = repo[p2 or nullid]
1197 elif p2:
1197 elif p2:
1198 try:
1198 try:
1199 p1 = repo[p1]
1199 p1 = repo[p1]
1200 p2 = repo[p2]
1200 p2 = repo[p2]
1201 # Without any options, consider p2 only if the
1201 # Without any options, consider p2 only if the
1202 # patch is being applied on top of the recorded
1202 # patch is being applied on top of the recorded
1203 # first parent.
1203 # first parent.
1204 if p1 != parents[0]:
1204 if p1 != parents[0]:
1205 p1 = parents[0]
1205 p1 = parents[0]
1206 p2 = repo[nullid]
1206 p2 = repo[nullid]
1207 except error.RepoError:
1207 except error.RepoError:
1208 p1, p2 = parents
1208 p1, p2 = parents
1209 if p2.node() == nullid:
1209 if p2.node() == nullid:
1210 ui.warn(_("warning: import the patch as a normal revision\n"
1210 ui.warn(_("warning: import the patch as a normal revision\n"
1211 "(use --exact to import the patch as a merge)\n"))
1211 "(use --exact to import the patch as a merge)\n"))
1212 else:
1212 else:
1213 p1, p2 = parents
1213 p1, p2 = parents
1214
1214
1215 n = None
1215 n = None
1216 if update:
1216 if update:
1217 if p1 != parents[0]:
1217 if p1 != parents[0]:
1218 updatefunc(repo, p1.node())
1218 updatefunc(repo, p1.node())
1219 if p2 != parents[1]:
1219 if p2 != parents[1]:
1220 repo.setparents(p1.node(), p2.node())
1220 repo.setparents(p1.node(), p2.node())
1221
1221
1222 if opts.get('exact') or importbranch:
1222 if opts.get('exact') or importbranch:
1223 repo.dirstate.setbranch(branch or 'default')
1223 repo.dirstate.setbranch(branch or 'default')
1224
1224
1225 partial = opts.get('partial', False)
1225 partial = opts.get('partial', False)
1226 files = set()
1226 files = set()
1227 try:
1227 try:
1228 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1228 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1229 files=files, eolmode=None, similarity=sim / 100.0)
1229 files=files, eolmode=None, similarity=sim / 100.0)
1230 except patch.PatchError as e:
1230 except patch.PatchError as e:
1231 if not partial:
1231 if not partial:
1232 raise error.Abort(str(e))
1232 raise error.Abort(str(e))
1233 if partial:
1233 if partial:
1234 rejects = True
1234 rejects = True
1235
1235
1236 files = list(files)
1236 files = list(files)
1237 if nocommit:
1237 if nocommit:
1238 if message:
1238 if message:
1239 msgs.append(message)
1239 msgs.append(message)
1240 else:
1240 else:
1241 if opts.get('exact') or p2:
1241 if opts.get('exact') or p2:
1242 # If you got here, you either use --force and know what
1242 # If you got here, you either use --force and know what
1243 # you are doing or used --exact or a merge patch while
1243 # you are doing or used --exact or a merge patch while
1244 # being updated to its first parent.
1244 # being updated to its first parent.
1245 m = None
1245 m = None
1246 else:
1246 else:
1247 m = scmutil.matchfiles(repo, files or [])
1247 m = scmutil.matchfiles(repo, files or [])
1248 editform = mergeeditform(repo[None], 'import.normal')
1248 editform = mergeeditform(repo[None], 'import.normal')
1249 if opts.get('exact'):
1249 if opts.get('exact'):
1250 editor = None
1250 editor = None
1251 else:
1251 else:
1252 editor = getcommiteditor(editform=editform, **opts)
1252 editor = getcommiteditor(editform=editform, **opts)
1253 extra = {}
1253 extra = {}
1254 for idfunc in extrapreimport:
1254 for idfunc in extrapreimport:
1255 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1255 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1256 overrides = {}
1256 overrides = {}
1257 if partial:
1257 if partial:
1258 overrides[('ui', 'allowemptycommit')] = True
1258 overrides[('ui', 'allowemptycommit')] = True
1259 with repo.ui.configoverride(overrides, 'import'):
1259 with repo.ui.configoverride(overrides, 'import'):
1260 n = repo.commit(message, user,
1260 n = repo.commit(message, user,
1261 date, match=m,
1261 date, match=m,
1262 editor=editor, extra=extra)
1262 editor=editor, extra=extra)
1263 for idfunc in extrapostimport:
1263 for idfunc in extrapostimport:
1264 extrapostimportmap[idfunc](repo[n])
1264 extrapostimportmap[idfunc](repo[n])
1265 else:
1265 else:
1266 if opts.get('exact') or importbranch:
1266 if opts.get('exact') or importbranch:
1267 branch = branch or 'default'
1267 branch = branch or 'default'
1268 else:
1268 else:
1269 branch = p1.branch()
1269 branch = p1.branch()
1270 store = patch.filestore()
1270 store = patch.filestore()
1271 try:
1271 try:
1272 files = set()
1272 files = set()
1273 try:
1273 try:
1274 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1274 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1275 files, eolmode=None)
1275 files, eolmode=None)
1276 except patch.PatchError as e:
1276 except patch.PatchError as e:
1277 raise error.Abort(str(e))
1277 raise error.Abort(str(e))
1278 if opts.get('exact'):
1278 if opts.get('exact'):
1279 editor = None
1279 editor = None
1280 else:
1280 else:
1281 editor = getcommiteditor(editform='import.bypass')
1281 editor = getcommiteditor(editform='import.bypass')
1282 memctx = context.memctx(repo, (p1.node(), p2.node()),
1282 memctx = context.memctx(repo, (p1.node(), p2.node()),
1283 message,
1283 message,
1284 files=files,
1284 files=files,
1285 filectxfn=store,
1285 filectxfn=store,
1286 user=user,
1286 user=user,
1287 date=date,
1287 date=date,
1288 branch=branch,
1288 branch=branch,
1289 editor=editor)
1289 editor=editor)
1290 n = memctx.commit()
1290 n = memctx.commit()
1291 finally:
1291 finally:
1292 store.close()
1292 store.close()
1293 if opts.get('exact') and nocommit:
1293 if opts.get('exact') and nocommit:
1294 # --exact with --no-commit is still useful in that it does merge
1294 # --exact with --no-commit is still useful in that it does merge
1295 # and branch bits
1295 # and branch bits
1296 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1296 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1297 elif opts.get('exact') and hex(n) != nodeid:
1297 elif opts.get('exact') and hex(n) != nodeid:
1298 raise error.Abort(_('patch is damaged or loses information'))
1298 raise error.Abort(_('patch is damaged or loses information'))
1299 msg = _('applied to working directory')
1299 msg = _('applied to working directory')
1300 if n:
1300 if n:
1301 # i18n: refers to a short changeset id
1301 # i18n: refers to a short changeset id
1302 msg = _('created %s') % short(n)
1302 msg = _('created %s') % short(n)
1303 return (msg, n, rejects)
1303 return (msg, n, rejects)
1304 finally:
1304 finally:
1305 os.unlink(tmpname)
1305 os.unlink(tmpname)
1306
1306
1307 # facility to let extensions include additional data in an exported patch
1307 # facility to let extensions include additional data in an exported patch
1308 # list of identifiers to be executed in order
1308 # list of identifiers to be executed in order
1309 extraexport = []
1309 extraexport = []
1310 # mapping from identifier to actual export function
1310 # mapping from identifier to actual export function
1311 # function as to return a string to be added to the header or None
1311 # function as to return a string to be added to the header or None
1312 # it is given two arguments (sequencenumber, changectx)
1312 # it is given two arguments (sequencenumber, changectx)
1313 extraexportmap = {}
1313 extraexportmap = {}
1314
1314
1315 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1315 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1316 node = scmutil.binnode(ctx)
1316 node = scmutil.binnode(ctx)
1317 parents = [p.node() for p in ctx.parents() if p]
1317 parents = [p.node() for p in ctx.parents() if p]
1318 branch = ctx.branch()
1318 branch = ctx.branch()
1319 if switch_parent:
1319 if switch_parent:
1320 parents.reverse()
1320 parents.reverse()
1321
1321
1322 if parents:
1322 if parents:
1323 prev = parents[0]
1323 prev = parents[0]
1324 else:
1324 else:
1325 prev = nullid
1325 prev = nullid
1326
1326
1327 write("# HG changeset patch\n")
1327 write("# HG changeset patch\n")
1328 write("# User %s\n" % ctx.user())
1328 write("# User %s\n" % ctx.user())
1329 write("# Date %d %d\n" % ctx.date())
1329 write("# Date %d %d\n" % ctx.date())
1330 write("# %s\n" % util.datestr(ctx.date()))
1330 write("# %s\n" % util.datestr(ctx.date()))
1331 if branch and branch != 'default':
1331 if branch and branch != 'default':
1332 write("# Branch %s\n" % branch)
1332 write("# Branch %s\n" % branch)
1333 write("# Node ID %s\n" % hex(node))
1333 write("# Node ID %s\n" % hex(node))
1334 write("# Parent %s\n" % hex(prev))
1334 write("# Parent %s\n" % hex(prev))
1335 if len(parents) > 1:
1335 if len(parents) > 1:
1336 write("# Parent %s\n" % hex(parents[1]))
1336 write("# Parent %s\n" % hex(parents[1]))
1337
1337
1338 for headerid in extraexport:
1338 for headerid in extraexport:
1339 header = extraexportmap[headerid](seqno, ctx)
1339 header = extraexportmap[headerid](seqno, ctx)
1340 if header is not None:
1340 if header is not None:
1341 write('# %s\n' % header)
1341 write('# %s\n' % header)
1342 write(ctx.description().rstrip())
1342 write(ctx.description().rstrip())
1343 write("\n\n")
1343 write("\n\n")
1344
1344
1345 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1345 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1346 write(chunk, label=label)
1346 write(chunk, label=label)
1347
1347
1348 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1348 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1349 opts=None, match=None):
1349 opts=None, match=None):
1350 '''export changesets as hg patches
1350 '''export changesets as hg patches
1351
1351
1352 Args:
1352 Args:
1353 repo: The repository from which we're exporting revisions.
1353 repo: The repository from which we're exporting revisions.
1354 revs: A list of revisions to export as revision numbers.
1354 revs: A list of revisions to export as revision numbers.
1355 fntemplate: An optional string to use for generating patch file names.
1355 fntemplate: An optional string to use for generating patch file names.
1356 fp: An optional file-like object to which patches should be written.
1356 fp: An optional file-like object to which patches should be written.
1357 switch_parent: If True, show diffs against second parent when not nullid.
1357 switch_parent: If True, show diffs against second parent when not nullid.
1358 Default is false, which always shows diff against p1.
1358 Default is false, which always shows diff against p1.
1359 opts: diff options to use for generating the patch.
1359 opts: diff options to use for generating the patch.
1360 match: If specified, only export changes to files matching this matcher.
1360 match: If specified, only export changes to files matching this matcher.
1361
1361
1362 Returns:
1362 Returns:
1363 Nothing.
1363 Nothing.
1364
1364
1365 Side Effect:
1365 Side Effect:
1366 "HG Changeset Patch" data is emitted to one of the following
1366 "HG Changeset Patch" data is emitted to one of the following
1367 destinations:
1367 destinations:
1368 fp is specified: All revs are written to the specified
1368 fp is specified: All revs are written to the specified
1369 file-like object.
1369 file-like object.
1370 fntemplate specified: Each rev is written to a unique file named using
1370 fntemplate specified: Each rev is written to a unique file named using
1371 the given template.
1371 the given template.
1372 Neither fp nor template specified: All revs written to repo.ui.write()
1372 Neither fp nor template specified: All revs written to repo.ui.write()
1373 '''
1373 '''
1374
1374
1375 total = len(revs)
1375 total = len(revs)
1376 revwidth = max(len(str(rev)) for rev in revs)
1376 revwidth = max(len(str(rev)) for rev in revs)
1377 filemode = {}
1377 filemode = {}
1378
1378
1379 write = None
1379 write = None
1380 dest = '<unnamed>'
1380 dest = '<unnamed>'
1381 if fp:
1381 if fp:
1382 dest = getattr(fp, 'name', dest)
1382 dest = getattr(fp, 'name', dest)
1383 def write(s, **kw):
1383 def write(s, **kw):
1384 fp.write(s)
1384 fp.write(s)
1385 elif not fntemplate:
1385 elif not fntemplate:
1386 write = repo.ui.write
1386 write = repo.ui.write
1387
1387
1388 for seqno, rev in enumerate(revs, 1):
1388 for seqno, rev in enumerate(revs, 1):
1389 ctx = repo[rev]
1389 ctx = repo[rev]
1390 fo = None
1390 fo = None
1391 if not fp and fntemplate:
1391 if not fp and fntemplate:
1392 desc_lines = ctx.description().rstrip().split('\n')
1392 desc_lines = ctx.description().rstrip().split('\n')
1393 desc = desc_lines[0] #Commit always has a first line.
1393 desc = desc_lines[0] #Commit always has a first line.
1394 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1394 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1395 total=total, seqno=seqno, revwidth=revwidth,
1395 total=total, seqno=seqno, revwidth=revwidth,
1396 mode='wb', modemap=filemode)
1396 mode='wb', modemap=filemode)
1397 dest = fo.name
1397 dest = fo.name
1398 def write(s, **kw):
1398 def write(s, **kw):
1399 fo.write(s)
1399 fo.write(s)
1400 if not dest.startswith('<'):
1400 if not dest.startswith('<'):
1401 repo.ui.note("%s\n" % dest)
1401 repo.ui.note("%s\n" % dest)
1402 _exportsingle(
1402 _exportsingle(
1403 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1403 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1404 if fo is not None:
1404 if fo is not None:
1405 fo.close()
1405 fo.close()
1406
1406
1407 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1407 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1408 changes=None, stat=False, fp=None, prefix='',
1408 changes=None, stat=False, fp=None, prefix='',
1409 root='', listsubrepos=False):
1409 root='', listsubrepos=False):
1410 '''show diff or diffstat.'''
1410 '''show diff or diffstat.'''
1411 if fp is None:
1411 if fp is None:
1412 write = ui.write
1412 write = ui.write
1413 else:
1413 else:
1414 def write(s, **kw):
1414 def write(s, **kw):
1415 fp.write(s)
1415 fp.write(s)
1416
1416
1417 if root:
1417 if root:
1418 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1418 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1419 else:
1419 else:
1420 relroot = ''
1420 relroot = ''
1421 if relroot != '':
1421 if relroot != '':
1422 # XXX relative roots currently don't work if the root is within a
1422 # XXX relative roots currently don't work if the root is within a
1423 # subrepo
1423 # subrepo
1424 uirelroot = match.uipath(relroot)
1424 uirelroot = match.uipath(relroot)
1425 relroot += '/'
1425 relroot += '/'
1426 for matchroot in match.files():
1426 for matchroot in match.files():
1427 if not matchroot.startswith(relroot):
1427 if not matchroot.startswith(relroot):
1428 ui.warn(_('warning: %s not inside relative root %s\n') % (
1428 ui.warn(_('warning: %s not inside relative root %s\n') % (
1429 match.uipath(matchroot), uirelroot))
1429 match.uipath(matchroot), uirelroot))
1430
1430
1431 if stat:
1431 if stat:
1432 diffopts = diffopts.copy(context=0)
1432 diffopts = diffopts.copy(context=0)
1433 width = 80
1433 width = 80
1434 if not ui.plain():
1434 if not ui.plain():
1435 width = ui.termwidth()
1435 width = ui.termwidth()
1436 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1436 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1437 prefix=prefix, relroot=relroot)
1437 prefix=prefix, relroot=relroot)
1438 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1438 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1439 width=width):
1439 width=width):
1440 write(chunk, label=label)
1440 write(chunk, label=label)
1441 else:
1441 else:
1442 for chunk, label in patch.diffui(repo, node1, node2, match,
1442 for chunk, label in patch.diffui(repo, node1, node2, match,
1443 changes, diffopts, prefix=prefix,
1443 changes, diffopts, prefix=prefix,
1444 relroot=relroot):
1444 relroot=relroot):
1445 write(chunk, label=label)
1445 write(chunk, label=label)
1446
1446
1447 if listsubrepos:
1447 if listsubrepos:
1448 ctx1 = repo[node1]
1448 ctx1 = repo[node1]
1449 ctx2 = repo[node2]
1449 ctx2 = repo[node2]
1450 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1450 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1451 tempnode2 = node2
1451 tempnode2 = node2
1452 try:
1452 try:
1453 if node2 is not None:
1453 if node2 is not None:
1454 tempnode2 = ctx2.substate[subpath][1]
1454 tempnode2 = ctx2.substate[subpath][1]
1455 except KeyError:
1455 except KeyError:
1456 # A subrepo that existed in node1 was deleted between node1 and
1456 # A subrepo that existed in node1 was deleted between node1 and
1457 # node2 (inclusive). Thus, ctx2's substate won't contain that
1457 # node2 (inclusive). Thus, ctx2's substate won't contain that
1458 # subpath. The best we can do is to ignore it.
1458 # subpath. The best we can do is to ignore it.
1459 tempnode2 = None
1459 tempnode2 = None
1460 submatch = matchmod.subdirmatcher(subpath, match)
1460 submatch = matchmod.subdirmatcher(subpath, match)
1461 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1461 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1462 stat=stat, fp=fp, prefix=prefix)
1462 stat=stat, fp=fp, prefix=prefix)
1463
1463
1464 def _changesetlabels(ctx):
1464 def _changesetlabels(ctx):
1465 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1465 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1466 if ctx.obsolete():
1466 if ctx.obsolete():
1467 labels.append('changeset.obsolete')
1467 labels.append('changeset.obsolete')
1468 if ctx.isunstable():
1468 if ctx.isunstable():
1469 labels.append('changeset.troubled')
1469 labels.append('changeset.troubled')
1470 for instability in ctx.instabilities():
1470 for instability in ctx.instabilities():
1471 labels.append('trouble.%s' % instability)
1471 labels.append('trouble.%s' % instability)
1472 return ' '.join(labels)
1472 return ' '.join(labels)
1473
1473
1474 class changeset_printer(object):
1474 class changeset_printer(object):
1475 '''show changeset information when templating not requested.'''
1475 '''show changeset information when templating not requested.'''
1476
1476
1477 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1477 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1478 self.ui = ui
1478 self.ui = ui
1479 self.repo = repo
1479 self.repo = repo
1480 self.buffered = buffered
1480 self.buffered = buffered
1481 self.matchfn = matchfn
1481 self.matchfn = matchfn
1482 self.diffopts = diffopts
1482 self.diffopts = diffopts
1483 self.header = {}
1483 self.header = {}
1484 self.hunk = {}
1484 self.hunk = {}
1485 self.lastheader = None
1485 self.lastheader = None
1486 self.footer = None
1486 self.footer = None
1487
1487
1488 def flush(self, ctx):
1488 def flush(self, ctx):
1489 rev = ctx.rev()
1489 rev = ctx.rev()
1490 if rev in self.header:
1490 if rev in self.header:
1491 h = self.header[rev]
1491 h = self.header[rev]
1492 if h != self.lastheader:
1492 if h != self.lastheader:
1493 self.lastheader = h
1493 self.lastheader = h
1494 self.ui.write(h)
1494 self.ui.write(h)
1495 del self.header[rev]
1495 del self.header[rev]
1496 if rev in self.hunk:
1496 if rev in self.hunk:
1497 self.ui.write(self.hunk[rev])
1497 self.ui.write(self.hunk[rev])
1498 del self.hunk[rev]
1498 del self.hunk[rev]
1499 return 1
1499 return 1
1500 return 0
1500 return 0
1501
1501
1502 def close(self):
1502 def close(self):
1503 if self.footer:
1503 if self.footer:
1504 self.ui.write(self.footer)
1504 self.ui.write(self.footer)
1505
1505
1506 def show(self, ctx, copies=None, matchfn=None, **props):
1506 def show(self, ctx, copies=None, matchfn=None, **props):
1507 props = pycompat.byteskwargs(props)
1507 props = pycompat.byteskwargs(props)
1508 if self.buffered:
1508 if self.buffered:
1509 self.ui.pushbuffer(labeled=True)
1509 self.ui.pushbuffer(labeled=True)
1510 self._show(ctx, copies, matchfn, props)
1510 self._show(ctx, copies, matchfn, props)
1511 self.hunk[ctx.rev()] = self.ui.popbuffer()
1511 self.hunk[ctx.rev()] = self.ui.popbuffer()
1512 else:
1512 else:
1513 self._show(ctx, copies, matchfn, props)
1513 self._show(ctx, copies, matchfn, props)
1514
1514
1515 def _show(self, ctx, copies, matchfn, props):
1515 def _show(self, ctx, copies, matchfn, props):
1516 '''show a single changeset or file revision'''
1516 '''show a single changeset or file revision'''
1517 changenode = ctx.node()
1517 changenode = ctx.node()
1518 rev = ctx.rev()
1518 rev = ctx.rev()
1519 if self.ui.debugflag:
1519 if self.ui.debugflag:
1520 hexfunc = hex
1520 hexfunc = hex
1521 else:
1521 else:
1522 hexfunc = short
1522 hexfunc = short
1523 # as of now, wctx.node() and wctx.rev() return None, but we want to
1523 # as of now, wctx.node() and wctx.rev() return None, but we want to
1524 # show the same values as {node} and {rev} templatekw
1524 # show the same values as {node} and {rev} templatekw
1525 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1525 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1526
1526
1527 if self.ui.quiet:
1527 if self.ui.quiet:
1528 self.ui.write("%d:%s\n" % revnode, label='log.node')
1528 self.ui.write("%d:%s\n" % revnode, label='log.node')
1529 return
1529 return
1530
1530
1531 date = util.datestr(ctx.date())
1531 date = util.datestr(ctx.date())
1532
1532
1533 # i18n: column positioning for "hg log"
1533 # i18n: column positioning for "hg log"
1534 self.ui.write(_("changeset: %d:%s\n") % revnode,
1534 self.ui.write(_("changeset: %d:%s\n") % revnode,
1535 label=_changesetlabels(ctx))
1535 label=_changesetlabels(ctx))
1536
1536
1537 # branches are shown first before any other names due to backwards
1537 # branches are shown first before any other names due to backwards
1538 # compatibility
1538 # compatibility
1539 branch = ctx.branch()
1539 branch = ctx.branch()
1540 # don't show the default branch name
1540 # don't show the default branch name
1541 if branch != 'default':
1541 if branch != 'default':
1542 # i18n: column positioning for "hg log"
1542 # i18n: column positioning for "hg log"
1543 self.ui.write(_("branch: %s\n") % branch,
1543 self.ui.write(_("branch: %s\n") % branch,
1544 label='log.branch')
1544 label='log.branch')
1545
1545
1546 for nsname, ns in self.repo.names.iteritems():
1546 for nsname, ns in self.repo.names.iteritems():
1547 # branches has special logic already handled above, so here we just
1547 # branches has special logic already handled above, so here we just
1548 # skip it
1548 # skip it
1549 if nsname == 'branches':
1549 if nsname == 'branches':
1550 continue
1550 continue
1551 # we will use the templatename as the color name since those two
1551 # we will use the templatename as the color name since those two
1552 # should be the same
1552 # should be the same
1553 for name in ns.names(self.repo, changenode):
1553 for name in ns.names(self.repo, changenode):
1554 self.ui.write(ns.logfmt % name,
1554 self.ui.write(ns.logfmt % name,
1555 label='log.%s' % ns.colorname)
1555 label='log.%s' % ns.colorname)
1556 if self.ui.debugflag:
1556 if self.ui.debugflag:
1557 # i18n: column positioning for "hg log"
1557 # i18n: column positioning for "hg log"
1558 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1558 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1559 label='log.phase')
1559 label='log.phase')
1560 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1560 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1561 label = 'log.parent changeset.%s' % pctx.phasestr()
1561 label = 'log.parent changeset.%s' % pctx.phasestr()
1562 # i18n: column positioning for "hg log"
1562 # i18n: column positioning for "hg log"
1563 self.ui.write(_("parent: %d:%s\n")
1563 self.ui.write(_("parent: %d:%s\n")
1564 % (pctx.rev(), hexfunc(pctx.node())),
1564 % (pctx.rev(), hexfunc(pctx.node())),
1565 label=label)
1565 label=label)
1566
1566
1567 if self.ui.debugflag and rev is not None:
1567 if self.ui.debugflag and rev is not None:
1568 mnode = ctx.manifestnode()
1568 mnode = ctx.manifestnode()
1569 # i18n: column positioning for "hg log"
1569 # i18n: column positioning for "hg log"
1570 self.ui.write(_("manifest: %d:%s\n") %
1570 self.ui.write(_("manifest: %d:%s\n") %
1571 (self.repo.manifestlog._revlog.rev(mnode),
1571 (self.repo.manifestlog._revlog.rev(mnode),
1572 hex(mnode)),
1572 hex(mnode)),
1573 label='ui.debug log.manifest')
1573 label='ui.debug log.manifest')
1574 # i18n: column positioning for "hg log"
1574 # i18n: column positioning for "hg log"
1575 self.ui.write(_("user: %s\n") % ctx.user(),
1575 self.ui.write(_("user: %s\n") % ctx.user(),
1576 label='log.user')
1576 label='log.user')
1577 # i18n: column positioning for "hg log"
1577 # i18n: column positioning for "hg log"
1578 self.ui.write(_("date: %s\n") % date,
1578 self.ui.write(_("date: %s\n") % date,
1579 label='log.date')
1579 label='log.date')
1580
1580
1581 if ctx.isunstable():
1581 if ctx.isunstable():
1582 # i18n: column positioning for "hg log"
1582 # i18n: column positioning for "hg log"
1583 instabilities = ctx.instabilities()
1583 instabilities = ctx.instabilities()
1584 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1584 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1585 label='log.trouble')
1585 label='log.trouble')
1586
1586
1587 self._exthook(ctx)
1587 self._exthook(ctx)
1588
1588
1589 if self.ui.debugflag:
1589 if self.ui.debugflag:
1590 files = ctx.p1().status(ctx)[:3]
1590 files = ctx.p1().status(ctx)[:3]
1591 for key, value in zip([# i18n: column positioning for "hg log"
1591 for key, value in zip([# i18n: column positioning for "hg log"
1592 _("files:"),
1592 _("files:"),
1593 # i18n: column positioning for "hg log"
1593 # i18n: column positioning for "hg log"
1594 _("files+:"),
1594 _("files+:"),
1595 # i18n: column positioning for "hg log"
1595 # i18n: column positioning for "hg log"
1596 _("files-:")], files):
1596 _("files-:")], files):
1597 if value:
1597 if value:
1598 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1598 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1599 label='ui.debug log.files')
1599 label='ui.debug log.files')
1600 elif ctx.files() and self.ui.verbose:
1600 elif ctx.files() and self.ui.verbose:
1601 # i18n: column positioning for "hg log"
1601 # i18n: column positioning for "hg log"
1602 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1602 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1603 label='ui.note log.files')
1603 label='ui.note log.files')
1604 if copies and self.ui.verbose:
1604 if copies and self.ui.verbose:
1605 copies = ['%s (%s)' % c for c in copies]
1605 copies = ['%s (%s)' % c for c in copies]
1606 # i18n: column positioning for "hg log"
1606 # i18n: column positioning for "hg log"
1607 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1607 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1608 label='ui.note log.copies')
1608 label='ui.note log.copies')
1609
1609
1610 extra = ctx.extra()
1610 extra = ctx.extra()
1611 if extra and self.ui.debugflag:
1611 if extra and self.ui.debugflag:
1612 for key, value in sorted(extra.items()):
1612 for key, value in sorted(extra.items()):
1613 # i18n: column positioning for "hg log"
1613 # i18n: column positioning for "hg log"
1614 self.ui.write(_("extra: %s=%s\n")
1614 self.ui.write(_("extra: %s=%s\n")
1615 % (key, util.escapestr(value)),
1615 % (key, util.escapestr(value)),
1616 label='ui.debug log.extra')
1616 label='ui.debug log.extra')
1617
1617
1618 description = ctx.description().strip()
1618 description = ctx.description().strip()
1619 if description:
1619 if description:
1620 if self.ui.verbose:
1620 if self.ui.verbose:
1621 self.ui.write(_("description:\n"),
1621 self.ui.write(_("description:\n"),
1622 label='ui.note log.description')
1622 label='ui.note log.description')
1623 self.ui.write(description,
1623 self.ui.write(description,
1624 label='ui.note log.description')
1624 label='ui.note log.description')
1625 self.ui.write("\n\n")
1625 self.ui.write("\n\n")
1626 else:
1626 else:
1627 # i18n: column positioning for "hg log"
1627 # i18n: column positioning for "hg log"
1628 self.ui.write(_("summary: %s\n") %
1628 self.ui.write(_("summary: %s\n") %
1629 description.splitlines()[0],
1629 description.splitlines()[0],
1630 label='log.summary')
1630 label='log.summary')
1631 self.ui.write("\n")
1631 self.ui.write("\n")
1632
1632
1633 self.showpatch(ctx, matchfn)
1633 self.showpatch(ctx, matchfn)
1634
1634
1635 def _exthook(self, ctx):
1635 def _exthook(self, ctx):
1636 '''empty method used by extension as a hook point
1636 '''empty method used by extension as a hook point
1637 '''
1637 '''
1638 pass
1638 pass
1639
1639
1640 def showpatch(self, ctx, matchfn):
1640 def showpatch(self, ctx, matchfn):
1641 if not matchfn:
1641 if not matchfn:
1642 matchfn = self.matchfn
1642 matchfn = self.matchfn
1643 if matchfn:
1643 if matchfn:
1644 stat = self.diffopts.get('stat')
1644 stat = self.diffopts.get('stat')
1645 diff = self.diffopts.get('patch')
1645 diff = self.diffopts.get('patch')
1646 diffopts = patch.diffallopts(self.ui, self.diffopts)
1646 diffopts = patch.diffallopts(self.ui, self.diffopts)
1647 node = ctx.node()
1647 node = ctx.node()
1648 prev = ctx.p1().node()
1648 prev = ctx.p1().node()
1649 if stat:
1649 if stat:
1650 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1650 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1651 match=matchfn, stat=True)
1651 match=matchfn, stat=True)
1652 if diff:
1652 if diff:
1653 if stat:
1653 if stat:
1654 self.ui.write("\n")
1654 self.ui.write("\n")
1655 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1655 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1656 match=matchfn, stat=False)
1656 match=matchfn, stat=False)
1657 self.ui.write("\n")
1657 self.ui.write("\n")
1658
1658
1659 class jsonchangeset(changeset_printer):
1659 class jsonchangeset(changeset_printer):
1660 '''format changeset information.'''
1660 '''format changeset information.'''
1661
1661
1662 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1662 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1663 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1663 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1664 self.cache = {}
1664 self.cache = {}
1665 self._first = True
1665 self._first = True
1666
1666
1667 def close(self):
1667 def close(self):
1668 if not self._first:
1668 if not self._first:
1669 self.ui.write("\n]\n")
1669 self.ui.write("\n]\n")
1670 else:
1670 else:
1671 self.ui.write("[]\n")
1671 self.ui.write("[]\n")
1672
1672
1673 def _show(self, ctx, copies, matchfn, props):
1673 def _show(self, ctx, copies, matchfn, props):
1674 '''show a single changeset or file revision'''
1674 '''show a single changeset or file revision'''
1675 rev = ctx.rev()
1675 rev = ctx.rev()
1676 if rev is None:
1676 if rev is None:
1677 jrev = jnode = 'null'
1677 jrev = jnode = 'null'
1678 else:
1678 else:
1679 jrev = '%d' % rev
1679 jrev = '%d' % rev
1680 jnode = '"%s"' % hex(ctx.node())
1680 jnode = '"%s"' % hex(ctx.node())
1681 j = encoding.jsonescape
1681 j = encoding.jsonescape
1682
1682
1683 if self._first:
1683 if self._first:
1684 self.ui.write("[\n {")
1684 self.ui.write("[\n {")
1685 self._first = False
1685 self._first = False
1686 else:
1686 else:
1687 self.ui.write(",\n {")
1687 self.ui.write(",\n {")
1688
1688
1689 if self.ui.quiet:
1689 if self.ui.quiet:
1690 self.ui.write(('\n "rev": %s') % jrev)
1690 self.ui.write(('\n "rev": %s') % jrev)
1691 self.ui.write((',\n "node": %s') % jnode)
1691 self.ui.write((',\n "node": %s') % jnode)
1692 self.ui.write('\n }')
1692 self.ui.write('\n }')
1693 return
1693 return
1694
1694
1695 self.ui.write(('\n "rev": %s') % jrev)
1695 self.ui.write(('\n "rev": %s') % jrev)
1696 self.ui.write((',\n "node": %s') % jnode)
1696 self.ui.write((',\n "node": %s') % jnode)
1697 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1697 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1698 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1698 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1699 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1699 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1700 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1700 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1701 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1701 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1702
1702
1703 self.ui.write((',\n "bookmarks": [%s]') %
1703 self.ui.write((',\n "bookmarks": [%s]') %
1704 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1704 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1705 self.ui.write((',\n "tags": [%s]') %
1705 self.ui.write((',\n "tags": [%s]') %
1706 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1706 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1707 self.ui.write((',\n "parents": [%s]') %
1707 self.ui.write((',\n "parents": [%s]') %
1708 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1708 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1709
1709
1710 if self.ui.debugflag:
1710 if self.ui.debugflag:
1711 if rev is None:
1711 if rev is None:
1712 jmanifestnode = 'null'
1712 jmanifestnode = 'null'
1713 else:
1713 else:
1714 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1714 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1715 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1715 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1716
1716
1717 self.ui.write((',\n "extra": {%s}') %
1717 self.ui.write((',\n "extra": {%s}') %
1718 ", ".join('"%s": "%s"' % (j(k), j(v))
1718 ", ".join('"%s": "%s"' % (j(k), j(v))
1719 for k, v in ctx.extra().items()))
1719 for k, v in ctx.extra().items()))
1720
1720
1721 files = ctx.p1().status(ctx)
1721 files = ctx.p1().status(ctx)
1722 self.ui.write((',\n "modified": [%s]') %
1722 self.ui.write((',\n "modified": [%s]') %
1723 ", ".join('"%s"' % j(f) for f in files[0]))
1723 ", ".join('"%s"' % j(f) for f in files[0]))
1724 self.ui.write((',\n "added": [%s]') %
1724 self.ui.write((',\n "added": [%s]') %
1725 ", ".join('"%s"' % j(f) for f in files[1]))
1725 ", ".join('"%s"' % j(f) for f in files[1]))
1726 self.ui.write((',\n "removed": [%s]') %
1726 self.ui.write((',\n "removed": [%s]') %
1727 ", ".join('"%s"' % j(f) for f in files[2]))
1727 ", ".join('"%s"' % j(f) for f in files[2]))
1728
1728
1729 elif self.ui.verbose:
1729 elif self.ui.verbose:
1730 self.ui.write((',\n "files": [%s]') %
1730 self.ui.write((',\n "files": [%s]') %
1731 ", ".join('"%s"' % j(f) for f in ctx.files()))
1731 ", ".join('"%s"' % j(f) for f in ctx.files()))
1732
1732
1733 if copies:
1733 if copies:
1734 self.ui.write((',\n "copies": {%s}') %
1734 self.ui.write((',\n "copies": {%s}') %
1735 ", ".join('"%s": "%s"' % (j(k), j(v))
1735 ", ".join('"%s": "%s"' % (j(k), j(v))
1736 for k, v in copies))
1736 for k, v in copies))
1737
1737
1738 matchfn = self.matchfn
1738 matchfn = self.matchfn
1739 if matchfn:
1739 if matchfn:
1740 stat = self.diffopts.get('stat')
1740 stat = self.diffopts.get('stat')
1741 diff = self.diffopts.get('patch')
1741 diff = self.diffopts.get('patch')
1742 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1742 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1743 node, prev = ctx.node(), ctx.p1().node()
1743 node, prev = ctx.node(), ctx.p1().node()
1744 if stat:
1744 if stat:
1745 self.ui.pushbuffer()
1745 self.ui.pushbuffer()
1746 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1746 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1747 match=matchfn, stat=True)
1747 match=matchfn, stat=True)
1748 self.ui.write((',\n "diffstat": "%s"')
1748 self.ui.write((',\n "diffstat": "%s"')
1749 % j(self.ui.popbuffer()))
1749 % j(self.ui.popbuffer()))
1750 if diff:
1750 if diff:
1751 self.ui.pushbuffer()
1751 self.ui.pushbuffer()
1752 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1752 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1753 match=matchfn, stat=False)
1753 match=matchfn, stat=False)
1754 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1754 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1755
1755
1756 self.ui.write("\n }")
1756 self.ui.write("\n }")
1757
1757
1758 class changeset_templater(changeset_printer):
1758 class changeset_templater(changeset_printer):
1759 '''format changeset information.'''
1759 '''format changeset information.'''
1760
1760
1761 # Arguments before "buffered" used to be positional. Consider not
1761 # Arguments before "buffered" used to be positional. Consider not
1762 # adding/removing arguments before "buffered" to not break callers.
1762 # adding/removing arguments before "buffered" to not break callers.
1763 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1763 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1764 buffered=False):
1764 buffered=False):
1765 diffopts = diffopts or {}
1765 diffopts = diffopts or {}
1766
1766
1767 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1767 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1768 self.t = formatter.loadtemplater(ui, tmplspec,
1768 self.t = formatter.loadtemplater(ui, tmplspec,
1769 cache=templatekw.defaulttempl)
1769 cache=templatekw.defaulttempl)
1770 self._counter = itertools.count()
1770 self._counter = itertools.count()
1771 self.cache = {}
1771 self.cache = {}
1772
1772
1773 self._tref = tmplspec.ref
1773 self._tref = tmplspec.ref
1774 self._parts = {'header': '', 'footer': '',
1774 self._parts = {'header': '', 'footer': '',
1775 tmplspec.ref: tmplspec.ref,
1775 tmplspec.ref: tmplspec.ref,
1776 'docheader': '', 'docfooter': '',
1776 'docheader': '', 'docfooter': '',
1777 'separator': ''}
1777 'separator': ''}
1778 if tmplspec.mapfile:
1778 if tmplspec.mapfile:
1779 # find correct templates for current mode, for backward
1779 # find correct templates for current mode, for backward
1780 # compatibility with 'log -v/-q/--debug' using a mapfile
1780 # compatibility with 'log -v/-q/--debug' using a mapfile
1781 tmplmodes = [
1781 tmplmodes = [
1782 (True, ''),
1782 (True, ''),
1783 (self.ui.verbose, '_verbose'),
1783 (self.ui.verbose, '_verbose'),
1784 (self.ui.quiet, '_quiet'),
1784 (self.ui.quiet, '_quiet'),
1785 (self.ui.debugflag, '_debug'),
1785 (self.ui.debugflag, '_debug'),
1786 ]
1786 ]
1787 for mode, postfix in tmplmodes:
1787 for mode, postfix in tmplmodes:
1788 for t in self._parts:
1788 for t in self._parts:
1789 cur = t + postfix
1789 cur = t + postfix
1790 if mode and cur in self.t:
1790 if mode and cur in self.t:
1791 self._parts[t] = cur
1791 self._parts[t] = cur
1792 else:
1792 else:
1793 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1793 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1794 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1794 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1795 self._parts.update(m)
1795 self._parts.update(m)
1796
1796
1797 if self._parts['docheader']:
1797 if self._parts['docheader']:
1798 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1798 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1799
1799
1800 def close(self):
1800 def close(self):
1801 if self._parts['docfooter']:
1801 if self._parts['docfooter']:
1802 if not self.footer:
1802 if not self.footer:
1803 self.footer = ""
1803 self.footer = ""
1804 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1804 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1805 return super(changeset_templater, self).close()
1805 return super(changeset_templater, self).close()
1806
1806
1807 def _show(self, ctx, copies, matchfn, props):
1807 def _show(self, ctx, copies, matchfn, props):
1808 '''show a single changeset or file revision'''
1808 '''show a single changeset or file revision'''
1809 props = props.copy()
1809 props = props.copy()
1810 props.update(templatekw.keywords)
1810 props.update(templatekw.keywords)
1811 props['templ'] = self.t
1811 props['templ'] = self.t
1812 props['ctx'] = ctx
1812 props['ctx'] = ctx
1813 props['repo'] = self.repo
1813 props['repo'] = self.repo
1814 props['ui'] = self.repo.ui
1814 props['ui'] = self.repo.ui
1815 props['index'] = index = next(self._counter)
1815 props['index'] = index = next(self._counter)
1816 props['revcache'] = {'copies': copies}
1816 props['revcache'] = {'copies': copies}
1817 props['cache'] = self.cache
1817 props['cache'] = self.cache
1818 props = pycompat.strkwargs(props)
1818 props = pycompat.strkwargs(props)
1819
1819
1820 # write separator, which wouldn't work well with the header part below
1820 # write separator, which wouldn't work well with the header part below
1821 # since there's inherently a conflict between header (across items) and
1821 # since there's inherently a conflict between header (across items) and
1822 # separator (per item)
1822 # separator (per item)
1823 if self._parts['separator'] and index > 0:
1823 if self._parts['separator'] and index > 0:
1824 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1824 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1825
1825
1826 # write header
1826 # write header
1827 if self._parts['header']:
1827 if self._parts['header']:
1828 h = templater.stringify(self.t(self._parts['header'], **props))
1828 h = templater.stringify(self.t(self._parts['header'], **props))
1829 if self.buffered:
1829 if self.buffered:
1830 self.header[ctx.rev()] = h
1830 self.header[ctx.rev()] = h
1831 else:
1831 else:
1832 if self.lastheader != h:
1832 if self.lastheader != h:
1833 self.lastheader = h
1833 self.lastheader = h
1834 self.ui.write(h)
1834 self.ui.write(h)
1835
1835
1836 # write changeset metadata, then patch if requested
1836 # write changeset metadata, then patch if requested
1837 key = self._parts[self._tref]
1837 key = self._parts[self._tref]
1838 self.ui.write(templater.stringify(self.t(key, **props)))
1838 self.ui.write(templater.stringify(self.t(key, **props)))
1839 self.showpatch(ctx, matchfn)
1839 self.showpatch(ctx, matchfn)
1840
1840
1841 if self._parts['footer']:
1841 if self._parts['footer']:
1842 if not self.footer:
1842 if not self.footer:
1843 self.footer = templater.stringify(
1843 self.footer = templater.stringify(
1844 self.t(self._parts['footer'], **props))
1844 self.t(self._parts['footer'], **props))
1845
1845
1846 def logtemplatespec(tmpl, mapfile):
1846 def logtemplatespec(tmpl, mapfile):
1847 if mapfile:
1847 if mapfile:
1848 return formatter.templatespec('changeset', tmpl, mapfile)
1848 return formatter.templatespec('changeset', tmpl, mapfile)
1849 else:
1849 else:
1850 return formatter.templatespec('', tmpl, None)
1850 return formatter.templatespec('', tmpl, None)
1851
1851
1852 def _lookuplogtemplate(ui, tmpl, style):
1852 def _lookuplogtemplate(ui, tmpl, style):
1853 """Find the template matching the given template spec or style
1853 """Find the template matching the given template spec or style
1854
1854
1855 See formatter.lookuptemplate() for details.
1855 See formatter.lookuptemplate() for details.
1856 """
1856 """
1857
1857
1858 # ui settings
1858 # ui settings
1859 if not tmpl and not style: # template are stronger than style
1859 if not tmpl and not style: # template are stronger than style
1860 tmpl = ui.config('ui', 'logtemplate')
1860 tmpl = ui.config('ui', 'logtemplate')
1861 if tmpl:
1861 if tmpl:
1862 return logtemplatespec(templater.unquotestring(tmpl), None)
1862 return logtemplatespec(templater.unquotestring(tmpl), None)
1863 else:
1863 else:
1864 style = util.expandpath(ui.config('ui', 'style'))
1864 style = util.expandpath(ui.config('ui', 'style'))
1865
1865
1866 if not tmpl and style:
1866 if not tmpl and style:
1867 mapfile = style
1867 mapfile = style
1868 if not os.path.split(mapfile)[0]:
1868 if not os.path.split(mapfile)[0]:
1869 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1869 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1870 or templater.templatepath(mapfile))
1870 or templater.templatepath(mapfile))
1871 if mapname:
1871 if mapname:
1872 mapfile = mapname
1872 mapfile = mapname
1873 return logtemplatespec(None, mapfile)
1873 return logtemplatespec(None, mapfile)
1874
1874
1875 if not tmpl:
1875 if not tmpl:
1876 return logtemplatespec(None, None)
1876 return logtemplatespec(None, None)
1877
1877
1878 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1878 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1879
1879
1880 def makelogtemplater(ui, repo, tmpl, buffered=False):
1880 def makelogtemplater(ui, repo, tmpl, buffered=False):
1881 """Create a changeset_templater from a literal template 'tmpl'"""
1881 """Create a changeset_templater from a literal template 'tmpl'"""
1882 spec = logtemplatespec(tmpl, None)
1882 spec = logtemplatespec(tmpl, None)
1883 return changeset_templater(ui, repo, spec, buffered=buffered)
1883 return changeset_templater(ui, repo, spec, buffered=buffered)
1884
1884
1885 def show_changeset(ui, repo, opts, buffered=False):
1885 def show_changeset(ui, repo, opts, buffered=False):
1886 """show one changeset using template or regular display.
1886 """show one changeset using template or regular display.
1887
1887
1888 Display format will be the first non-empty hit of:
1888 Display format will be the first non-empty hit of:
1889 1. option 'template'
1889 1. option 'template'
1890 2. option 'style'
1890 2. option 'style'
1891 3. [ui] setting 'logtemplate'
1891 3. [ui] setting 'logtemplate'
1892 4. [ui] setting 'style'
1892 4. [ui] setting 'style'
1893 If all of these values are either the unset or the empty string,
1893 If all of these values are either the unset or the empty string,
1894 regular display via changeset_printer() is done.
1894 regular display via changeset_printer() is done.
1895 """
1895 """
1896 # options
1896 # options
1897 matchfn = None
1897 matchfn = None
1898 if opts.get('patch') or opts.get('stat'):
1898 if opts.get('patch') or opts.get('stat'):
1899 matchfn = scmutil.matchall(repo)
1899 matchfn = scmutil.matchall(repo)
1900
1900
1901 if opts.get('template') == 'json':
1901 if opts.get('template') == 'json':
1902 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1902 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1903
1903
1904 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1904 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1905
1905
1906 if not spec.ref and not spec.tmpl and not spec.mapfile:
1906 if not spec.ref and not spec.tmpl and not spec.mapfile:
1907 return changeset_printer(ui, repo, matchfn, opts, buffered)
1907 return changeset_printer(ui, repo, matchfn, opts, buffered)
1908
1908
1909 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1909 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1910
1910
1911 def showmarker(fm, marker, index=None):
1911 def showmarker(fm, marker, index=None):
1912 """utility function to display obsolescence marker in a readable way
1912 """utility function to display obsolescence marker in a readable way
1913
1913
1914 To be used by debug function."""
1914 To be used by debug function."""
1915 if index is not None:
1915 if index is not None:
1916 fm.write('index', '%i ', index)
1916 fm.write('index', '%i ', index)
1917 fm.write('precnode', '%s ', hex(marker.prednode()))
1917 fm.write('precnode', '%s ', hex(marker.prednode()))
1918 succs = marker.succnodes()
1918 succs = marker.succnodes()
1919 fm.condwrite(succs, 'succnodes', '%s ',
1919 fm.condwrite(succs, 'succnodes', '%s ',
1920 fm.formatlist(map(hex, succs), name='node'))
1920 fm.formatlist(map(hex, succs), name='node'))
1921 fm.write('flag', '%X ', marker.flags())
1921 fm.write('flag', '%X ', marker.flags())
1922 parents = marker.parentnodes()
1922 parents = marker.parentnodes()
1923 if parents is not None:
1923 if parents is not None:
1924 fm.write('parentnodes', '{%s} ',
1924 fm.write('parentnodes', '{%s} ',
1925 fm.formatlist(map(hex, parents), name='node', sep=', '))
1925 fm.formatlist(map(hex, parents), name='node', sep=', '))
1926 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1926 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1927 meta = marker.metadata().copy()
1927 meta = marker.metadata().copy()
1928 meta.pop('date', None)
1928 meta.pop('date', None)
1929 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1929 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1930 fm.plain('\n')
1930 fm.plain('\n')
1931
1931
1932 def finddate(ui, repo, date):
1932 def finddate(ui, repo, date):
1933 """Find the tipmost changeset that matches the given date spec"""
1933 """Find the tipmost changeset that matches the given date spec"""
1934
1934
1935 df = util.matchdate(date)
1935 df = util.matchdate(date)
1936 m = scmutil.matchall(repo)
1936 m = scmutil.matchall(repo)
1937 results = {}
1937 results = {}
1938
1938
1939 def prep(ctx, fns):
1939 def prep(ctx, fns):
1940 d = ctx.date()
1940 d = ctx.date()
1941 if df(d[0]):
1941 if df(d[0]):
1942 results[ctx.rev()] = d
1942 results[ctx.rev()] = d
1943
1943
1944 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1944 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1945 rev = ctx.rev()
1945 rev = ctx.rev()
1946 if rev in results:
1946 if rev in results:
1947 ui.status(_("found revision %s from %s\n") %
1947 ui.status(_("found revision %s from %s\n") %
1948 (rev, util.datestr(results[rev])))
1948 (rev, util.datestr(results[rev])))
1949 return '%d' % rev
1949 return '%d' % rev
1950
1950
1951 raise error.Abort(_("revision matching date not found"))
1951 raise error.Abort(_("revision matching date not found"))
1952
1952
1953 def increasingwindows(windowsize=8, sizelimit=512):
1953 def increasingwindows(windowsize=8, sizelimit=512):
1954 while True:
1954 while True:
1955 yield windowsize
1955 yield windowsize
1956 if windowsize < sizelimit:
1956 if windowsize < sizelimit:
1957 windowsize *= 2
1957 windowsize *= 2
1958
1958
1959 class FileWalkError(Exception):
1959 class FileWalkError(Exception):
1960 pass
1960 pass
1961
1961
1962 def walkfilerevs(repo, match, follow, revs, fncache):
1962 def walkfilerevs(repo, match, follow, revs, fncache):
1963 '''Walks the file history for the matched files.
1963 '''Walks the file history for the matched files.
1964
1964
1965 Returns the changeset revs that are involved in the file history.
1965 Returns the changeset revs that are involved in the file history.
1966
1966
1967 Throws FileWalkError if the file history can't be walked using
1967 Throws FileWalkError if the file history can't be walked using
1968 filelogs alone.
1968 filelogs alone.
1969 '''
1969 '''
1970 wanted = set()
1970 wanted = set()
1971 copies = []
1971 copies = []
1972 minrev, maxrev = min(revs), max(revs)
1972 minrev, maxrev = min(revs), max(revs)
1973 def filerevgen(filelog, last):
1973 def filerevgen(filelog, last):
1974 """
1974 """
1975 Only files, no patterns. Check the history of each file.
1975 Only files, no patterns. Check the history of each file.
1976
1976
1977 Examines filelog entries within minrev, maxrev linkrev range
1977 Examines filelog entries within minrev, maxrev linkrev range
1978 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1978 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1979 tuples in backwards order
1979 tuples in backwards order
1980 """
1980 """
1981 cl_count = len(repo)
1981 cl_count = len(repo)
1982 revs = []
1982 revs = []
1983 for j in xrange(0, last + 1):
1983 for j in xrange(0, last + 1):
1984 linkrev = filelog.linkrev(j)
1984 linkrev = filelog.linkrev(j)
1985 if linkrev < minrev:
1985 if linkrev < minrev:
1986 continue
1986 continue
1987 # only yield rev for which we have the changelog, it can
1987 # only yield rev for which we have the changelog, it can
1988 # happen while doing "hg log" during a pull or commit
1988 # happen while doing "hg log" during a pull or commit
1989 if linkrev >= cl_count:
1989 if linkrev >= cl_count:
1990 break
1990 break
1991
1991
1992 parentlinkrevs = []
1992 parentlinkrevs = []
1993 for p in filelog.parentrevs(j):
1993 for p in filelog.parentrevs(j):
1994 if p != nullrev:
1994 if p != nullrev:
1995 parentlinkrevs.append(filelog.linkrev(p))
1995 parentlinkrevs.append(filelog.linkrev(p))
1996 n = filelog.node(j)
1996 n = filelog.node(j)
1997 revs.append((linkrev, parentlinkrevs,
1997 revs.append((linkrev, parentlinkrevs,
1998 follow and filelog.renamed(n)))
1998 follow and filelog.renamed(n)))
1999
1999
2000 return reversed(revs)
2000 return reversed(revs)
2001 def iterfiles():
2001 def iterfiles():
2002 pctx = repo['.']
2002 pctx = repo['.']
2003 for filename in match.files():
2003 for filename in match.files():
2004 if follow:
2004 if follow:
2005 if filename not in pctx:
2005 if filename not in pctx:
2006 raise error.Abort(_('cannot follow file not in parent '
2006 raise error.Abort(_('cannot follow file not in parent '
2007 'revision: "%s"') % filename)
2007 'revision: "%s"') % filename)
2008 yield filename, pctx[filename].filenode()
2008 yield filename, pctx[filename].filenode()
2009 else:
2009 else:
2010 yield filename, None
2010 yield filename, None
2011 for filename_node in copies:
2011 for filename_node in copies:
2012 yield filename_node
2012 yield filename_node
2013
2013
2014 for file_, node in iterfiles():
2014 for file_, node in iterfiles():
2015 filelog = repo.file(file_)
2015 filelog = repo.file(file_)
2016 if not len(filelog):
2016 if not len(filelog):
2017 if node is None:
2017 if node is None:
2018 # A zero count may be a directory or deleted file, so
2018 # A zero count may be a directory or deleted file, so
2019 # try to find matching entries on the slow path.
2019 # try to find matching entries on the slow path.
2020 if follow:
2020 if follow:
2021 raise error.Abort(
2021 raise error.Abort(
2022 _('cannot follow nonexistent file: "%s"') % file_)
2022 _('cannot follow nonexistent file: "%s"') % file_)
2023 raise FileWalkError("Cannot walk via filelog")
2023 raise FileWalkError("Cannot walk via filelog")
2024 else:
2024 else:
2025 continue
2025 continue
2026
2026
2027 if node is None:
2027 if node is None:
2028 last = len(filelog) - 1
2028 last = len(filelog) - 1
2029 else:
2029 else:
2030 last = filelog.rev(node)
2030 last = filelog.rev(node)
2031
2031
2032 # keep track of all ancestors of the file
2032 # keep track of all ancestors of the file
2033 ancestors = {filelog.linkrev(last)}
2033 ancestors = {filelog.linkrev(last)}
2034
2034
2035 # iterate from latest to oldest revision
2035 # iterate from latest to oldest revision
2036 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2036 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2037 if not follow:
2037 if not follow:
2038 if rev > maxrev:
2038 if rev > maxrev:
2039 continue
2039 continue
2040 else:
2040 else:
2041 # Note that last might not be the first interesting
2041 # Note that last might not be the first interesting
2042 # rev to us:
2042 # rev to us:
2043 # if the file has been changed after maxrev, we'll
2043 # if the file has been changed after maxrev, we'll
2044 # have linkrev(last) > maxrev, and we still need
2044 # have linkrev(last) > maxrev, and we still need
2045 # to explore the file graph
2045 # to explore the file graph
2046 if rev not in ancestors:
2046 if rev not in ancestors:
2047 continue
2047 continue
2048 # XXX insert 1327 fix here
2048 # XXX insert 1327 fix here
2049 if flparentlinkrevs:
2049 if flparentlinkrevs:
2050 ancestors.update(flparentlinkrevs)
2050 ancestors.update(flparentlinkrevs)
2051
2051
2052 fncache.setdefault(rev, []).append(file_)
2052 fncache.setdefault(rev, []).append(file_)
2053 wanted.add(rev)
2053 wanted.add(rev)
2054 if copied:
2054 if copied:
2055 copies.append(copied)
2055 copies.append(copied)
2056
2056
2057 return wanted
2057 return wanted
2058
2058
2059 class _followfilter(object):
2059 class _followfilter(object):
2060 def __init__(self, repo, onlyfirst=False):
2060 def __init__(self, repo, onlyfirst=False):
2061 self.repo = repo
2061 self.repo = repo
2062 self.startrev = nullrev
2062 self.startrev = nullrev
2063 self.roots = set()
2063 self.roots = set()
2064 self.onlyfirst = onlyfirst
2064 self.onlyfirst = onlyfirst
2065
2065
2066 def match(self, rev):
2066 def match(self, rev):
2067 def realparents(rev):
2067 def realparents(rev):
2068 if self.onlyfirst:
2068 if self.onlyfirst:
2069 return self.repo.changelog.parentrevs(rev)[0:1]
2069 return self.repo.changelog.parentrevs(rev)[0:1]
2070 else:
2070 else:
2071 return filter(lambda x: x != nullrev,
2071 return filter(lambda x: x != nullrev,
2072 self.repo.changelog.parentrevs(rev))
2072 self.repo.changelog.parentrevs(rev))
2073
2073
2074 if self.startrev == nullrev:
2074 if self.startrev == nullrev:
2075 self.startrev = rev
2075 self.startrev = rev
2076 return True
2076 return True
2077
2077
2078 if rev > self.startrev:
2078 if rev > self.startrev:
2079 # forward: all descendants
2079 # forward: all descendants
2080 if not self.roots:
2080 if not self.roots:
2081 self.roots.add(self.startrev)
2081 self.roots.add(self.startrev)
2082 for parent in realparents(rev):
2082 for parent in realparents(rev):
2083 if parent in self.roots:
2083 if parent in self.roots:
2084 self.roots.add(rev)
2084 self.roots.add(rev)
2085 return True
2085 return True
2086 else:
2086 else:
2087 # backwards: all parents
2087 # backwards: all parents
2088 if not self.roots:
2088 if not self.roots:
2089 self.roots.update(realparents(self.startrev))
2089 self.roots.update(realparents(self.startrev))
2090 if rev in self.roots:
2090 if rev in self.roots:
2091 self.roots.remove(rev)
2091 self.roots.remove(rev)
2092 self.roots.update(realparents(rev))
2092 self.roots.update(realparents(rev))
2093 return True
2093 return True
2094
2094
2095 return False
2095 return False
2096
2096
2097 def walkchangerevs(repo, match, opts, prepare):
2097 def walkchangerevs(repo, match, opts, prepare):
2098 '''Iterate over files and the revs in which they changed.
2098 '''Iterate over files and the revs in which they changed.
2099
2099
2100 Callers most commonly need to iterate backwards over the history
2100 Callers most commonly need to iterate backwards over the history
2101 in which they are interested. Doing so has awful (quadratic-looking)
2101 in which they are interested. Doing so has awful (quadratic-looking)
2102 performance, so we use iterators in a "windowed" way.
2102 performance, so we use iterators in a "windowed" way.
2103
2103
2104 We walk a window of revisions in the desired order. Within the
2104 We walk a window of revisions in the desired order. Within the
2105 window, we first walk forwards to gather data, then in the desired
2105 window, we first walk forwards to gather data, then in the desired
2106 order (usually backwards) to display it.
2106 order (usually backwards) to display it.
2107
2107
2108 This function returns an iterator yielding contexts. Before
2108 This function returns an iterator yielding contexts. Before
2109 yielding each context, the iterator will first call the prepare
2109 yielding each context, the iterator will first call the prepare
2110 function on each context in the window in forward order.'''
2110 function on each context in the window in forward order.'''
2111
2111
2112 follow = opts.get('follow') or opts.get('follow_first')
2112 follow = opts.get('follow') or opts.get('follow_first')
2113 revs = _logrevs(repo, opts)
2113 revs = _logrevs(repo, opts)
2114 if not revs:
2114 if not revs:
2115 return []
2115 return []
2116 wanted = set()
2116 wanted = set()
2117 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2117 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2118 opts.get('removed'))
2118 opts.get('removed'))
2119 fncache = {}
2119 fncache = {}
2120 change = repo.changectx
2120 change = repo.changectx
2121
2121
2122 # First step is to fill wanted, the set of revisions that we want to yield.
2122 # First step is to fill wanted, the set of revisions that we want to yield.
2123 # When it does not induce extra cost, we also fill fncache for revisions in
2123 # When it does not induce extra cost, we also fill fncache for revisions in
2124 # wanted: a cache of filenames that were changed (ctx.files()) and that
2124 # wanted: a cache of filenames that were changed (ctx.files()) and that
2125 # match the file filtering conditions.
2125 # match the file filtering conditions.
2126
2126
2127 if match.always():
2127 if match.always():
2128 # No files, no patterns. Display all revs.
2128 # No files, no patterns. Display all revs.
2129 wanted = revs
2129 wanted = revs
2130 elif not slowpath:
2130 elif not slowpath:
2131 # We only have to read through the filelog to find wanted revisions
2131 # We only have to read through the filelog to find wanted revisions
2132
2132
2133 try:
2133 try:
2134 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2134 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2135 except FileWalkError:
2135 except FileWalkError:
2136 slowpath = True
2136 slowpath = True
2137
2137
2138 # We decided to fall back to the slowpath because at least one
2138 # We decided to fall back to the slowpath because at least one
2139 # of the paths was not a file. Check to see if at least one of them
2139 # of the paths was not a file. Check to see if at least one of them
2140 # existed in history, otherwise simply return
2140 # existed in history, otherwise simply return
2141 for path in match.files():
2141 for path in match.files():
2142 if path == '.' or path in repo.store:
2142 if path == '.' or path in repo.store:
2143 break
2143 break
2144 else:
2144 else:
2145 return []
2145 return []
2146
2146
2147 if slowpath:
2147 if slowpath:
2148 # We have to read the changelog to match filenames against
2148 # We have to read the changelog to match filenames against
2149 # changed files
2149 # changed files
2150
2150
2151 if follow:
2151 if follow:
2152 raise error.Abort(_('can only follow copies/renames for explicit '
2152 raise error.Abort(_('can only follow copies/renames for explicit '
2153 'filenames'))
2153 'filenames'))
2154
2154
2155 # The slow path checks files modified in every changeset.
2155 # The slow path checks files modified in every changeset.
2156 # This is really slow on large repos, so compute the set lazily.
2156 # This is really slow on large repos, so compute the set lazily.
2157 class lazywantedset(object):
2157 class lazywantedset(object):
2158 def __init__(self):
2158 def __init__(self):
2159 self.set = set()
2159 self.set = set()
2160 self.revs = set(revs)
2160 self.revs = set(revs)
2161
2161
2162 # No need to worry about locality here because it will be accessed
2162 # No need to worry about locality here because it will be accessed
2163 # in the same order as the increasing window below.
2163 # in the same order as the increasing window below.
2164 def __contains__(self, value):
2164 def __contains__(self, value):
2165 if value in self.set:
2165 if value in self.set:
2166 return True
2166 return True
2167 elif not value in self.revs:
2167 elif not value in self.revs:
2168 return False
2168 return False
2169 else:
2169 else:
2170 self.revs.discard(value)
2170 self.revs.discard(value)
2171 ctx = change(value)
2171 ctx = change(value)
2172 matches = filter(match, ctx.files())
2172 matches = filter(match, ctx.files())
2173 if matches:
2173 if matches:
2174 fncache[value] = matches
2174 fncache[value] = matches
2175 self.set.add(value)
2175 self.set.add(value)
2176 return True
2176 return True
2177 return False
2177 return False
2178
2178
2179 def discard(self, value):
2179 def discard(self, value):
2180 self.revs.discard(value)
2180 self.revs.discard(value)
2181 self.set.discard(value)
2181 self.set.discard(value)
2182
2182
2183 wanted = lazywantedset()
2183 wanted = lazywantedset()
2184
2184
2185 # it might be worthwhile to do this in the iterator if the rev range
2185 # it might be worthwhile to do this in the iterator if the rev range
2186 # is descending and the prune args are all within that range
2186 # is descending and the prune args are all within that range
2187 for rev in opts.get('prune', ()):
2187 for rev in opts.get('prune', ()):
2188 rev = repo[rev].rev()
2188 rev = repo[rev].rev()
2189 ff = _followfilter(repo)
2189 ff = _followfilter(repo)
2190 stop = min(revs[0], revs[-1])
2190 stop = min(revs[0], revs[-1])
2191 for x in xrange(rev, stop - 1, -1):
2191 for x in xrange(rev, stop - 1, -1):
2192 if ff.match(x):
2192 if ff.match(x):
2193 wanted = wanted - [x]
2193 wanted = wanted - [x]
2194
2194
2195 # Now that wanted is correctly initialized, we can iterate over the
2195 # Now that wanted is correctly initialized, we can iterate over the
2196 # revision range, yielding only revisions in wanted.
2196 # revision range, yielding only revisions in wanted.
2197 def iterate():
2197 def iterate():
2198 if follow and match.always():
2198 if follow and match.always():
2199 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2199 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2200 def want(rev):
2200 def want(rev):
2201 return ff.match(rev) and rev in wanted
2201 return ff.match(rev) and rev in wanted
2202 else:
2202 else:
2203 def want(rev):
2203 def want(rev):
2204 return rev in wanted
2204 return rev in wanted
2205
2205
2206 it = iter(revs)
2206 it = iter(revs)
2207 stopiteration = False
2207 stopiteration = False
2208 for windowsize in increasingwindows():
2208 for windowsize in increasingwindows():
2209 nrevs = []
2209 nrevs = []
2210 for i in xrange(windowsize):
2210 for i in xrange(windowsize):
2211 rev = next(it, None)
2211 rev = next(it, None)
2212 if rev is None:
2212 if rev is None:
2213 stopiteration = True
2213 stopiteration = True
2214 break
2214 break
2215 elif want(rev):
2215 elif want(rev):
2216 nrevs.append(rev)
2216 nrevs.append(rev)
2217 for rev in sorted(nrevs):
2217 for rev in sorted(nrevs):
2218 fns = fncache.get(rev)
2218 fns = fncache.get(rev)
2219 ctx = change(rev)
2219 ctx = change(rev)
2220 if not fns:
2220 if not fns:
2221 def fns_generator():
2221 def fns_generator():
2222 for f in ctx.files():
2222 for f in ctx.files():
2223 if match(f):
2223 if match(f):
2224 yield f
2224 yield f
2225 fns = fns_generator()
2225 fns = fns_generator()
2226 prepare(ctx, fns)
2226 prepare(ctx, fns)
2227 for rev in nrevs:
2227 for rev in nrevs:
2228 yield change(rev)
2228 yield change(rev)
2229
2229
2230 if stopiteration:
2230 if stopiteration:
2231 break
2231 break
2232
2232
2233 return iterate()
2233 return iterate()
2234
2234
2235 def _makefollowlogfilematcher(repo, files, followfirst):
2235 def _makefollowlogfilematcher(repo, files, followfirst):
2236 # When displaying a revision with --patch --follow FILE, we have
2236 # When displaying a revision with --patch --follow FILE, we have
2237 # to know which file of the revision must be diffed. With
2237 # to know which file of the revision must be diffed. With
2238 # --follow, we want the names of the ancestors of FILE in the
2238 # --follow, we want the names of the ancestors of FILE in the
2239 # revision, stored in "fcache". "fcache" is populated by
2239 # revision, stored in "fcache". "fcache" is populated by
2240 # reproducing the graph traversal already done by --follow revset
2240 # reproducing the graph traversal already done by --follow revset
2241 # and relating revs to file names (which is not "correct" but
2241 # and relating revs to file names (which is not "correct" but
2242 # good enough).
2242 # good enough).
2243 fcache = {}
2243 fcache = {}
2244 fcacheready = [False]
2244 fcacheready = [False]
2245 pctx = repo['.']
2245 pctx = repo['.']
2246
2246
2247 def populate():
2247 def populate():
2248 for fn in files:
2248 for fn in files:
2249 fctx = pctx[fn]
2249 fctx = pctx[fn]
2250 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2250 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2251 for c in fctx.ancestors(followfirst=followfirst):
2251 for c in fctx.ancestors(followfirst=followfirst):
2252 fcache.setdefault(c.rev(), set()).add(c.path())
2252 fcache.setdefault(c.rev(), set()).add(c.path())
2253
2253
2254 def filematcher(rev):
2254 def filematcher(rev):
2255 if not fcacheready[0]:
2255 if not fcacheready[0]:
2256 # Lazy initialization
2256 # Lazy initialization
2257 fcacheready[0] = True
2257 fcacheready[0] = True
2258 populate()
2258 populate()
2259 return scmutil.matchfiles(repo, fcache.get(rev, []))
2259 return scmutil.matchfiles(repo, fcache.get(rev, []))
2260
2260
2261 return filematcher
2261 return filematcher
2262
2262
2263 def _makenofollowlogfilematcher(repo, pats, opts):
2263 def _makenofollowlogfilematcher(repo, pats, opts):
2264 '''hook for extensions to override the filematcher for non-follow cases'''
2264 '''hook for extensions to override the filematcher for non-follow cases'''
2265 return None
2265 return None
2266
2266
2267 def _makelogrevset(repo, pats, opts, revs):
2267 def _makelogrevset(repo, pats, opts, revs):
2268 """Return (expr, filematcher) where expr is a revset string built
2268 """Return (expr, filematcher) where expr is a revset string built
2269 from log options and file patterns or None. If --stat or --patch
2269 from log options and file patterns or None. If --stat or --patch
2270 are not passed filematcher is None. Otherwise it is a callable
2270 are not passed filematcher is None. Otherwise it is a callable
2271 taking a revision number and returning a match objects filtering
2271 taking a revision number and returning a match objects filtering
2272 the files to be detailed when displaying the revision.
2272 the files to be detailed when displaying the revision.
2273 """
2273 """
2274 opt2revset = {
2274 opt2revset = {
2275 'no_merges': ('not merge()', None),
2275 'no_merges': ('not merge()', None),
2276 'only_merges': ('merge()', None),
2276 'only_merges': ('merge()', None),
2277 '_ancestors': ('ancestors(%(val)s)', None),
2277 '_ancestors': ('ancestors(%(val)s)', None),
2278 '_fancestors': ('_firstancestors(%(val)s)', None),
2278 '_fancestors': ('_firstancestors(%(val)s)', None),
2279 '_descendants': ('descendants(%(val)s)', None),
2279 '_descendants': ('descendants(%(val)s)', None),
2280 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2280 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2281 '_matchfiles': ('_matchfiles(%(val)s)', None),
2281 '_matchfiles': ('_matchfiles(%(val)s)', None),
2282 'date': ('date(%(val)r)', None),
2282 'date': ('date(%(val)r)', None),
2283 'branch': ('branch(%(val)r)', ' or '),
2283 'branch': ('branch(%(val)r)', ' or '),
2284 '_patslog': ('filelog(%(val)r)', ' or '),
2284 '_patslog': ('filelog(%(val)r)', ' or '),
2285 '_patsfollow': ('follow(%(val)r)', ' or '),
2285 '_patsfollow': ('follow(%(val)r)', ' or '),
2286 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2286 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2287 'keyword': ('keyword(%(val)r)', ' or '),
2287 'keyword': ('keyword(%(val)r)', ' or '),
2288 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2288 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2289 'user': ('user(%(val)r)', ' or '),
2289 'user': ('user(%(val)r)', ' or '),
2290 }
2290 }
2291
2291
2292 opts = dict(opts)
2292 opts = dict(opts)
2293 # follow or not follow?
2293 # follow or not follow?
2294 follow = opts.get('follow') or opts.get('follow_first')
2294 follow = opts.get('follow') or opts.get('follow_first')
2295 if opts.get('follow_first'):
2295 if opts.get('follow_first'):
2296 followfirst = 1
2296 followfirst = 1
2297 else:
2297 else:
2298 followfirst = 0
2298 followfirst = 0
2299 # --follow with FILE behavior depends on revs...
2299 # --follow with FILE behavior depends on revs...
2300 it = iter(revs)
2300 it = iter(revs)
2301 startrev = next(it)
2301 startrev = next(it)
2302 followdescendants = startrev < next(it, startrev)
2302 followdescendants = startrev < next(it, startrev)
2303
2303
2304 # branch and only_branch are really aliases and must be handled at
2304 # branch and only_branch are really aliases and must be handled at
2305 # the same time
2305 # the same time
2306 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2306 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2307 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2307 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2308 # pats/include/exclude are passed to match.match() directly in
2308 # pats/include/exclude are passed to match.match() directly in
2309 # _matchfiles() revset but walkchangerevs() builds its matcher with
2309 # _matchfiles() revset but walkchangerevs() builds its matcher with
2310 # scmutil.match(). The difference is input pats are globbed on
2310 # scmutil.match(). The difference is input pats are globbed on
2311 # platforms without shell expansion (windows).
2311 # platforms without shell expansion (windows).
2312 wctx = repo[None]
2312 wctx = repo[None]
2313 match, pats = scmutil.matchandpats(wctx, pats, opts)
2313 match, pats = scmutil.matchandpats(wctx, pats, opts)
2314 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2314 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2315 opts.get('removed'))
2315 opts.get('removed'))
2316 if not slowpath:
2316 if not slowpath:
2317 for f in match.files():
2317 for f in match.files():
2318 if follow and f not in wctx:
2318 if follow and f not in wctx:
2319 # If the file exists, it may be a directory, so let it
2319 # If the file exists, it may be a directory, so let it
2320 # take the slow path.
2320 # take the slow path.
2321 if os.path.exists(repo.wjoin(f)):
2321 if os.path.exists(repo.wjoin(f)):
2322 slowpath = True
2322 slowpath = True
2323 continue
2323 continue
2324 else:
2324 else:
2325 raise error.Abort(_('cannot follow file not in parent '
2325 raise error.Abort(_('cannot follow file not in parent '
2326 'revision: "%s"') % f)
2326 'revision: "%s"') % f)
2327 filelog = repo.file(f)
2327 filelog = repo.file(f)
2328 if not filelog:
2328 if not filelog:
2329 # A zero count may be a directory or deleted file, so
2329 # A zero count may be a directory or deleted file, so
2330 # try to find matching entries on the slow path.
2330 # try to find matching entries on the slow path.
2331 if follow:
2331 if follow:
2332 raise error.Abort(
2332 raise error.Abort(
2333 _('cannot follow nonexistent file: "%s"') % f)
2333 _('cannot follow nonexistent file: "%s"') % f)
2334 slowpath = True
2334 slowpath = True
2335
2335
2336 # We decided to fall back to the slowpath because at least one
2336 # We decided to fall back to the slowpath because at least one
2337 # of the paths was not a file. Check to see if at least one of them
2337 # of the paths was not a file. Check to see if at least one of them
2338 # existed in history - in that case, we'll continue down the
2338 # existed in history - in that case, we'll continue down the
2339 # slowpath; otherwise, we can turn off the slowpath
2339 # slowpath; otherwise, we can turn off the slowpath
2340 if slowpath:
2340 if slowpath:
2341 for path in match.files():
2341 for path in match.files():
2342 if path == '.' or path in repo.store:
2342 if path == '.' or path in repo.store:
2343 break
2343 break
2344 else:
2344 else:
2345 slowpath = False
2345 slowpath = False
2346
2346
2347 fpats = ('_patsfollow', '_patsfollowfirst')
2347 fpats = ('_patsfollow', '_patsfollowfirst')
2348 fnopats = (('_ancestors', '_fancestors'),
2348 fnopats = (('_ancestors', '_fancestors'),
2349 ('_descendants', '_fdescendants'))
2349 ('_descendants', '_fdescendants'))
2350 if slowpath:
2350 if slowpath:
2351 # See walkchangerevs() slow path.
2351 # See walkchangerevs() slow path.
2352 #
2352 #
2353 # pats/include/exclude cannot be represented as separate
2353 # pats/include/exclude cannot be represented as separate
2354 # revset expressions as their filtering logic applies at file
2354 # revset expressions as their filtering logic applies at file
2355 # level. For instance "-I a -X a" matches a revision touching
2355 # level. For instance "-I a -X a" matches a revision touching
2356 # "a" and "b" while "file(a) and not file(b)" does
2356 # "a" and "b" while "file(a) and not file(b)" does
2357 # not. Besides, filesets are evaluated against the working
2357 # not. Besides, filesets are evaluated against the working
2358 # directory.
2358 # directory.
2359 matchargs = ['r:', 'd:relpath']
2359 matchargs = ['r:', 'd:relpath']
2360 for p in pats:
2360 for p in pats:
2361 matchargs.append('p:' + p)
2361 matchargs.append('p:' + p)
2362 for p in opts.get('include', []):
2362 for p in opts.get('include', []):
2363 matchargs.append('i:' + p)
2363 matchargs.append('i:' + p)
2364 for p in opts.get('exclude', []):
2364 for p in opts.get('exclude', []):
2365 matchargs.append('x:' + p)
2365 matchargs.append('x:' + p)
2366 matchargs = ','.join(('%r' % p) for p in matchargs)
2366 matchargs = ','.join(('%r' % p) for p in matchargs)
2367 opts['_matchfiles'] = matchargs
2367 opts['_matchfiles'] = matchargs
2368 if follow:
2368 if follow:
2369 opts[fnopats[0][followfirst]] = '.'
2369 opts[fnopats[0][followfirst]] = '.'
2370 else:
2370 else:
2371 if follow:
2371 if follow:
2372 if pats:
2372 if pats:
2373 # follow() revset interprets its file argument as a
2373 # follow() revset interprets its file argument as a
2374 # manifest entry, so use match.files(), not pats.
2374 # manifest entry, so use match.files(), not pats.
2375 opts[fpats[followfirst]] = list(match.files())
2375 opts[fpats[followfirst]] = list(match.files())
2376 else:
2376 else:
2377 op = fnopats[followdescendants][followfirst]
2377 op = fnopats[followdescendants][followfirst]
2378 opts[op] = 'rev(%d)' % startrev
2378 opts[op] = 'rev(%d)' % startrev
2379 else:
2379 else:
2380 opts['_patslog'] = list(pats)
2380 opts['_patslog'] = list(pats)
2381
2381
2382 filematcher = None
2382 filematcher = None
2383 if opts.get('patch') or opts.get('stat'):
2383 if opts.get('patch') or opts.get('stat'):
2384 # When following files, track renames via a special matcher.
2384 # When following files, track renames via a special matcher.
2385 # If we're forced to take the slowpath it means we're following
2385 # If we're forced to take the slowpath it means we're following
2386 # at least one pattern/directory, so don't bother with rename tracking.
2386 # at least one pattern/directory, so don't bother with rename tracking.
2387 if follow and not match.always() and not slowpath:
2387 if follow and not match.always() and not slowpath:
2388 # _makefollowlogfilematcher expects its files argument to be
2388 # _makefollowlogfilematcher expects its files argument to be
2389 # relative to the repo root, so use match.files(), not pats.
2389 # relative to the repo root, so use match.files(), not pats.
2390 filematcher = _makefollowlogfilematcher(repo, match.files(),
2390 filematcher = _makefollowlogfilematcher(repo, match.files(),
2391 followfirst)
2391 followfirst)
2392 else:
2392 else:
2393 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2393 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2394 if filematcher is None:
2394 if filematcher is None:
2395 filematcher = lambda rev: match
2395 filematcher = lambda rev: match
2396
2396
2397 expr = []
2397 expr = []
2398 for op, val in sorted(opts.iteritems()):
2398 for op, val in sorted(opts.iteritems()):
2399 if not val:
2399 if not val:
2400 continue
2400 continue
2401 if op not in opt2revset:
2401 if op not in opt2revset:
2402 continue
2402 continue
2403 revop, andor = opt2revset[op]
2403 revop, andor = opt2revset[op]
2404 if '%(val)' not in revop:
2404 if '%(val)' not in revop:
2405 expr.append(revop)
2405 expr.append(revop)
2406 else:
2406 else:
2407 if not isinstance(val, list):
2407 if not isinstance(val, list):
2408 e = revop % {'val': val}
2408 e = revop % {'val': val}
2409 else:
2409 else:
2410 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2410 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2411 expr.append(e)
2411 expr.append(e)
2412
2412
2413 if expr:
2413 if expr:
2414 expr = '(' + ' and '.join(expr) + ')'
2414 expr = '(' + ' and '.join(expr) + ')'
2415 else:
2415 else:
2416 expr = None
2416 expr = None
2417 return expr, filematcher
2417 return expr, filematcher
2418
2418
2419 def _logrevs(repo, opts):
2419 def _logrevs(repo, opts):
2420 # Default --rev value depends on --follow but --follow behavior
2420 # Default --rev value depends on --follow but --follow behavior
2421 # depends on revisions resolved from --rev...
2421 # depends on revisions resolved from --rev...
2422 follow = opts.get('follow') or opts.get('follow_first')
2422 follow = opts.get('follow') or opts.get('follow_first')
2423 if opts.get('rev'):
2423 if opts.get('rev'):
2424 revs = scmutil.revrange(repo, opts['rev'])
2424 revs = scmutil.revrange(repo, opts['rev'])
2425 elif follow and repo.dirstate.p1() == nullid:
2425 elif follow and repo.dirstate.p1() == nullid:
2426 revs = smartset.baseset()
2426 revs = smartset.baseset()
2427 elif follow:
2427 elif follow:
2428 revs = repo.revs('reverse(:.)')
2428 revs = repo.revs('reverse(:.)')
2429 else:
2429 else:
2430 revs = smartset.spanset(repo)
2430 revs = smartset.spanset(repo)
2431 revs.reverse()
2431 revs.reverse()
2432 return revs
2432 return revs
2433
2433
2434 def getgraphlogrevs(repo, pats, opts):
2434 def getgraphlogrevs(repo, pats, opts):
2435 """Return (revs, expr, filematcher) where revs is an iterable of
2435 """Return (revs, expr, filematcher) where revs is an iterable of
2436 revision numbers, expr is a revset string built from log options
2436 revision numbers, expr is a revset string built from log options
2437 and file patterns or None, and used to filter 'revs'. If --stat or
2437 and file patterns or None, and used to filter 'revs'. If --stat or
2438 --patch are not passed filematcher is None. Otherwise it is a
2438 --patch are not passed filematcher is None. Otherwise it is a
2439 callable taking a revision number and returning a match objects
2439 callable taking a revision number and returning a match objects
2440 filtering the files to be detailed when displaying the revision.
2440 filtering the files to be detailed when displaying the revision.
2441 """
2441 """
2442 limit = loglimit(opts)
2442 limit = loglimit(opts)
2443 revs = _logrevs(repo, opts)
2443 revs = _logrevs(repo, opts)
2444 if not revs:
2444 if not revs:
2445 return smartset.baseset(), None, None
2445 return smartset.baseset(), None, None
2446 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2446 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2447 if opts.get('rev'):
2447 if opts.get('rev'):
2448 # User-specified revs might be unsorted, but don't sort before
2448 # User-specified revs might be unsorted, but don't sort before
2449 # _makelogrevset because it might depend on the order of revs
2449 # _makelogrevset because it might depend on the order of revs
2450 if not (revs.isdescending() or revs.istopo()):
2450 if not (revs.isdescending() or revs.istopo()):
2451 revs.sort(reverse=True)
2451 revs.sort(reverse=True)
2452 if expr:
2452 if expr:
2453 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2453 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2454 revs = matcher(repo, revs)
2454 revs = matcher(repo, revs)
2455 if limit is not None:
2455 if limit is not None:
2456 limitedrevs = []
2456 limitedrevs = []
2457 for idx, rev in enumerate(revs):
2457 for idx, rev in enumerate(revs):
2458 if idx >= limit:
2458 if idx >= limit:
2459 break
2459 break
2460 limitedrevs.append(rev)
2460 limitedrevs.append(rev)
2461 revs = smartset.baseset(limitedrevs)
2461 revs = smartset.baseset(limitedrevs)
2462
2462
2463 return revs, expr, filematcher
2463 return revs, expr, filematcher
2464
2464
2465 def getlogrevs(repo, pats, opts):
2465 def getlogrevs(repo, pats, opts):
2466 """Return (revs, expr, filematcher) where revs is an iterable of
2466 """Return (revs, expr, filematcher) where revs is an iterable of
2467 revision numbers, expr is a revset string built from log options
2467 revision numbers, expr is a revset string built from log options
2468 and file patterns or None, and used to filter 'revs'. If --stat or
2468 and file patterns or None, and used to filter 'revs'. If --stat or
2469 --patch are not passed filematcher is None. Otherwise it is a
2469 --patch are not passed filematcher is None. Otherwise it is a
2470 callable taking a revision number and returning a match objects
2470 callable taking a revision number and returning a match objects
2471 filtering the files to be detailed when displaying the revision.
2471 filtering the files to be detailed when displaying the revision.
2472 """
2472 """
2473 limit = loglimit(opts)
2473 limit = loglimit(opts)
2474 revs = _logrevs(repo, opts)
2474 revs = _logrevs(repo, opts)
2475 if not revs:
2475 if not revs:
2476 return smartset.baseset([]), None, None
2476 return smartset.baseset([]), None, None
2477 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2477 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2478 if expr:
2478 if expr:
2479 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2479 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2480 revs = matcher(repo, revs)
2480 revs = matcher(repo, revs)
2481 if limit is not None:
2481 if limit is not None:
2482 limitedrevs = []
2482 limitedrevs = []
2483 for idx, r in enumerate(revs):
2483 for idx, r in enumerate(revs):
2484 if limit <= idx:
2484 if limit <= idx:
2485 break
2485 break
2486 limitedrevs.append(r)
2486 limitedrevs.append(r)
2487 revs = smartset.baseset(limitedrevs)
2487 revs = smartset.baseset(limitedrevs)
2488
2488
2489 return revs, expr, filematcher
2489 return revs, expr, filematcher
2490
2490
2491 def _graphnodeformatter(ui, displayer):
2491 def _graphnodeformatter(ui, displayer):
2492 spec = ui.config('ui', 'graphnodetemplate')
2492 spec = ui.config('ui', 'graphnodetemplate')
2493 if not spec:
2493 if not spec:
2494 return templatekw.showgraphnode # fast path for "{graphnode}"
2494 return templatekw.showgraphnode # fast path for "{graphnode}"
2495
2495
2496 spec = templater.unquotestring(spec)
2496 spec = templater.unquotestring(spec)
2497 templ = formatter.maketemplater(ui, spec)
2497 templ = formatter.maketemplater(ui, spec)
2498 cache = {}
2498 cache = {}
2499 if isinstance(displayer, changeset_templater):
2499 if isinstance(displayer, changeset_templater):
2500 cache = displayer.cache # reuse cache of slow templates
2500 cache = displayer.cache # reuse cache of slow templates
2501 props = templatekw.keywords.copy()
2501 props = templatekw.keywords.copy()
2502 props['templ'] = templ
2502 props['templ'] = templ
2503 props['cache'] = cache
2503 props['cache'] = cache
2504 def formatnode(repo, ctx):
2504 def formatnode(repo, ctx):
2505 props['ctx'] = ctx
2505 props['ctx'] = ctx
2506 props['repo'] = repo
2506 props['repo'] = repo
2507 props['ui'] = repo.ui
2507 props['ui'] = repo.ui
2508 props['revcache'] = {}
2508 props['revcache'] = {}
2509 return templ.render(props)
2509 return templ.render(props)
2510 return formatnode
2510 return formatnode
2511
2511
2512 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2512 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2513 filematcher=None):
2513 filematcher=None):
2514 formatnode = _graphnodeformatter(ui, displayer)
2514 formatnode = _graphnodeformatter(ui, displayer)
2515 state = graphmod.asciistate()
2515 state = graphmod.asciistate()
2516 styles = state['styles']
2516 styles = state['styles']
2517
2517
2518 # only set graph styling if HGPLAIN is not set.
2518 # only set graph styling if HGPLAIN is not set.
2519 if ui.plain('graph'):
2519 if ui.plain('graph'):
2520 # set all edge styles to |, the default pre-3.8 behaviour
2520 # set all edge styles to |, the default pre-3.8 behaviour
2521 styles.update(dict.fromkeys(styles, '|'))
2521 styles.update(dict.fromkeys(styles, '|'))
2522 else:
2522 else:
2523 edgetypes = {
2523 edgetypes = {
2524 'parent': graphmod.PARENT,
2524 'parent': graphmod.PARENT,
2525 'grandparent': graphmod.GRANDPARENT,
2525 'grandparent': graphmod.GRANDPARENT,
2526 'missing': graphmod.MISSINGPARENT
2526 'missing': graphmod.MISSINGPARENT
2527 }
2527 }
2528 for name, key in edgetypes.items():
2528 for name, key in edgetypes.items():
2529 # experimental config: experimental.graphstyle.*
2529 # experimental config: experimental.graphstyle.*
2530 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2530 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2531 styles[key])
2531 styles[key])
2532 if not styles[key]:
2532 if not styles[key]:
2533 styles[key] = None
2533 styles[key] = None
2534
2534
2535 # experimental config: experimental.graphshorten
2535 # experimental config: experimental.graphshorten
2536 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2536 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2537
2537
2538 for rev, type, ctx, parents in dag:
2538 for rev, type, ctx, parents in dag:
2539 char = formatnode(repo, ctx)
2539 char = formatnode(repo, ctx)
2540 copies = None
2540 copies = None
2541 if getrenamed and ctx.rev():
2541 if getrenamed and ctx.rev():
2542 copies = []
2542 copies = []
2543 for fn in ctx.files():
2543 for fn in ctx.files():
2544 rename = getrenamed(fn, ctx.rev())
2544 rename = getrenamed(fn, ctx.rev())
2545 if rename:
2545 if rename:
2546 copies.append((fn, rename[0]))
2546 copies.append((fn, rename[0]))
2547 revmatchfn = None
2547 revmatchfn = None
2548 if filematcher is not None:
2548 if filematcher is not None:
2549 revmatchfn = filematcher(ctx.rev())
2549 revmatchfn = filematcher(ctx.rev())
2550 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2550 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2551 lines = displayer.hunk.pop(rev).split('\n')
2551 lines = displayer.hunk.pop(rev).split('\n')
2552 if not lines[-1]:
2552 if not lines[-1]:
2553 del lines[-1]
2553 del lines[-1]
2554 displayer.flush(ctx)
2554 displayer.flush(ctx)
2555 edges = edgefn(type, char, lines, state, rev, parents)
2555 edges = edgefn(type, char, lines, state, rev, parents)
2556 for type, char, lines, coldata in edges:
2556 for type, char, lines, coldata in edges:
2557 graphmod.ascii(ui, state, type, char, lines, coldata)
2557 graphmod.ascii(ui, state, type, char, lines, coldata)
2558 displayer.close()
2558 displayer.close()
2559
2559
2560 def graphlog(ui, repo, pats, opts):
2560 def graphlog(ui, repo, pats, opts):
2561 # Parameters are identical to log command ones
2561 # Parameters are identical to log command ones
2562 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2562 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2563 revdag = graphmod.dagwalker(repo, revs)
2563 revdag = graphmod.dagwalker(repo, revs)
2564
2564
2565 getrenamed = None
2565 getrenamed = None
2566 if opts.get('copies'):
2566 if opts.get('copies'):
2567 endrev = None
2567 endrev = None
2568 if opts.get('rev'):
2568 if opts.get('rev'):
2569 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2569 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2570 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2570 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2571
2571
2572 ui.pager('log')
2572 ui.pager('log')
2573 displayer = show_changeset(ui, repo, opts, buffered=True)
2573 displayer = show_changeset(ui, repo, opts, buffered=True)
2574 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2574 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2575 filematcher)
2575 filematcher)
2576
2576
2577 def checkunsupportedgraphflags(pats, opts):
2577 def checkunsupportedgraphflags(pats, opts):
2578 for op in ["newest_first"]:
2578 for op in ["newest_first"]:
2579 if op in opts and opts[op]:
2579 if op in opts and opts[op]:
2580 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2580 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2581 % op.replace("_", "-"))
2581 % op.replace("_", "-"))
2582
2582
2583 def graphrevs(repo, nodes, opts):
2583 def graphrevs(repo, nodes, opts):
2584 limit = loglimit(opts)
2584 limit = loglimit(opts)
2585 nodes.reverse()
2585 nodes.reverse()
2586 if limit is not None:
2586 if limit is not None:
2587 nodes = nodes[:limit]
2587 nodes = nodes[:limit]
2588 return graphmod.nodes(repo, nodes)
2588 return graphmod.nodes(repo, nodes)
2589
2589
2590 def add(ui, repo, match, prefix, explicitonly, **opts):
2590 def add(ui, repo, match, prefix, explicitonly, **opts):
2591 join = lambda f: os.path.join(prefix, f)
2591 join = lambda f: os.path.join(prefix, f)
2592 bad = []
2592 bad = []
2593
2593
2594 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2594 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2595 names = []
2595 names = []
2596 wctx = repo[None]
2596 wctx = repo[None]
2597 cca = None
2597 cca = None
2598 abort, warn = scmutil.checkportabilityalert(ui)
2598 abort, warn = scmutil.checkportabilityalert(ui)
2599 if abort or warn:
2599 if abort or warn:
2600 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2600 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2601
2601
2602 badmatch = matchmod.badmatch(match, badfn)
2602 badmatch = matchmod.badmatch(match, badfn)
2603 dirstate = repo.dirstate
2603 dirstate = repo.dirstate
2604 # We don't want to just call wctx.walk here, since it would return a lot of
2604 # We don't want to just call wctx.walk here, since it would return a lot of
2605 # clean files, which we aren't interested in and takes time.
2605 # clean files, which we aren't interested in and takes time.
2606 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2606 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2607 True, False, full=False)):
2607 True, False, full=False)):
2608 exact = match.exact(f)
2608 exact = match.exact(f)
2609 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2609 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2610 if cca:
2610 if cca:
2611 cca(f)
2611 cca(f)
2612 names.append(f)
2612 names.append(f)
2613 if ui.verbose or not exact:
2613 if ui.verbose or not exact:
2614 ui.status(_('adding %s\n') % match.rel(f))
2614 ui.status(_('adding %s\n') % match.rel(f))
2615
2615
2616 for subpath in sorted(wctx.substate):
2616 for subpath in sorted(wctx.substate):
2617 sub = wctx.sub(subpath)
2617 sub = wctx.sub(subpath)
2618 try:
2618 try:
2619 submatch = matchmod.subdirmatcher(subpath, match)
2619 submatch = matchmod.subdirmatcher(subpath, match)
2620 if opts.get(r'subrepos'):
2620 if opts.get(r'subrepos'):
2621 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2621 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2622 else:
2622 else:
2623 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2623 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2624 except error.LookupError:
2624 except error.LookupError:
2625 ui.status(_("skipping missing subrepository: %s\n")
2625 ui.status(_("skipping missing subrepository: %s\n")
2626 % join(subpath))
2626 % join(subpath))
2627
2627
2628 if not opts.get(r'dry_run'):
2628 if not opts.get(r'dry_run'):
2629 rejected = wctx.add(names, prefix)
2629 rejected = wctx.add(names, prefix)
2630 bad.extend(f for f in rejected if f in match.files())
2630 bad.extend(f for f in rejected if f in match.files())
2631 return bad
2631 return bad
2632
2632
2633 def addwebdirpath(repo, serverpath, webconf):
2633 def addwebdirpath(repo, serverpath, webconf):
2634 webconf[serverpath] = repo.root
2634 webconf[serverpath] = repo.root
2635 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2635 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2636
2636
2637 for r in repo.revs('filelog("path:.hgsub")'):
2637 for r in repo.revs('filelog("path:.hgsub")'):
2638 ctx = repo[r]
2638 ctx = repo[r]
2639 for subpath in ctx.substate:
2639 for subpath in ctx.substate:
2640 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2640 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2641
2641
2642 def forget(ui, repo, match, prefix, explicitonly):
2642 def forget(ui, repo, match, prefix, explicitonly):
2643 join = lambda f: os.path.join(prefix, f)
2643 join = lambda f: os.path.join(prefix, f)
2644 bad = []
2644 bad = []
2645 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2645 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2646 wctx = repo[None]
2646 wctx = repo[None]
2647 forgot = []
2647 forgot = []
2648
2648
2649 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2649 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2650 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2650 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2651 if explicitonly:
2651 if explicitonly:
2652 forget = [f for f in forget if match.exact(f)]
2652 forget = [f for f in forget if match.exact(f)]
2653
2653
2654 for subpath in sorted(wctx.substate):
2654 for subpath in sorted(wctx.substate):
2655 sub = wctx.sub(subpath)
2655 sub = wctx.sub(subpath)
2656 try:
2656 try:
2657 submatch = matchmod.subdirmatcher(subpath, match)
2657 submatch = matchmod.subdirmatcher(subpath, match)
2658 subbad, subforgot = sub.forget(submatch, prefix)
2658 subbad, subforgot = sub.forget(submatch, prefix)
2659 bad.extend([subpath + '/' + f for f in subbad])
2659 bad.extend([subpath + '/' + f for f in subbad])
2660 forgot.extend([subpath + '/' + f for f in subforgot])
2660 forgot.extend([subpath + '/' + f for f in subforgot])
2661 except error.LookupError:
2661 except error.LookupError:
2662 ui.status(_("skipping missing subrepository: %s\n")
2662 ui.status(_("skipping missing subrepository: %s\n")
2663 % join(subpath))
2663 % join(subpath))
2664
2664
2665 if not explicitonly:
2665 if not explicitonly:
2666 for f in match.files():
2666 for f in match.files():
2667 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2667 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2668 if f not in forgot:
2668 if f not in forgot:
2669 if repo.wvfs.exists(f):
2669 if repo.wvfs.exists(f):
2670 # Don't complain if the exact case match wasn't given.
2670 # Don't complain if the exact case match wasn't given.
2671 # But don't do this until after checking 'forgot', so
2671 # But don't do this until after checking 'forgot', so
2672 # that subrepo files aren't normalized, and this op is
2672 # that subrepo files aren't normalized, and this op is
2673 # purely from data cached by the status walk above.
2673 # purely from data cached by the status walk above.
2674 if repo.dirstate.normalize(f) in repo.dirstate:
2674 if repo.dirstate.normalize(f) in repo.dirstate:
2675 continue
2675 continue
2676 ui.warn(_('not removing %s: '
2676 ui.warn(_('not removing %s: '
2677 'file is already untracked\n')
2677 'file is already untracked\n')
2678 % match.rel(f))
2678 % match.rel(f))
2679 bad.append(f)
2679 bad.append(f)
2680
2680
2681 for f in forget:
2681 for f in forget:
2682 if ui.verbose or not match.exact(f):
2682 if ui.verbose or not match.exact(f):
2683 ui.status(_('removing %s\n') % match.rel(f))
2683 ui.status(_('removing %s\n') % match.rel(f))
2684
2684
2685 rejected = wctx.forget(forget, prefix)
2685 rejected = wctx.forget(forget, prefix)
2686 bad.extend(f for f in rejected if f in match.files())
2686 bad.extend(f for f in rejected if f in match.files())
2687 forgot.extend(f for f in forget if f not in rejected)
2687 forgot.extend(f for f in forget if f not in rejected)
2688 return bad, forgot
2688 return bad, forgot
2689
2689
2690 def files(ui, ctx, m, fm, fmt, subrepos):
2690 def files(ui, ctx, m, fm, fmt, subrepos):
2691 rev = ctx.rev()
2691 rev = ctx.rev()
2692 ret = 1
2692 ret = 1
2693 ds = ctx.repo().dirstate
2693 ds = ctx.repo().dirstate
2694
2694
2695 for f in ctx.matches(m):
2695 for f in ctx.matches(m):
2696 if rev is None and ds[f] == 'r':
2696 if rev is None and ds[f] == 'r':
2697 continue
2697 continue
2698 fm.startitem()
2698 fm.startitem()
2699 if ui.verbose:
2699 if ui.verbose:
2700 fc = ctx[f]
2700 fc = ctx[f]
2701 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2701 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2702 fm.data(abspath=f)
2702 fm.data(abspath=f)
2703 fm.write('path', fmt, m.rel(f))
2703 fm.write('path', fmt, m.rel(f))
2704 ret = 0
2704 ret = 0
2705
2705
2706 for subpath in sorted(ctx.substate):
2706 for subpath in sorted(ctx.substate):
2707 submatch = matchmod.subdirmatcher(subpath, m)
2707 submatch = matchmod.subdirmatcher(subpath, m)
2708 if (subrepos or m.exact(subpath) or any(submatch.files())):
2708 if (subrepos or m.exact(subpath) or any(submatch.files())):
2709 sub = ctx.sub(subpath)
2709 sub = ctx.sub(subpath)
2710 try:
2710 try:
2711 recurse = m.exact(subpath) or subrepos
2711 recurse = m.exact(subpath) or subrepos
2712 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2712 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2713 ret = 0
2713 ret = 0
2714 except error.LookupError:
2714 except error.LookupError:
2715 ui.status(_("skipping missing subrepository: %s\n")
2715 ui.status(_("skipping missing subrepository: %s\n")
2716 % m.abs(subpath))
2716 % m.abs(subpath))
2717
2717
2718 return ret
2718 return ret
2719
2719
2720 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2720 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2721 join = lambda f: os.path.join(prefix, f)
2721 join = lambda f: os.path.join(prefix, f)
2722 ret = 0
2722 ret = 0
2723 s = repo.status(match=m, clean=True)
2723 s = repo.status(match=m, clean=True)
2724 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2724 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2725
2725
2726 wctx = repo[None]
2726 wctx = repo[None]
2727
2727
2728 if warnings is None:
2728 if warnings is None:
2729 warnings = []
2729 warnings = []
2730 warn = True
2730 warn = True
2731 else:
2731 else:
2732 warn = False
2732 warn = False
2733
2733
2734 subs = sorted(wctx.substate)
2734 subs = sorted(wctx.substate)
2735 total = len(subs)
2735 total = len(subs)
2736 count = 0
2736 count = 0
2737 for subpath in subs:
2737 for subpath in subs:
2738 count += 1
2738 count += 1
2739 submatch = matchmod.subdirmatcher(subpath, m)
2739 submatch = matchmod.subdirmatcher(subpath, m)
2740 if subrepos or m.exact(subpath) or any(submatch.files()):
2740 if subrepos or m.exact(subpath) or any(submatch.files()):
2741 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2741 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2742 sub = wctx.sub(subpath)
2742 sub = wctx.sub(subpath)
2743 try:
2743 try:
2744 if sub.removefiles(submatch, prefix, after, force, subrepos,
2744 if sub.removefiles(submatch, prefix, after, force, subrepos,
2745 warnings):
2745 warnings):
2746 ret = 1
2746 ret = 1
2747 except error.LookupError:
2747 except error.LookupError:
2748 warnings.append(_("skipping missing subrepository: %s\n")
2748 warnings.append(_("skipping missing subrepository: %s\n")
2749 % join(subpath))
2749 % join(subpath))
2750 ui.progress(_('searching'), None)
2750 ui.progress(_('searching'), None)
2751
2751
2752 # warn about failure to delete explicit files/dirs
2752 # warn about failure to delete explicit files/dirs
2753 deleteddirs = util.dirs(deleted)
2753 deleteddirs = util.dirs(deleted)
2754 files = m.files()
2754 files = m.files()
2755 total = len(files)
2755 total = len(files)
2756 count = 0
2756 count = 0
2757 for f in files:
2757 for f in files:
2758 def insubrepo():
2758 def insubrepo():
2759 for subpath in wctx.substate:
2759 for subpath in wctx.substate:
2760 if f.startswith(subpath + '/'):
2760 if f.startswith(subpath + '/'):
2761 return True
2761 return True
2762 return False
2762 return False
2763
2763
2764 count += 1
2764 count += 1
2765 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2765 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2766 isdir = f in deleteddirs or wctx.hasdir(f)
2766 isdir = f in deleteddirs or wctx.hasdir(f)
2767 if (f in repo.dirstate or isdir or f == '.'
2767 if (f in repo.dirstate or isdir or f == '.'
2768 or insubrepo() or f in subs):
2768 or insubrepo() or f in subs):
2769 continue
2769 continue
2770
2770
2771 if repo.wvfs.exists(f):
2771 if repo.wvfs.exists(f):
2772 if repo.wvfs.isdir(f):
2772 if repo.wvfs.isdir(f):
2773 warnings.append(_('not removing %s: no tracked files\n')
2773 warnings.append(_('not removing %s: no tracked files\n')
2774 % m.rel(f))
2774 % m.rel(f))
2775 else:
2775 else:
2776 warnings.append(_('not removing %s: file is untracked\n')
2776 warnings.append(_('not removing %s: file is untracked\n')
2777 % m.rel(f))
2777 % m.rel(f))
2778 # missing files will generate a warning elsewhere
2778 # missing files will generate a warning elsewhere
2779 ret = 1
2779 ret = 1
2780 ui.progress(_('deleting'), None)
2780 ui.progress(_('deleting'), None)
2781
2781
2782 if force:
2782 if force:
2783 list = modified + deleted + clean + added
2783 list = modified + deleted + clean + added
2784 elif after:
2784 elif after:
2785 list = deleted
2785 list = deleted
2786 remaining = modified + added + clean
2786 remaining = modified + added + clean
2787 total = len(remaining)
2787 total = len(remaining)
2788 count = 0
2788 count = 0
2789 for f in remaining:
2789 for f in remaining:
2790 count += 1
2790 count += 1
2791 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2791 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2792 warnings.append(_('not removing %s: file still exists\n')
2792 warnings.append(_('not removing %s: file still exists\n')
2793 % m.rel(f))
2793 % m.rel(f))
2794 ret = 1
2794 ret = 1
2795 ui.progress(_('skipping'), None)
2795 ui.progress(_('skipping'), None)
2796 else:
2796 else:
2797 list = deleted + clean
2797 list = deleted + clean
2798 total = len(modified) + len(added)
2798 total = len(modified) + len(added)
2799 count = 0
2799 count = 0
2800 for f in modified:
2800 for f in modified:
2801 count += 1
2801 count += 1
2802 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2802 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2803 warnings.append(_('not removing %s: file is modified (use -f'
2803 warnings.append(_('not removing %s: file is modified (use -f'
2804 ' to force removal)\n') % m.rel(f))
2804 ' to force removal)\n') % m.rel(f))
2805 ret = 1
2805 ret = 1
2806 for f in added:
2806 for f in added:
2807 count += 1
2807 count += 1
2808 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2808 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2809 warnings.append(_("not removing %s: file has been marked for add"
2809 warnings.append(_("not removing %s: file has been marked for add"
2810 " (use 'hg forget' to undo add)\n") % m.rel(f))
2810 " (use 'hg forget' to undo add)\n") % m.rel(f))
2811 ret = 1
2811 ret = 1
2812 ui.progress(_('skipping'), None)
2812 ui.progress(_('skipping'), None)
2813
2813
2814 list = sorted(list)
2814 list = sorted(list)
2815 total = len(list)
2815 total = len(list)
2816 count = 0
2816 count = 0
2817 for f in list:
2817 for f in list:
2818 count += 1
2818 count += 1
2819 if ui.verbose or not m.exact(f):
2819 if ui.verbose or not m.exact(f):
2820 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2820 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2821 ui.status(_('removing %s\n') % m.rel(f))
2821 ui.status(_('removing %s\n') % m.rel(f))
2822 ui.progress(_('deleting'), None)
2822 ui.progress(_('deleting'), None)
2823
2823
2824 with repo.wlock():
2824 with repo.wlock():
2825 if not after:
2825 if not after:
2826 for f in list:
2826 for f in list:
2827 if f in added:
2827 if f in added:
2828 continue # we never unlink added files on remove
2828 continue # we never unlink added files on remove
2829 repo.wvfs.unlinkpath(f, ignoremissing=True)
2829 repo.wvfs.unlinkpath(f, ignoremissing=True)
2830 repo[None].forget(list)
2830 repo[None].forget(list)
2831
2831
2832 if warn:
2832 if warn:
2833 for warning in warnings:
2833 for warning in warnings:
2834 ui.warn(warning)
2834 ui.warn(warning)
2835
2835
2836 return ret
2836 return ret
2837
2837
2838 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2838 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2839 err = 1
2839 err = 1
2840
2840
2841 def write(path):
2841 def write(path):
2842 filename = None
2842 filename = None
2843 if fntemplate:
2843 if fntemplate:
2844 filename = makefilename(repo, fntemplate, ctx.node(),
2844 filename = makefilename(repo, fntemplate, ctx.node(),
2845 pathname=os.path.join(prefix, path))
2845 pathname=os.path.join(prefix, path))
2846 with formatter.maybereopen(basefm, filename, opts) as fm:
2846 with formatter.maybereopen(basefm, filename, opts) as fm:
2847 data = ctx[path].data()
2847 data = ctx[path].data()
2848 if opts.get('decode'):
2848 if opts.get('decode'):
2849 data = repo.wwritedata(path, data)
2849 data = repo.wwritedata(path, data)
2850 fm.startitem()
2850 fm.startitem()
2851 fm.write('data', '%s', data)
2851 fm.write('data', '%s', data)
2852 fm.data(abspath=path, path=matcher.rel(path))
2852 fm.data(abspath=path, path=matcher.rel(path))
2853
2853
2854 # Automation often uses hg cat on single files, so special case it
2854 # Automation often uses hg cat on single files, so special case it
2855 # for performance to avoid the cost of parsing the manifest.
2855 # for performance to avoid the cost of parsing the manifest.
2856 if len(matcher.files()) == 1 and not matcher.anypats():
2856 if len(matcher.files()) == 1 and not matcher.anypats():
2857 file = matcher.files()[0]
2857 file = matcher.files()[0]
2858 mfl = repo.manifestlog
2858 mfl = repo.manifestlog
2859 mfnode = ctx.manifestnode()
2859 mfnode = ctx.manifestnode()
2860 try:
2860 try:
2861 if mfnode and mfl[mfnode].find(file)[0]:
2861 if mfnode and mfl[mfnode].find(file)[0]:
2862 write(file)
2862 write(file)
2863 return 0
2863 return 0
2864 except KeyError:
2864 except KeyError:
2865 pass
2865 pass
2866
2866
2867 for abs in ctx.walk(matcher):
2867 for abs in ctx.walk(matcher):
2868 write(abs)
2868 write(abs)
2869 err = 0
2869 err = 0
2870
2870
2871 for subpath in sorted(ctx.substate):
2871 for subpath in sorted(ctx.substate):
2872 sub = ctx.sub(subpath)
2872 sub = ctx.sub(subpath)
2873 try:
2873 try:
2874 submatch = matchmod.subdirmatcher(subpath, matcher)
2874 submatch = matchmod.subdirmatcher(subpath, matcher)
2875
2875
2876 if not sub.cat(submatch, basefm, fntemplate,
2876 if not sub.cat(submatch, basefm, fntemplate,
2877 os.path.join(prefix, sub._path), **opts):
2877 os.path.join(prefix, sub._path), **opts):
2878 err = 0
2878 err = 0
2879 except error.RepoLookupError:
2879 except error.RepoLookupError:
2880 ui.status(_("skipping missing subrepository: %s\n")
2880 ui.status(_("skipping missing subrepository: %s\n")
2881 % os.path.join(prefix, subpath))
2881 % os.path.join(prefix, subpath))
2882
2882
2883 return err
2883 return err
2884
2884
2885 def commit(ui, repo, commitfunc, pats, opts):
2885 def commit(ui, repo, commitfunc, pats, opts):
2886 '''commit the specified files or all outstanding changes'''
2886 '''commit the specified files or all outstanding changes'''
2887 date = opts.get('date')
2887 date = opts.get('date')
2888 if date:
2888 if date:
2889 opts['date'] = util.parsedate(date)
2889 opts['date'] = util.parsedate(date)
2890 message = logmessage(ui, opts)
2890 message = logmessage(ui, opts)
2891 matcher = scmutil.match(repo[None], pats, opts)
2891 matcher = scmutil.match(repo[None], pats, opts)
2892
2892
2893 dsguard = None
2893 dsguard = None
2894 # extract addremove carefully -- this function can be called from a command
2894 # extract addremove carefully -- this function can be called from a command
2895 # that doesn't support addremove
2895 # that doesn't support addremove
2896 try:
2896 try:
2897 if opts.get('addremove'):
2897 if opts.get('addremove'):
2898 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2898 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2899 if scmutil.addremove(repo, matcher, "", opts) != 0:
2899 if scmutil.addremove(repo, matcher, "", opts) != 0:
2900 raise error.Abort(
2900 raise error.Abort(
2901 _("failed to mark all new/missing files as added/removed"))
2901 _("failed to mark all new/missing files as added/removed"))
2902
2902
2903 r = commitfunc(ui, repo, message, matcher, opts)
2903 r = commitfunc(ui, repo, message, matcher, opts)
2904 if dsguard:
2904 if dsguard:
2905 dsguard.close()
2905 dsguard.close()
2906 return r
2906 return r
2907 finally:
2907 finally:
2908 if dsguard:
2908 if dsguard:
2909 dsguard.release()
2909 dsguard.release()
2910
2910
2911 def samefile(f, ctx1, ctx2):
2911 def samefile(f, ctx1, ctx2):
2912 if f in ctx1.manifest():
2912 if f in ctx1.manifest():
2913 a = ctx1.filectx(f)
2913 a = ctx1.filectx(f)
2914 if f in ctx2.manifest():
2914 if f in ctx2.manifest():
2915 b = ctx2.filectx(f)
2915 b = ctx2.filectx(f)
2916 return (not a.cmp(b)
2916 return (not a.cmp(b)
2917 and a.flags() == b.flags())
2917 and a.flags() == b.flags())
2918 else:
2918 else:
2919 return False
2919 return False
2920 else:
2920 else:
2921 return f not in ctx2.manifest()
2921 return f not in ctx2.manifest()
2922
2922
2923 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2923 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2924 # avoid cycle context -> subrepo -> cmdutil
2924 # avoid cycle context -> subrepo -> cmdutil
2925 from . import context
2925 from . import context
2926
2926
2927 # amend will reuse the existing user if not specified, but the obsolete
2927 # amend will reuse the existing user if not specified, but the obsolete
2928 # marker creation requires that the current user's name is specified.
2928 # marker creation requires that the current user's name is specified.
2929 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2929 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2930 ui.username() # raise exception if username not set
2930 ui.username() # raise exception if username not set
2931
2931
2932 ui.note(_('amending changeset %s\n') % old)
2932 ui.note(_('amending changeset %s\n') % old)
2933 base = old.p1()
2933 base = old.p1()
2934
2934
2935 newid = None
2935 newid = None
2936 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2936 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2937 # See if we got a message from -m or -l, if not, open the editor
2937 # See if we got a message from -m or -l, if not, open the editor
2938 # with the message of the changeset to amend
2938 # with the message of the changeset to amend
2939 message = logmessage(ui, opts)
2939 message = logmessage(ui, opts)
2940 # ensure logfile does not conflict with later enforcement of the
2940 # ensure logfile does not conflict with later enforcement of the
2941 # message. potential logfile content has been processed by
2941 # message. potential logfile content has been processed by
2942 # `logmessage` anyway.
2942 # `logmessage` anyway.
2943 opts.pop('logfile')
2943 opts.pop('logfile')
2944 # First, do a regular commit to record all changes in the working
2944 # First, do a regular commit to record all changes in the working
2945 # directory (if there are any)
2945 # directory (if there are any)
2946 ui.callhooks = False
2946 ui.callhooks = False
2947 activebookmark = repo._bookmarks.active
2947 activebookmark = repo._bookmarks.active
2948 try:
2948 try:
2949 repo._bookmarks.active = None
2949 repo._bookmarks.active = None
2950 opts['message'] = 'temporary amend commit for %s' % old
2950 opts['message'] = 'temporary amend commit for %s' % old
2951 node = commit(ui, repo, commitfunc, pats, opts)
2951 node = commit(ui, repo, commitfunc, pats, opts)
2952 finally:
2952 finally:
2953 repo._bookmarks.active = activebookmark
2953 repo._bookmarks.active = activebookmark
2954 ui.callhooks = True
2954 ui.callhooks = True
2955 ctx = repo[node]
2955 ctx = repo[node]
2956
2956
2957 # Participating changesets:
2957 # Participating changesets:
2958 #
2958 #
2959 # node/ctx o - new (intermediate) commit that contains changes
2959 # node/ctx o - new (intermediate) commit that contains changes
2960 # | from working dir to go into amending commit
2960 # | from working dir to go into amending commit
2961 # | (or a workingctx if there were no changes)
2961 # | (or a workingctx if there were no changes)
2962 # |
2962 # |
2963 # old o - changeset to amend
2963 # old o - changeset to amend
2964 # |
2964 # |
2965 # base o - parent of amending changeset
2965 # base o - parent of amending changeset
2966
2966
2967 # Update extra dict from amended commit (e.g. to preserve graft
2967 # Update extra dict from amended commit (e.g. to preserve graft
2968 # source)
2968 # source)
2969 extra.update(old.extra())
2969 extra.update(old.extra())
2970
2970
2971 # Also update it from the intermediate commit or from the wctx
2971 # Also update it from the intermediate commit or from the wctx
2972 extra.update(ctx.extra())
2972 extra.update(ctx.extra())
2973
2973
2974 if len(old.parents()) > 1:
2974 if len(old.parents()) > 1:
2975 # ctx.files() isn't reliable for merges, so fall back to the
2975 # ctx.files() isn't reliable for merges, so fall back to the
2976 # slower repo.status() method
2976 # slower repo.status() method
2977 files = set([fn for st in repo.status(base, old)[:3]
2977 files = set([fn for st in repo.status(base, old)[:3]
2978 for fn in st])
2978 for fn in st])
2979 else:
2979 else:
2980 files = set(old.files())
2980 files = set(old.files())
2981
2981
2982 # Second, we use either the commit we just did, or if there were no
2982 # Second, we use either the commit we just did, or if there were no
2983 # changes the parent of the working directory as the version of the
2983 # changes the parent of the working directory as the version of the
2984 # files in the final amend commit
2984 # files in the final amend commit
2985 if node:
2985 if node:
2986 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2986 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2987
2987
2988 user = ctx.user()
2988 user = ctx.user()
2989 date = ctx.date()
2989 date = ctx.date()
2990 # Recompute copies (avoid recording a -> b -> a)
2990 # Recompute copies (avoid recording a -> b -> a)
2991 copied = copies.pathcopies(base, ctx)
2991 copied = copies.pathcopies(base, ctx)
2992 if old.p2:
2992 if old.p2:
2993 copied.update(copies.pathcopies(old.p2(), ctx))
2993 copied.update(copies.pathcopies(old.p2(), ctx))
2994
2994
2995 # Prune files which were reverted by the updates: if old
2995 # Prune files which were reverted by the updates: if old
2996 # introduced file X and our intermediate commit, node,
2996 # introduced file X and our intermediate commit, node,
2997 # renamed that file, then those two files are the same and
2997 # renamed that file, then those two files are the same and
2998 # we can discard X from our list of files. Likewise if X
2998 # we can discard X from our list of files. Likewise if X
2999 # was deleted, it's no longer relevant
2999 # was deleted, it's no longer relevant
3000 files.update(ctx.files())
3000 files.update(ctx.files())
3001 files = [f for f in files if not samefile(f, ctx, base)]
3001 files = [f for f in files if not samefile(f, ctx, base)]
3002
3002
3003 def filectxfn(repo, ctx_, path):
3003 def filectxfn(repo, ctx_, path):
3004 try:
3004 try:
3005 fctx = ctx[path]
3005 fctx = ctx[path]
3006 flags = fctx.flags()
3006 flags = fctx.flags()
3007 mctx = context.memfilectx(repo,
3007 mctx = context.memfilectx(repo,
3008 fctx.path(), fctx.data(),
3008 fctx.path(), fctx.data(),
3009 islink='l' in flags,
3009 islink='l' in flags,
3010 isexec='x' in flags,
3010 isexec='x' in flags,
3011 copied=copied.get(path))
3011 copied=copied.get(path))
3012 return mctx
3012 return mctx
3013 except KeyError:
3013 except KeyError:
3014 return None
3014 return None
3015 else:
3015 else:
3016 ui.note(_('copying changeset %s to %s\n') % (old, base))
3016 ui.note(_('copying changeset %s to %s\n') % (old, base))
3017
3017
3018 # Use version of files as in the old cset
3018 # Use version of files as in the old cset
3019 def filectxfn(repo, ctx_, path):
3019 def filectxfn(repo, ctx_, path):
3020 try:
3020 try:
3021 return old.filectx(path)
3021 return old.filectx(path)
3022 except KeyError:
3022 except KeyError:
3023 return None
3023 return None
3024
3024
3025 user = opts.get('user') or old.user()
3025 user = opts.get('user') or old.user()
3026 date = opts.get('date') or old.date()
3026 date = opts.get('date') or old.date()
3027 editform = mergeeditform(old, 'commit.amend')
3027 editform = mergeeditform(old, 'commit.amend')
3028 editor = getcommiteditor(editform=editform,
3028 editor = getcommiteditor(editform=editform,
3029 **pycompat.strkwargs(opts))
3029 **pycompat.strkwargs(opts))
3030 if not message:
3030 if not message:
3031 editor = getcommiteditor(edit=True, editform=editform)
3031 editor = getcommiteditor(edit=True, editform=editform)
3032 message = old.description()
3032 message = old.description()
3033
3033
3034 pureextra = extra.copy()
3034 pureextra = extra.copy()
3035 extra['amend_source'] = old.hex()
3035 extra['amend_source'] = old.hex()
3036
3036
3037 new = context.memctx(repo,
3037 new = context.memctx(repo,
3038 parents=[base.node(), old.p2().node()],
3038 parents=[base.node(), old.p2().node()],
3039 text=message,
3039 text=message,
3040 files=files,
3040 files=files,
3041 filectxfn=filectxfn,
3041 filectxfn=filectxfn,
3042 user=user,
3042 user=user,
3043 date=date,
3043 date=date,
3044 extra=extra,
3044 extra=extra,
3045 editor=editor)
3045 editor=editor)
3046
3046
3047 newdesc = changelog.stripdesc(new.description())
3047 newdesc = changelog.stripdesc(new.description())
3048 if ((not node)
3048 if ((not node)
3049 and newdesc == old.description()
3049 and newdesc == old.description()
3050 and user == old.user()
3050 and user == old.user()
3051 and date == old.date()
3051 and date == old.date()
3052 and pureextra == old.extra()):
3052 and pureextra == old.extra()):
3053 # nothing changed. continuing here would create a new node
3053 # nothing changed. continuing here would create a new node
3054 # anyway because of the amend_source noise.
3054 # anyway because of the amend_source noise.
3055 #
3055 #
3056 # This not what we expect from amend.
3056 # This not what we expect from amend.
3057 return old.node()
3057 return old.node()
3058
3058
3059 ph = repo.ui.config('phases', 'new-commit', phases.draft)
3059 ph = repo.ui.config('phases', 'new-commit', phases.draft)
3060 try:
3060 try:
3061 if opts.get('secret'):
3061 if opts.get('secret'):
3062 commitphase = 'secret'
3062 commitphase = 'secret'
3063 else:
3063 else:
3064 commitphase = old.phase()
3064 commitphase = old.phase()
3065 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
3065 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
3066 newid = repo.commitctx(new)
3066 newid = repo.commitctx(new)
3067 finally:
3067 finally:
3068 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
3068 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
3069 if newid != old.node():
3069 if newid != old.node():
3070 # Reroute the working copy parent to the new changeset
3070 # Reroute the working copy parent to the new changeset
3071 repo.setparents(newid, nullid)
3071 repo.setparents(newid, nullid)
3072 mapping = {old.node(): (newid,)}
3072 mapping = {old.node(): (newid,)}
3073 if node:
3073 if node:
3074 mapping[node] = ()
3074 mapping[node] = ()
3075 scmutil.cleanupnodes(repo, mapping, 'amend')
3075 scmutil.cleanupnodes(repo, mapping, 'amend')
3076 return newid
3076 return newid
3077
3077
3078 def commiteditor(repo, ctx, subs, editform=''):
3078 def commiteditor(repo, ctx, subs, editform=''):
3079 if ctx.description():
3079 if ctx.description():
3080 return ctx.description()
3080 return ctx.description()
3081 return commitforceeditor(repo, ctx, subs, editform=editform,
3081 return commitforceeditor(repo, ctx, subs, editform=editform,
3082 unchangedmessagedetection=True)
3082 unchangedmessagedetection=True)
3083
3083
3084 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3084 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3085 editform='', unchangedmessagedetection=False):
3085 editform='', unchangedmessagedetection=False):
3086 if not extramsg:
3086 if not extramsg:
3087 extramsg = _("Leave message empty to abort commit.")
3087 extramsg = _("Leave message empty to abort commit.")
3088
3088
3089 forms = [e for e in editform.split('.') if e]
3089 forms = [e for e in editform.split('.') if e]
3090 forms.insert(0, 'changeset')
3090 forms.insert(0, 'changeset')
3091 templatetext = None
3091 templatetext = None
3092 while forms:
3092 while forms:
3093 ref = '.'.join(forms)
3093 ref = '.'.join(forms)
3094 if repo.ui.config('committemplate', ref):
3094 if repo.ui.config('committemplate', ref):
3095 templatetext = committext = buildcommittemplate(
3095 templatetext = committext = buildcommittemplate(
3096 repo, ctx, subs, extramsg, ref)
3096 repo, ctx, subs, extramsg, ref)
3097 break
3097 break
3098 forms.pop()
3098 forms.pop()
3099 else:
3099 else:
3100 committext = buildcommittext(repo, ctx, subs, extramsg)
3100 committext = buildcommittext(repo, ctx, subs, extramsg)
3101
3101
3102 # run editor in the repository root
3102 # run editor in the repository root
3103 olddir = pycompat.getcwd()
3103 olddir = pycompat.getcwd()
3104 os.chdir(repo.root)
3104 os.chdir(repo.root)
3105
3105
3106 # make in-memory changes visible to external process
3106 # make in-memory changes visible to external process
3107 tr = repo.currenttransaction()
3107 tr = repo.currenttransaction()
3108 repo.dirstate.write(tr)
3108 repo.dirstate.write(tr)
3109 pending = tr and tr.writepending() and repo.root
3109 pending = tr and tr.writepending() and repo.root
3110
3110
3111 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3111 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3112 editform=editform, pending=pending,
3112 editform=editform, pending=pending,
3113 repopath=repo.path)
3113 repopath=repo.path)
3114 text = editortext
3114 text = editortext
3115
3115
3116 # strip away anything below this special string (used for editors that want
3116 # strip away anything below this special string (used for editors that want
3117 # to display the diff)
3117 # to display the diff)
3118 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3118 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3119 if stripbelow:
3119 if stripbelow:
3120 text = text[:stripbelow.start()]
3120 text = text[:stripbelow.start()]
3121
3121
3122 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3122 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3123 os.chdir(olddir)
3123 os.chdir(olddir)
3124
3124
3125 if finishdesc:
3125 if finishdesc:
3126 text = finishdesc(text)
3126 text = finishdesc(text)
3127 if not text.strip():
3127 if not text.strip():
3128 raise error.Abort(_("empty commit message"))
3128 raise error.Abort(_("empty commit message"))
3129 if unchangedmessagedetection and editortext == templatetext:
3129 if unchangedmessagedetection and editortext == templatetext:
3130 raise error.Abort(_("commit message unchanged"))
3130 raise error.Abort(_("commit message unchanged"))
3131
3131
3132 return text
3132 return text
3133
3133
3134 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3134 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3135 ui = repo.ui
3135 ui = repo.ui
3136 spec = formatter.templatespec(ref, None, None)
3136 spec = formatter.templatespec(ref, None, None)
3137 t = changeset_templater(ui, repo, spec, None, {}, False)
3137 t = changeset_templater(ui, repo, spec, None, {}, False)
3138 t.t.cache.update((k, templater.unquotestring(v))
3138 t.t.cache.update((k, templater.unquotestring(v))
3139 for k, v in repo.ui.configitems('committemplate'))
3139 for k, v in repo.ui.configitems('committemplate'))
3140
3140
3141 if not extramsg:
3141 if not extramsg:
3142 extramsg = '' # ensure that extramsg is string
3142 extramsg = '' # ensure that extramsg is string
3143
3143
3144 ui.pushbuffer()
3144 ui.pushbuffer()
3145 t.show(ctx, extramsg=extramsg)
3145 t.show(ctx, extramsg=extramsg)
3146 return ui.popbuffer()
3146 return ui.popbuffer()
3147
3147
3148 def hgprefix(msg):
3148 def hgprefix(msg):
3149 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3149 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3150
3150
3151 def buildcommittext(repo, ctx, subs, extramsg):
3151 def buildcommittext(repo, ctx, subs, extramsg):
3152 edittext = []
3152 edittext = []
3153 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3153 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3154 if ctx.description():
3154 if ctx.description():
3155 edittext.append(ctx.description())
3155 edittext.append(ctx.description())
3156 edittext.append("")
3156 edittext.append("")
3157 edittext.append("") # Empty line between message and comments.
3157 edittext.append("") # Empty line between message and comments.
3158 edittext.append(hgprefix(_("Enter commit message."
3158 edittext.append(hgprefix(_("Enter commit message."
3159 " Lines beginning with 'HG:' are removed.")))
3159 " Lines beginning with 'HG:' are removed.")))
3160 edittext.append(hgprefix(extramsg))
3160 edittext.append(hgprefix(extramsg))
3161 edittext.append("HG: --")
3161 edittext.append("HG: --")
3162 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3162 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3163 if ctx.p2():
3163 if ctx.p2():
3164 edittext.append(hgprefix(_("branch merge")))
3164 edittext.append(hgprefix(_("branch merge")))
3165 if ctx.branch():
3165 if ctx.branch():
3166 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3166 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3167 if bookmarks.isactivewdirparent(repo):
3167 if bookmarks.isactivewdirparent(repo):
3168 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3168 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3169 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3169 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3170 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3170 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3171 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3171 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3172 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3172 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3173 if not added and not modified and not removed:
3173 if not added and not modified and not removed:
3174 edittext.append(hgprefix(_("no files changed")))
3174 edittext.append(hgprefix(_("no files changed")))
3175 edittext.append("")
3175 edittext.append("")
3176
3176
3177 return "\n".join(edittext)
3177 return "\n".join(edittext)
3178
3178
3179 def commitstatus(repo, node, branch, bheads=None, opts=None):
3179 def commitstatus(repo, node, branch, bheads=None, opts=None):
3180 if opts is None:
3180 if opts is None:
3181 opts = {}
3181 opts = {}
3182 ctx = repo[node]
3182 ctx = repo[node]
3183 parents = ctx.parents()
3183 parents = ctx.parents()
3184
3184
3185 if (not opts.get('amend') and bheads and node not in bheads and not
3185 if (not opts.get('amend') and bheads and node not in bheads and not
3186 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3186 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3187 repo.ui.status(_('created new head\n'))
3187 repo.ui.status(_('created new head\n'))
3188 # The message is not printed for initial roots. For the other
3188 # The message is not printed for initial roots. For the other
3189 # changesets, it is printed in the following situations:
3189 # changesets, it is printed in the following situations:
3190 #
3190 #
3191 # Par column: for the 2 parents with ...
3191 # Par column: for the 2 parents with ...
3192 # N: null or no parent
3192 # N: null or no parent
3193 # B: parent is on another named branch
3193 # B: parent is on another named branch
3194 # C: parent is a regular non head changeset
3194 # C: parent is a regular non head changeset
3195 # H: parent was a branch head of the current branch
3195 # H: parent was a branch head of the current branch
3196 # Msg column: whether we print "created new head" message
3196 # Msg column: whether we print "created new head" message
3197 # In the following, it is assumed that there already exists some
3197 # In the following, it is assumed that there already exists some
3198 # initial branch heads of the current branch, otherwise nothing is
3198 # initial branch heads of the current branch, otherwise nothing is
3199 # printed anyway.
3199 # printed anyway.
3200 #
3200 #
3201 # Par Msg Comment
3201 # Par Msg Comment
3202 # N N y additional topo root
3202 # N N y additional topo root
3203 #
3203 #
3204 # B N y additional branch root
3204 # B N y additional branch root
3205 # C N y additional topo head
3205 # C N y additional topo head
3206 # H N n usual case
3206 # H N n usual case
3207 #
3207 #
3208 # B B y weird additional branch root
3208 # B B y weird additional branch root
3209 # C B y branch merge
3209 # C B y branch merge
3210 # H B n merge with named branch
3210 # H B n merge with named branch
3211 #
3211 #
3212 # C C y additional head from merge
3212 # C C y additional head from merge
3213 # C H n merge with a head
3213 # C H n merge with a head
3214 #
3214 #
3215 # H H n head merge: head count decreases
3215 # H H n head merge: head count decreases
3216
3216
3217 if not opts.get('close_branch'):
3217 if not opts.get('close_branch'):
3218 for r in parents:
3218 for r in parents:
3219 if r.closesbranch() and r.branch() == branch:
3219 if r.closesbranch() and r.branch() == branch:
3220 repo.ui.status(_('reopening closed branch head %d\n') % r)
3220 repo.ui.status(_('reopening closed branch head %d\n') % r)
3221
3221
3222 if repo.ui.debugflag:
3222 if repo.ui.debugflag:
3223 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3223 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3224 elif repo.ui.verbose:
3224 elif repo.ui.verbose:
3225 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3225 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3226
3226
3227 def postcommitstatus(repo, pats, opts):
3227 def postcommitstatus(repo, pats, opts):
3228 return repo.status(match=scmutil.match(repo[None], pats, opts))
3228 return repo.status(match=scmutil.match(repo[None], pats, opts))
3229
3229
3230 def revert(ui, repo, ctx, parents, *pats, **opts):
3230 def revert(ui, repo, ctx, parents, *pats, **opts):
3231 parent, p2 = parents
3231 parent, p2 = parents
3232 node = ctx.node()
3232 node = ctx.node()
3233
3233
3234 mf = ctx.manifest()
3234 mf = ctx.manifest()
3235 if node == p2:
3235 if node == p2:
3236 parent = p2
3236 parent = p2
3237
3237
3238 # need all matching names in dirstate and manifest of target rev,
3238 # need all matching names in dirstate and manifest of target rev,
3239 # so have to walk both. do not print errors if files exist in one
3239 # so have to walk both. do not print errors if files exist in one
3240 # but not other. in both cases, filesets should be evaluated against
3240 # but not other. in both cases, filesets should be evaluated against
3241 # workingctx to get consistent result (issue4497). this means 'set:**'
3241 # workingctx to get consistent result (issue4497). this means 'set:**'
3242 # cannot be used to select missing files from target rev.
3242 # cannot be used to select missing files from target rev.
3243
3243
3244 # `names` is a mapping for all elements in working copy and target revision
3244 # `names` is a mapping for all elements in working copy and target revision
3245 # The mapping is in the form:
3245 # The mapping is in the form:
3246 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3246 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3247 names = {}
3247 names = {}
3248
3248
3249 with repo.wlock():
3249 with repo.wlock():
3250 ## filling of the `names` mapping
3250 ## filling of the `names` mapping
3251 # walk dirstate to fill `names`
3251 # walk dirstate to fill `names`
3252
3252
3253 interactive = opts.get('interactive', False)
3253 interactive = opts.get('interactive', False)
3254 wctx = repo[None]
3254 wctx = repo[None]
3255 m = scmutil.match(wctx, pats, opts)
3255 m = scmutil.match(wctx, pats, opts)
3256
3256
3257 # we'll need this later
3257 # we'll need this later
3258 targetsubs = sorted(s for s in wctx.substate if m(s))
3258 targetsubs = sorted(s for s in wctx.substate if m(s))
3259
3259
3260 if not m.always():
3260 if not m.always():
3261 matcher = matchmod.badmatch(m, lambda x, y: False)
3261 matcher = matchmod.badmatch(m, lambda x, y: False)
3262 for abs in wctx.walk(matcher):
3262 for abs in wctx.walk(matcher):
3263 names[abs] = m.rel(abs), m.exact(abs)
3263 names[abs] = m.rel(abs), m.exact(abs)
3264
3264
3265 # walk target manifest to fill `names`
3265 # walk target manifest to fill `names`
3266
3266
3267 def badfn(path, msg):
3267 def badfn(path, msg):
3268 if path in names:
3268 if path in names:
3269 return
3269 return
3270 if path in ctx.substate:
3270 if path in ctx.substate:
3271 return
3271 return
3272 path_ = path + '/'
3272 path_ = path + '/'
3273 for f in names:
3273 for f in names:
3274 if f.startswith(path_):
3274 if f.startswith(path_):
3275 return
3275 return
3276 ui.warn("%s: %s\n" % (m.rel(path), msg))
3276 ui.warn("%s: %s\n" % (m.rel(path), msg))
3277
3277
3278 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3278 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3279 if abs not in names:
3279 if abs not in names:
3280 names[abs] = m.rel(abs), m.exact(abs)
3280 names[abs] = m.rel(abs), m.exact(abs)
3281
3281
3282 # Find status of all file in `names`.
3282 # Find status of all file in `names`.
3283 m = scmutil.matchfiles(repo, names)
3283 m = scmutil.matchfiles(repo, names)
3284
3284
3285 changes = repo.status(node1=node, match=m,
3285 changes = repo.status(node1=node, match=m,
3286 unknown=True, ignored=True, clean=True)
3286 unknown=True, ignored=True, clean=True)
3287 else:
3287 else:
3288 changes = repo.status(node1=node, match=m)
3288 changes = repo.status(node1=node, match=m)
3289 for kind in changes:
3289 for kind in changes:
3290 for abs in kind:
3290 for abs in kind:
3291 names[abs] = m.rel(abs), m.exact(abs)
3291 names[abs] = m.rel(abs), m.exact(abs)
3292
3292
3293 m = scmutil.matchfiles(repo, names)
3293 m = scmutil.matchfiles(repo, names)
3294
3294
3295 modified = set(changes.modified)
3295 modified = set(changes.modified)
3296 added = set(changes.added)
3296 added = set(changes.added)
3297 removed = set(changes.removed)
3297 removed = set(changes.removed)
3298 _deleted = set(changes.deleted)
3298 _deleted = set(changes.deleted)
3299 unknown = set(changes.unknown)
3299 unknown = set(changes.unknown)
3300 unknown.update(changes.ignored)
3300 unknown.update(changes.ignored)
3301 clean = set(changes.clean)
3301 clean = set(changes.clean)
3302 modadded = set()
3302 modadded = set()
3303
3303
3304 # We need to account for the state of the file in the dirstate,
3304 # We need to account for the state of the file in the dirstate,
3305 # even when we revert against something else than parent. This will
3305 # even when we revert against something else than parent. This will
3306 # slightly alter the behavior of revert (doing back up or not, delete
3306 # slightly alter the behavior of revert (doing back up or not, delete
3307 # or just forget etc).
3307 # or just forget etc).
3308 if parent == node:
3308 if parent == node:
3309 dsmodified = modified
3309 dsmodified = modified
3310 dsadded = added
3310 dsadded = added
3311 dsremoved = removed
3311 dsremoved = removed
3312 # store all local modifications, useful later for rename detection
3312 # store all local modifications, useful later for rename detection
3313 localchanges = dsmodified | dsadded
3313 localchanges = dsmodified | dsadded
3314 modified, added, removed = set(), set(), set()
3314 modified, added, removed = set(), set(), set()
3315 else:
3315 else:
3316 changes = repo.status(node1=parent, match=m)
3316 changes = repo.status(node1=parent, match=m)
3317 dsmodified = set(changes.modified)
3317 dsmodified = set(changes.modified)
3318 dsadded = set(changes.added)
3318 dsadded = set(changes.added)
3319 dsremoved = set(changes.removed)
3319 dsremoved = set(changes.removed)
3320 # store all local modifications, useful later for rename detection
3320 # store all local modifications, useful later for rename detection
3321 localchanges = dsmodified | dsadded
3321 localchanges = dsmodified | dsadded
3322
3322
3323 # only take into account for removes between wc and target
3323 # only take into account for removes between wc and target
3324 clean |= dsremoved - removed
3324 clean |= dsremoved - removed
3325 dsremoved &= removed
3325 dsremoved &= removed
3326 # distinct between dirstate remove and other
3326 # distinct between dirstate remove and other
3327 removed -= dsremoved
3327 removed -= dsremoved
3328
3328
3329 modadded = added & dsmodified
3329 modadded = added & dsmodified
3330 added -= modadded
3330 added -= modadded
3331
3331
3332 # tell newly modified apart.
3332 # tell newly modified apart.
3333 dsmodified &= modified
3333 dsmodified &= modified
3334 dsmodified |= modified & dsadded # dirstate added may need backup
3334 dsmodified |= modified & dsadded # dirstate added may need backup
3335 modified -= dsmodified
3335 modified -= dsmodified
3336
3336
3337 # We need to wait for some post-processing to update this set
3337 # We need to wait for some post-processing to update this set
3338 # before making the distinction. The dirstate will be used for
3338 # before making the distinction. The dirstate will be used for
3339 # that purpose.
3339 # that purpose.
3340 dsadded = added
3340 dsadded = added
3341
3341
3342 # in case of merge, files that are actually added can be reported as
3342 # in case of merge, files that are actually added can be reported as
3343 # modified, we need to post process the result
3343 # modified, we need to post process the result
3344 if p2 != nullid:
3344 if p2 != nullid:
3345 mergeadd = set(dsmodified)
3345 mergeadd = set(dsmodified)
3346 for path in dsmodified:
3346 for path in dsmodified:
3347 if path in mf:
3347 if path in mf:
3348 mergeadd.remove(path)
3348 mergeadd.remove(path)
3349 dsadded |= mergeadd
3349 dsadded |= mergeadd
3350 dsmodified -= mergeadd
3350 dsmodified -= mergeadd
3351
3351
3352 # if f is a rename, update `names` to also revert the source
3352 # if f is a rename, update `names` to also revert the source
3353 cwd = repo.getcwd()
3353 cwd = repo.getcwd()
3354 for f in localchanges:
3354 for f in localchanges:
3355 src = repo.dirstate.copied(f)
3355 src = repo.dirstate.copied(f)
3356 # XXX should we check for rename down to target node?
3356 # XXX should we check for rename down to target node?
3357 if src and src not in names and repo.dirstate[src] == 'r':
3357 if src and src not in names and repo.dirstate[src] == 'r':
3358 dsremoved.add(src)
3358 dsremoved.add(src)
3359 names[src] = (repo.pathto(src, cwd), True)
3359 names[src] = (repo.pathto(src, cwd), True)
3360
3360
3361 # determine the exact nature of the deleted changesets
3361 # determine the exact nature of the deleted changesets
3362 deladded = set(_deleted)
3362 deladded = set(_deleted)
3363 for path in _deleted:
3363 for path in _deleted:
3364 if path in mf:
3364 if path in mf:
3365 deladded.remove(path)
3365 deladded.remove(path)
3366 deleted = _deleted - deladded
3366 deleted = _deleted - deladded
3367
3367
3368 # distinguish between file to forget and the other
3368 # distinguish between file to forget and the other
3369 added = set()
3369 added = set()
3370 for abs in dsadded:
3370 for abs in dsadded:
3371 if repo.dirstate[abs] != 'a':
3371 if repo.dirstate[abs] != 'a':
3372 added.add(abs)
3372 added.add(abs)
3373 dsadded -= added
3373 dsadded -= added
3374
3374
3375 for abs in deladded:
3375 for abs in deladded:
3376 if repo.dirstate[abs] == 'a':
3376 if repo.dirstate[abs] == 'a':
3377 dsadded.add(abs)
3377 dsadded.add(abs)
3378 deladded -= dsadded
3378 deladded -= dsadded
3379
3379
3380 # For files marked as removed, we check if an unknown file is present at
3380 # For files marked as removed, we check if an unknown file is present at
3381 # the same path. If a such file exists it may need to be backed up.
3381 # the same path. If a such file exists it may need to be backed up.
3382 # Making the distinction at this stage helps have simpler backup
3382 # Making the distinction at this stage helps have simpler backup
3383 # logic.
3383 # logic.
3384 removunk = set()
3384 removunk = set()
3385 for abs in removed:
3385 for abs in removed:
3386 target = repo.wjoin(abs)
3386 target = repo.wjoin(abs)
3387 if os.path.lexists(target):
3387 if os.path.lexists(target):
3388 removunk.add(abs)
3388 removunk.add(abs)
3389 removed -= removunk
3389 removed -= removunk
3390
3390
3391 dsremovunk = set()
3391 dsremovunk = set()
3392 for abs in dsremoved:
3392 for abs in dsremoved:
3393 target = repo.wjoin(abs)
3393 target = repo.wjoin(abs)
3394 if os.path.lexists(target):
3394 if os.path.lexists(target):
3395 dsremovunk.add(abs)
3395 dsremovunk.add(abs)
3396 dsremoved -= dsremovunk
3396 dsremoved -= dsremovunk
3397
3397
3398 # action to be actually performed by revert
3398 # action to be actually performed by revert
3399 # (<list of file>, message>) tuple
3399 # (<list of file>, message>) tuple
3400 actions = {'revert': ([], _('reverting %s\n')),
3400 actions = {'revert': ([], _('reverting %s\n')),
3401 'add': ([], _('adding %s\n')),
3401 'add': ([], _('adding %s\n')),
3402 'remove': ([], _('removing %s\n')),
3402 'remove': ([], _('removing %s\n')),
3403 'drop': ([], _('removing %s\n')),
3403 'drop': ([], _('removing %s\n')),
3404 'forget': ([], _('forgetting %s\n')),
3404 'forget': ([], _('forgetting %s\n')),
3405 'undelete': ([], _('undeleting %s\n')),
3405 'undelete': ([], _('undeleting %s\n')),
3406 'noop': (None, _('no changes needed to %s\n')),
3406 'noop': (None, _('no changes needed to %s\n')),
3407 'unknown': (None, _('file not managed: %s\n')),
3407 'unknown': (None, _('file not managed: %s\n')),
3408 }
3408 }
3409
3409
3410 # "constant" that convey the backup strategy.
3410 # "constant" that convey the backup strategy.
3411 # All set to `discard` if `no-backup` is set do avoid checking
3411 # All set to `discard` if `no-backup` is set do avoid checking
3412 # no_backup lower in the code.
3412 # no_backup lower in the code.
3413 # These values are ordered for comparison purposes
3413 # These values are ordered for comparison purposes
3414 backupinteractive = 3 # do backup if interactively modified
3414 backupinteractive = 3 # do backup if interactively modified
3415 backup = 2 # unconditionally do backup
3415 backup = 2 # unconditionally do backup
3416 check = 1 # check if the existing file differs from target
3416 check = 1 # check if the existing file differs from target
3417 discard = 0 # never do backup
3417 discard = 0 # never do backup
3418 if opts.get('no_backup'):
3418 if opts.get('no_backup'):
3419 backupinteractive = backup = check = discard
3419 backupinteractive = backup = check = discard
3420 if interactive:
3420 if interactive:
3421 dsmodifiedbackup = backupinteractive
3421 dsmodifiedbackup = backupinteractive
3422 else:
3422 else:
3423 dsmodifiedbackup = backup
3423 dsmodifiedbackup = backup
3424 tobackup = set()
3424 tobackup = set()
3425
3425
3426 backupanddel = actions['remove']
3426 backupanddel = actions['remove']
3427 if not opts.get('no_backup'):
3427 if not opts.get('no_backup'):
3428 backupanddel = actions['drop']
3428 backupanddel = actions['drop']
3429
3429
3430 disptable = (
3430 disptable = (
3431 # dispatch table:
3431 # dispatch table:
3432 # file state
3432 # file state
3433 # action
3433 # action
3434 # make backup
3434 # make backup
3435
3435
3436 ## Sets that results that will change file on disk
3436 ## Sets that results that will change file on disk
3437 # Modified compared to target, no local change
3437 # Modified compared to target, no local change
3438 (modified, actions['revert'], discard),
3438 (modified, actions['revert'], discard),
3439 # Modified compared to target, but local file is deleted
3439 # Modified compared to target, but local file is deleted
3440 (deleted, actions['revert'], discard),
3440 (deleted, actions['revert'], discard),
3441 # Modified compared to target, local change
3441 # Modified compared to target, local change
3442 (dsmodified, actions['revert'], dsmodifiedbackup),
3442 (dsmodified, actions['revert'], dsmodifiedbackup),
3443 # Added since target
3443 # Added since target
3444 (added, actions['remove'], discard),
3444 (added, actions['remove'], discard),
3445 # Added in working directory
3445 # Added in working directory
3446 (dsadded, actions['forget'], discard),
3446 (dsadded, actions['forget'], discard),
3447 # Added since target, have local modification
3447 # Added since target, have local modification
3448 (modadded, backupanddel, backup),
3448 (modadded, backupanddel, backup),
3449 # Added since target but file is missing in working directory
3449 # Added since target but file is missing in working directory
3450 (deladded, actions['drop'], discard),
3450 (deladded, actions['drop'], discard),
3451 # Removed since target, before working copy parent
3451 # Removed since target, before working copy parent
3452 (removed, actions['add'], discard),
3452 (removed, actions['add'], discard),
3453 # Same as `removed` but an unknown file exists at the same path
3453 # Same as `removed` but an unknown file exists at the same path
3454 (removunk, actions['add'], check),
3454 (removunk, actions['add'], check),
3455 # Removed since targe, marked as such in working copy parent
3455 # Removed since targe, marked as such in working copy parent
3456 (dsremoved, actions['undelete'], discard),
3456 (dsremoved, actions['undelete'], discard),
3457 # Same as `dsremoved` but an unknown file exists at the same path
3457 # Same as `dsremoved` but an unknown file exists at the same path
3458 (dsremovunk, actions['undelete'], check),
3458 (dsremovunk, actions['undelete'], check),
3459 ## the following sets does not result in any file changes
3459 ## the following sets does not result in any file changes
3460 # File with no modification
3460 # File with no modification
3461 (clean, actions['noop'], discard),
3461 (clean, actions['noop'], discard),
3462 # Existing file, not tracked anywhere
3462 # Existing file, not tracked anywhere
3463 (unknown, actions['unknown'], discard),
3463 (unknown, actions['unknown'], discard),
3464 )
3464 )
3465
3465
3466 for abs, (rel, exact) in sorted(names.items()):
3466 for abs, (rel, exact) in sorted(names.items()):
3467 # target file to be touch on disk (relative to cwd)
3467 # target file to be touch on disk (relative to cwd)
3468 target = repo.wjoin(abs)
3468 target = repo.wjoin(abs)
3469 # search the entry in the dispatch table.
3469 # search the entry in the dispatch table.
3470 # if the file is in any of these sets, it was touched in the working
3470 # if the file is in any of these sets, it was touched in the working
3471 # directory parent and we are sure it needs to be reverted.
3471 # directory parent and we are sure it needs to be reverted.
3472 for table, (xlist, msg), dobackup in disptable:
3472 for table, (xlist, msg), dobackup in disptable:
3473 if abs not in table:
3473 if abs not in table:
3474 continue
3474 continue
3475 if xlist is not None:
3475 if xlist is not None:
3476 xlist.append(abs)
3476 xlist.append(abs)
3477 if dobackup:
3477 if dobackup:
3478 # If in interactive mode, don't automatically create
3478 # If in interactive mode, don't automatically create
3479 # .orig files (issue4793)
3479 # .orig files (issue4793)
3480 if dobackup == backupinteractive:
3480 if dobackup == backupinteractive:
3481 tobackup.add(abs)
3481 tobackup.add(abs)
3482 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3482 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3483 bakname = scmutil.origpath(ui, repo, rel)
3483 bakname = scmutil.origpath(ui, repo, rel)
3484 ui.note(_('saving current version of %s as %s\n') %
3484 ui.note(_('saving current version of %s as %s\n') %
3485 (rel, bakname))
3485 (rel, bakname))
3486 if not opts.get('dry_run'):
3486 if not opts.get('dry_run'):
3487 if interactive:
3487 if interactive:
3488 util.copyfile(target, bakname)
3488 util.copyfile(target, bakname)
3489 else:
3489 else:
3490 util.rename(target, bakname)
3490 util.rename(target, bakname)
3491 if ui.verbose or not exact:
3491 if ui.verbose or not exact:
3492 if not isinstance(msg, basestring):
3492 if not isinstance(msg, basestring):
3493 msg = msg(abs)
3493 msg = msg(abs)
3494 ui.status(msg % rel)
3494 ui.status(msg % rel)
3495 elif exact:
3495 elif exact:
3496 ui.warn(msg % rel)
3496 ui.warn(msg % rel)
3497 break
3497 break
3498
3498
3499 if not opts.get('dry_run'):
3499 if not opts.get('dry_run'):
3500 needdata = ('revert', 'add', 'undelete')
3500 needdata = ('revert', 'add', 'undelete')
3501 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3501 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3502 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3502 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3503
3503
3504 if targetsubs:
3504 if targetsubs:
3505 # Revert the subrepos on the revert list
3505 # Revert the subrepos on the revert list
3506 for sub in targetsubs:
3506 for sub in targetsubs:
3507 try:
3507 try:
3508 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3508 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3509 except KeyError:
3509 except KeyError:
3510 raise error.Abort("subrepository '%s' does not exist in %s!"
3510 raise error.Abort("subrepository '%s' does not exist in %s!"
3511 % (sub, short(ctx.node())))
3511 % (sub, short(ctx.node())))
3512
3512
3513 def _revertprefetch(repo, ctx, *files):
3513 def _revertprefetch(repo, ctx, *files):
3514 """Let extension changing the storage layer prefetch content"""
3514 """Let extension changing the storage layer prefetch content"""
3515 pass
3515 pass
3516
3516
3517 def _performrevert(repo, parents, ctx, actions, interactive=False,
3517 def _performrevert(repo, parents, ctx, actions, interactive=False,
3518 tobackup=None):
3518 tobackup=None):
3519 """function that actually perform all the actions computed for revert
3519 """function that actually perform all the actions computed for revert
3520
3520
3521 This is an independent function to let extension to plug in and react to
3521 This is an independent function to let extension to plug in and react to
3522 the imminent revert.
3522 the imminent revert.
3523
3523
3524 Make sure you have the working directory locked when calling this function.
3524 Make sure you have the working directory locked when calling this function.
3525 """
3525 """
3526 parent, p2 = parents
3526 parent, p2 = parents
3527 node = ctx.node()
3527 node = ctx.node()
3528 excluded_files = []
3528 excluded_files = []
3529 matcher_opts = {"exclude": excluded_files}
3529 matcher_opts = {"exclude": excluded_files}
3530
3530
3531 def checkout(f):
3531 def checkout(f):
3532 fc = ctx[f]
3532 fc = ctx[f]
3533 repo.wwrite(f, fc.data(), fc.flags())
3533 repo.wwrite(f, fc.data(), fc.flags())
3534
3534
3535 def doremove(f):
3535 def doremove(f):
3536 try:
3536 try:
3537 repo.wvfs.unlinkpath(f)
3537 repo.wvfs.unlinkpath(f)
3538 except OSError:
3538 except OSError:
3539 pass
3539 pass
3540 repo.dirstate.remove(f)
3540 repo.dirstate.remove(f)
3541
3541
3542 audit_path = pathutil.pathauditor(repo.root)
3542 audit_path = pathutil.pathauditor(repo.root, cached=True)
3543 for f in actions['forget'][0]:
3543 for f in actions['forget'][0]:
3544 if interactive:
3544 if interactive:
3545 choice = repo.ui.promptchoice(
3545 choice = repo.ui.promptchoice(
3546 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3546 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3547 if choice == 0:
3547 if choice == 0:
3548 repo.dirstate.drop(f)
3548 repo.dirstate.drop(f)
3549 else:
3549 else:
3550 excluded_files.append(repo.wjoin(f))
3550 excluded_files.append(repo.wjoin(f))
3551 else:
3551 else:
3552 repo.dirstate.drop(f)
3552 repo.dirstate.drop(f)
3553 for f in actions['remove'][0]:
3553 for f in actions['remove'][0]:
3554 audit_path(f)
3554 audit_path(f)
3555 if interactive:
3555 if interactive:
3556 choice = repo.ui.promptchoice(
3556 choice = repo.ui.promptchoice(
3557 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3557 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3558 if choice == 0:
3558 if choice == 0:
3559 doremove(f)
3559 doremove(f)
3560 else:
3560 else:
3561 excluded_files.append(repo.wjoin(f))
3561 excluded_files.append(repo.wjoin(f))
3562 else:
3562 else:
3563 doremove(f)
3563 doremove(f)
3564 for f in actions['drop'][0]:
3564 for f in actions['drop'][0]:
3565 audit_path(f)
3565 audit_path(f)
3566 repo.dirstate.remove(f)
3566 repo.dirstate.remove(f)
3567
3567
3568 normal = None
3568 normal = None
3569 if node == parent:
3569 if node == parent:
3570 # We're reverting to our parent. If possible, we'd like status
3570 # We're reverting to our parent. If possible, we'd like status
3571 # to report the file as clean. We have to use normallookup for
3571 # to report the file as clean. We have to use normallookup for
3572 # merges to avoid losing information about merged/dirty files.
3572 # merges to avoid losing information about merged/dirty files.
3573 if p2 != nullid:
3573 if p2 != nullid:
3574 normal = repo.dirstate.normallookup
3574 normal = repo.dirstate.normallookup
3575 else:
3575 else:
3576 normal = repo.dirstate.normal
3576 normal = repo.dirstate.normal
3577
3577
3578 newlyaddedandmodifiedfiles = set()
3578 newlyaddedandmodifiedfiles = set()
3579 if interactive:
3579 if interactive:
3580 # Prompt the user for changes to revert
3580 # Prompt the user for changes to revert
3581 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3581 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3582 m = scmutil.match(ctx, torevert, matcher_opts)
3582 m = scmutil.match(ctx, torevert, matcher_opts)
3583 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3583 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3584 diffopts.nodates = True
3584 diffopts.nodates = True
3585 diffopts.git = True
3585 diffopts.git = True
3586 operation = 'discard'
3586 operation = 'discard'
3587 reversehunks = True
3587 reversehunks = True
3588 if node != parent:
3588 if node != parent:
3589 operation = 'revert'
3589 operation = 'revert'
3590 reversehunks = repo.ui.configbool('experimental',
3590 reversehunks = repo.ui.configbool('experimental',
3591 'revertalternateinteractivemode')
3591 'revertalternateinteractivemode')
3592 if reversehunks:
3592 if reversehunks:
3593 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3593 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3594 else:
3594 else:
3595 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3595 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3596 originalchunks = patch.parsepatch(diff)
3596 originalchunks = patch.parsepatch(diff)
3597
3597
3598 try:
3598 try:
3599
3599
3600 chunks, opts = recordfilter(repo.ui, originalchunks,
3600 chunks, opts = recordfilter(repo.ui, originalchunks,
3601 operation=operation)
3601 operation=operation)
3602 if reversehunks:
3602 if reversehunks:
3603 chunks = patch.reversehunks(chunks)
3603 chunks = patch.reversehunks(chunks)
3604
3604
3605 except patch.PatchError as err:
3605 except patch.PatchError as err:
3606 raise error.Abort(_('error parsing patch: %s') % err)
3606 raise error.Abort(_('error parsing patch: %s') % err)
3607
3607
3608 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3608 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3609 if tobackup is None:
3609 if tobackup is None:
3610 tobackup = set()
3610 tobackup = set()
3611 # Apply changes
3611 # Apply changes
3612 fp = stringio()
3612 fp = stringio()
3613 for c in chunks:
3613 for c in chunks:
3614 # Create a backup file only if this hunk should be backed up
3614 # Create a backup file only if this hunk should be backed up
3615 if ishunk(c) and c.header.filename() in tobackup:
3615 if ishunk(c) and c.header.filename() in tobackup:
3616 abs = c.header.filename()
3616 abs = c.header.filename()
3617 target = repo.wjoin(abs)
3617 target = repo.wjoin(abs)
3618 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3618 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3619 util.copyfile(target, bakname)
3619 util.copyfile(target, bakname)
3620 tobackup.remove(abs)
3620 tobackup.remove(abs)
3621 c.write(fp)
3621 c.write(fp)
3622 dopatch = fp.tell()
3622 dopatch = fp.tell()
3623 fp.seek(0)
3623 fp.seek(0)
3624 if dopatch:
3624 if dopatch:
3625 try:
3625 try:
3626 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3626 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3627 except patch.PatchError as err:
3627 except patch.PatchError as err:
3628 raise error.Abort(str(err))
3628 raise error.Abort(str(err))
3629 del fp
3629 del fp
3630 else:
3630 else:
3631 for f in actions['revert'][0]:
3631 for f in actions['revert'][0]:
3632 checkout(f)
3632 checkout(f)
3633 if normal:
3633 if normal:
3634 normal(f)
3634 normal(f)
3635
3635
3636 for f in actions['add'][0]:
3636 for f in actions['add'][0]:
3637 # Don't checkout modified files, they are already created by the diff
3637 # Don't checkout modified files, they are already created by the diff
3638 if f not in newlyaddedandmodifiedfiles:
3638 if f not in newlyaddedandmodifiedfiles:
3639 checkout(f)
3639 checkout(f)
3640 repo.dirstate.add(f)
3640 repo.dirstate.add(f)
3641
3641
3642 normal = repo.dirstate.normallookup
3642 normal = repo.dirstate.normallookup
3643 if node == parent and p2 == nullid:
3643 if node == parent and p2 == nullid:
3644 normal = repo.dirstate.normal
3644 normal = repo.dirstate.normal
3645 for f in actions['undelete'][0]:
3645 for f in actions['undelete'][0]:
3646 checkout(f)
3646 checkout(f)
3647 normal(f)
3647 normal(f)
3648
3648
3649 copied = copies.pathcopies(repo[parent], ctx)
3649 copied = copies.pathcopies(repo[parent], ctx)
3650
3650
3651 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3651 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3652 if f in copied:
3652 if f in copied:
3653 repo.dirstate.copy(copied[f], f)
3653 repo.dirstate.copy(copied[f], f)
3654
3654
3655 class command(registrar.command):
3655 class command(registrar.command):
3656 def _doregister(self, func, name, *args, **kwargs):
3656 def _doregister(self, func, name, *args, **kwargs):
3657 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3657 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3658 return super(command, self)._doregister(func, name, *args, **kwargs)
3658 return super(command, self)._doregister(func, name, *args, **kwargs)
3659
3659
3660 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3660 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3661 # commands.outgoing. "missing" is "missing" of the result of
3661 # commands.outgoing. "missing" is "missing" of the result of
3662 # "findcommonoutgoing()"
3662 # "findcommonoutgoing()"
3663 outgoinghooks = util.hooks()
3663 outgoinghooks = util.hooks()
3664
3664
3665 # a list of (ui, repo) functions called by commands.summary
3665 # a list of (ui, repo) functions called by commands.summary
3666 summaryhooks = util.hooks()
3666 summaryhooks = util.hooks()
3667
3667
3668 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3668 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3669 #
3669 #
3670 # functions should return tuple of booleans below, if 'changes' is None:
3670 # functions should return tuple of booleans below, if 'changes' is None:
3671 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3671 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3672 #
3672 #
3673 # otherwise, 'changes' is a tuple of tuples below:
3673 # otherwise, 'changes' is a tuple of tuples below:
3674 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3674 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3675 # - (desturl, destbranch, destpeer, outgoing)
3675 # - (desturl, destbranch, destpeer, outgoing)
3676 summaryremotehooks = util.hooks()
3676 summaryremotehooks = util.hooks()
3677
3677
3678 # A list of state files kept by multistep operations like graft.
3678 # A list of state files kept by multistep operations like graft.
3679 # Since graft cannot be aborted, it is considered 'clearable' by update.
3679 # Since graft cannot be aborted, it is considered 'clearable' by update.
3680 # note: bisect is intentionally excluded
3680 # note: bisect is intentionally excluded
3681 # (state file, clearable, allowcommit, error, hint)
3681 # (state file, clearable, allowcommit, error, hint)
3682 unfinishedstates = [
3682 unfinishedstates = [
3683 ('graftstate', True, False, _('graft in progress'),
3683 ('graftstate', True, False, _('graft in progress'),
3684 _("use 'hg graft --continue' or 'hg update' to abort")),
3684 _("use 'hg graft --continue' or 'hg update' to abort")),
3685 ('updatestate', True, False, _('last update was interrupted'),
3685 ('updatestate', True, False, _('last update was interrupted'),
3686 _("use 'hg update' to get a consistent checkout"))
3686 _("use 'hg update' to get a consistent checkout"))
3687 ]
3687 ]
3688
3688
3689 def checkunfinished(repo, commit=False):
3689 def checkunfinished(repo, commit=False):
3690 '''Look for an unfinished multistep operation, like graft, and abort
3690 '''Look for an unfinished multistep operation, like graft, and abort
3691 if found. It's probably good to check this right before
3691 if found. It's probably good to check this right before
3692 bailifchanged().
3692 bailifchanged().
3693 '''
3693 '''
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3695 if commit and allowcommit:
3695 if commit and allowcommit:
3696 continue
3696 continue
3697 if repo.vfs.exists(f):
3697 if repo.vfs.exists(f):
3698 raise error.Abort(msg, hint=hint)
3698 raise error.Abort(msg, hint=hint)
3699
3699
3700 def clearunfinished(repo):
3700 def clearunfinished(repo):
3701 '''Check for unfinished operations (as above), and clear the ones
3701 '''Check for unfinished operations (as above), and clear the ones
3702 that are clearable.
3702 that are clearable.
3703 '''
3703 '''
3704 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3704 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3705 if not clearable and repo.vfs.exists(f):
3705 if not clearable and repo.vfs.exists(f):
3706 raise error.Abort(msg, hint=hint)
3706 raise error.Abort(msg, hint=hint)
3707 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3707 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3708 if clearable and repo.vfs.exists(f):
3708 if clearable and repo.vfs.exists(f):
3709 util.unlink(repo.vfs.join(f))
3709 util.unlink(repo.vfs.join(f))
3710
3710
3711 afterresolvedstates = [
3711 afterresolvedstates = [
3712 ('graftstate',
3712 ('graftstate',
3713 _('hg graft --continue')),
3713 _('hg graft --continue')),
3714 ]
3714 ]
3715
3715
3716 def howtocontinue(repo):
3716 def howtocontinue(repo):
3717 '''Check for an unfinished operation and return the command to finish
3717 '''Check for an unfinished operation and return the command to finish
3718 it.
3718 it.
3719
3719
3720 afterresolvedstates tuples define a .hg/{file} and the corresponding
3720 afterresolvedstates tuples define a .hg/{file} and the corresponding
3721 command needed to finish it.
3721 command needed to finish it.
3722
3722
3723 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3723 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3724 a boolean.
3724 a boolean.
3725 '''
3725 '''
3726 contmsg = _("continue: %s")
3726 contmsg = _("continue: %s")
3727 for f, msg in afterresolvedstates:
3727 for f, msg in afterresolvedstates:
3728 if repo.vfs.exists(f):
3728 if repo.vfs.exists(f):
3729 return contmsg % msg, True
3729 return contmsg % msg, True
3730 if repo[None].dirty(missing=True, merge=False, branch=False):
3730 if repo[None].dirty(missing=True, merge=False, branch=False):
3731 return contmsg % _("hg commit"), False
3731 return contmsg % _("hg commit"), False
3732 return None, None
3732 return None, None
3733
3733
3734 def checkafterresolved(repo):
3734 def checkafterresolved(repo):
3735 '''Inform the user about the next action after completing hg resolve
3735 '''Inform the user about the next action after completing hg resolve
3736
3736
3737 If there's a matching afterresolvedstates, howtocontinue will yield
3737 If there's a matching afterresolvedstates, howtocontinue will yield
3738 repo.ui.warn as the reporter.
3738 repo.ui.warn as the reporter.
3739
3739
3740 Otherwise, it will yield repo.ui.note.
3740 Otherwise, it will yield repo.ui.note.
3741 '''
3741 '''
3742 msg, warning = howtocontinue(repo)
3742 msg, warning = howtocontinue(repo)
3743 if msg is not None:
3743 if msg is not None:
3744 if warning:
3744 if warning:
3745 repo.ui.warn("%s\n" % msg)
3745 repo.ui.warn("%s\n" % msg)
3746 else:
3746 else:
3747 repo.ui.note("%s\n" % msg)
3747 repo.ui.note("%s\n" % msg)
3748
3748
3749 def wrongtooltocontinue(repo, task):
3749 def wrongtooltocontinue(repo, task):
3750 '''Raise an abort suggesting how to properly continue if there is an
3750 '''Raise an abort suggesting how to properly continue if there is an
3751 active task.
3751 active task.
3752
3752
3753 Uses howtocontinue() to find the active task.
3753 Uses howtocontinue() to find the active task.
3754
3754
3755 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3755 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3756 a hint.
3756 a hint.
3757 '''
3757 '''
3758 after = howtocontinue(repo)
3758 after = howtocontinue(repo)
3759 hint = None
3759 hint = None
3760 if after[1]:
3760 if after[1]:
3761 hint = after[0]
3761 hint = after[0]
3762 raise error.Abort(_('no %s in progress') % task, hint=hint)
3762 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1342 +1,1342 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 parsers = policy.importmod(r'parsers')
30 parsers = policy.importmod(r'parsers')
31
31
32 propertycache = util.propertycache
32 propertycache = util.propertycache
33 filecache = scmutil.filecache
33 filecache = scmutil.filecache
34 _rangemask = 0x7fffffff
34 _rangemask = 0x7fffffff
35
35
36 dirstatetuple = parsers.dirstatetuple
36 dirstatetuple = parsers.dirstatetuple
37
37
38 class repocache(filecache):
38 class repocache(filecache):
39 """filecache for files in .hg/"""
39 """filecache for files in .hg/"""
40 def join(self, obj, fname):
40 def join(self, obj, fname):
41 return obj._opener.join(fname)
41 return obj._opener.join(fname)
42
42
43 class rootcache(filecache):
43 class rootcache(filecache):
44 """filecache for files in the repository root"""
44 """filecache for files in the repository root"""
45 def join(self, obj, fname):
45 def join(self, obj, fname):
46 return obj._join(fname)
46 return obj._join(fname)
47
47
48 def _getfsnow(vfs):
48 def _getfsnow(vfs):
49 '''Get "now" timestamp on filesystem'''
49 '''Get "now" timestamp on filesystem'''
50 tmpfd, tmpname = vfs.mkstemp()
50 tmpfd, tmpname = vfs.mkstemp()
51 try:
51 try:
52 return os.fstat(tmpfd).st_mtime
52 return os.fstat(tmpfd).st_mtime
53 finally:
53 finally:
54 os.close(tmpfd)
54 os.close(tmpfd)
55 vfs.unlink(tmpname)
55 vfs.unlink(tmpname)
56
56
57 def nonnormalentries(dmap):
57 def nonnormalentries(dmap):
58 '''Compute the nonnormal dirstate entries from the dmap'''
58 '''Compute the nonnormal dirstate entries from the dmap'''
59 try:
59 try:
60 return parsers.nonnormalotherparententries(dmap)
60 return parsers.nonnormalotherparententries(dmap)
61 except AttributeError:
61 except AttributeError:
62 nonnorm = set()
62 nonnorm = set()
63 otherparent = set()
63 otherparent = set()
64 for fname, e in dmap.iteritems():
64 for fname, e in dmap.iteritems():
65 if e[0] != 'n' or e[3] == -1:
65 if e[0] != 'n' or e[3] == -1:
66 nonnorm.add(fname)
66 nonnorm.add(fname)
67 if e[0] == 'n' and e[2] == -2:
67 if e[0] == 'n' and e[2] == -2:
68 otherparent.add(fname)
68 otherparent.add(fname)
69 return nonnorm, otherparent
69 return nonnorm, otherparent
70
70
71 class dirstate(object):
71 class dirstate(object):
72
72
73 def __init__(self, opener, ui, root, validate, sparsematchfn):
73 def __init__(self, opener, ui, root, validate, sparsematchfn):
74 '''Create a new dirstate object.
74 '''Create a new dirstate object.
75
75
76 opener is an open()-like callable that can be used to open the
76 opener is an open()-like callable that can be used to open the
77 dirstate file; root is the root of the directory tracked by
77 dirstate file; root is the root of the directory tracked by
78 the dirstate.
78 the dirstate.
79 '''
79 '''
80 self._opener = opener
80 self._opener = opener
81 self._validate = validate
81 self._validate = validate
82 self._root = root
82 self._root = root
83 self._sparsematchfn = sparsematchfn
83 self._sparsematchfn = sparsematchfn
84 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
84 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
85 # UNC path pointing to root share (issue4557)
85 # UNC path pointing to root share (issue4557)
86 self._rootdir = pathutil.normasprefix(root)
86 self._rootdir = pathutil.normasprefix(root)
87 self._dirty = False
87 self._dirty = False
88 self._dirtypl = False
88 self._dirtypl = False
89 self._lastnormaltime = 0
89 self._lastnormaltime = 0
90 self._ui = ui
90 self._ui = ui
91 self._filecache = {}
91 self._filecache = {}
92 self._parentwriters = 0
92 self._parentwriters = 0
93 self._filename = 'dirstate'
93 self._filename = 'dirstate'
94 self._pendingfilename = '%s.pending' % self._filename
94 self._pendingfilename = '%s.pending' % self._filename
95 self._plchangecallbacks = {}
95 self._plchangecallbacks = {}
96 self._origpl = None
96 self._origpl = None
97 self._updatedfiles = set()
97 self._updatedfiles = set()
98
98
99 # for consistent view between _pl() and _read() invocations
99 # for consistent view between _pl() and _read() invocations
100 self._pendingmode = None
100 self._pendingmode = None
101
101
102 @contextlib.contextmanager
102 @contextlib.contextmanager
103 def parentchange(self):
103 def parentchange(self):
104 '''Context manager for handling dirstate parents.
104 '''Context manager for handling dirstate parents.
105
105
106 If an exception occurs in the scope of the context manager,
106 If an exception occurs in the scope of the context manager,
107 the incoherent dirstate won't be written when wlock is
107 the incoherent dirstate won't be written when wlock is
108 released.
108 released.
109 '''
109 '''
110 self._parentwriters += 1
110 self._parentwriters += 1
111 yield
111 yield
112 # Typically we want the "undo" step of a context manager in a
112 # Typically we want the "undo" step of a context manager in a
113 # finally block so it happens even when an exception
113 # finally block so it happens even when an exception
114 # occurs. In this case, however, we only want to decrement
114 # occurs. In this case, however, we only want to decrement
115 # parentwriters if the code in the with statement exits
115 # parentwriters if the code in the with statement exits
116 # normally, so we don't have a try/finally here on purpose.
116 # normally, so we don't have a try/finally here on purpose.
117 self._parentwriters -= 1
117 self._parentwriters -= 1
118
118
119 def beginparentchange(self):
119 def beginparentchange(self):
120 '''Marks the beginning of a set of changes that involve changing
120 '''Marks the beginning of a set of changes that involve changing
121 the dirstate parents. If there is an exception during this time,
121 the dirstate parents. If there is an exception during this time,
122 the dirstate will not be written when the wlock is released. This
122 the dirstate will not be written when the wlock is released. This
123 prevents writing an incoherent dirstate where the parent doesn't
123 prevents writing an incoherent dirstate where the parent doesn't
124 match the contents.
124 match the contents.
125 '''
125 '''
126 self._ui.deprecwarn('beginparentchange is obsoleted by the '
126 self._ui.deprecwarn('beginparentchange is obsoleted by the '
127 'parentchange context manager.', '4.3')
127 'parentchange context manager.', '4.3')
128 self._parentwriters += 1
128 self._parentwriters += 1
129
129
130 def endparentchange(self):
130 def endparentchange(self):
131 '''Marks the end of a set of changes that involve changing the
131 '''Marks the end of a set of changes that involve changing the
132 dirstate parents. Once all parent changes have been marked done,
132 dirstate parents. Once all parent changes have been marked done,
133 the wlock will be free to write the dirstate on release.
133 the wlock will be free to write the dirstate on release.
134 '''
134 '''
135 self._ui.deprecwarn('endparentchange is obsoleted by the '
135 self._ui.deprecwarn('endparentchange is obsoleted by the '
136 'parentchange context manager.', '4.3')
136 'parentchange context manager.', '4.3')
137 if self._parentwriters > 0:
137 if self._parentwriters > 0:
138 self._parentwriters -= 1
138 self._parentwriters -= 1
139
139
140 def pendingparentchange(self):
140 def pendingparentchange(self):
141 '''Returns true if the dirstate is in the middle of a set of changes
141 '''Returns true if the dirstate is in the middle of a set of changes
142 that modify the dirstate parent.
142 that modify the dirstate parent.
143 '''
143 '''
144 return self._parentwriters > 0
144 return self._parentwriters > 0
145
145
146 @propertycache
146 @propertycache
147 def _map(self):
147 def _map(self):
148 '''Return the dirstate contents as a map from filename to
148 '''Return the dirstate contents as a map from filename to
149 (state, mode, size, time).'''
149 (state, mode, size, time).'''
150 self._read()
150 self._read()
151 return self._map
151 return self._map
152
152
153 @propertycache
153 @propertycache
154 def _copymap(self):
154 def _copymap(self):
155 self._read()
155 self._read()
156 return self._copymap
156 return self._copymap
157
157
158 @propertycache
158 @propertycache
159 def _identity(self):
159 def _identity(self):
160 self._read()
160 self._read()
161 return self._identity
161 return self._identity
162
162
163 @propertycache
163 @propertycache
164 def _nonnormalset(self):
164 def _nonnormalset(self):
165 nonnorm, otherparents = nonnormalentries(self._map)
165 nonnorm, otherparents = nonnormalentries(self._map)
166 self._otherparentset = otherparents
166 self._otherparentset = otherparents
167 return nonnorm
167 return nonnorm
168
168
169 @propertycache
169 @propertycache
170 def _otherparentset(self):
170 def _otherparentset(self):
171 nonnorm, otherparents = nonnormalentries(self._map)
171 nonnorm, otherparents = nonnormalentries(self._map)
172 self._nonnormalset = nonnorm
172 self._nonnormalset = nonnorm
173 return otherparents
173 return otherparents
174
174
175 @propertycache
175 @propertycache
176 def _filefoldmap(self):
176 def _filefoldmap(self):
177 try:
177 try:
178 makefilefoldmap = parsers.make_file_foldmap
178 makefilefoldmap = parsers.make_file_foldmap
179 except AttributeError:
179 except AttributeError:
180 pass
180 pass
181 else:
181 else:
182 return makefilefoldmap(self._map, util.normcasespec,
182 return makefilefoldmap(self._map, util.normcasespec,
183 util.normcasefallback)
183 util.normcasefallback)
184
184
185 f = {}
185 f = {}
186 normcase = util.normcase
186 normcase = util.normcase
187 for name, s in self._map.iteritems():
187 for name, s in self._map.iteritems():
188 if s[0] != 'r':
188 if s[0] != 'r':
189 f[normcase(name)] = name
189 f[normcase(name)] = name
190 f['.'] = '.' # prevents useless util.fspath() invocation
190 f['.'] = '.' # prevents useless util.fspath() invocation
191 return f
191 return f
192
192
193 @propertycache
193 @propertycache
194 def _dirfoldmap(self):
194 def _dirfoldmap(self):
195 f = {}
195 f = {}
196 normcase = util.normcase
196 normcase = util.normcase
197 for name in self._dirs:
197 for name in self._dirs:
198 f[normcase(name)] = name
198 f[normcase(name)] = name
199 return f
199 return f
200
200
201 @property
201 @property
202 def _sparsematcher(self):
202 def _sparsematcher(self):
203 """The matcher for the sparse checkout.
203 """The matcher for the sparse checkout.
204
204
205 The working directory may not include every file from a manifest. The
205 The working directory may not include every file from a manifest. The
206 matcher obtained by this property will match a path if it is to be
206 matcher obtained by this property will match a path if it is to be
207 included in the working directory.
207 included in the working directory.
208 """
208 """
209 # TODO there is potential to cache this property. For now, the matcher
209 # TODO there is potential to cache this property. For now, the matcher
210 # is resolved on every access. (But the called function does use a
210 # is resolved on every access. (But the called function does use a
211 # cache to keep the lookup fast.)
211 # cache to keep the lookup fast.)
212 return self._sparsematchfn()
212 return self._sparsematchfn()
213
213
214 @repocache('branch')
214 @repocache('branch')
215 def _branch(self):
215 def _branch(self):
216 try:
216 try:
217 return self._opener.read("branch").strip() or "default"
217 return self._opener.read("branch").strip() or "default"
218 except IOError as inst:
218 except IOError as inst:
219 if inst.errno != errno.ENOENT:
219 if inst.errno != errno.ENOENT:
220 raise
220 raise
221 return "default"
221 return "default"
222
222
223 @propertycache
223 @propertycache
224 def _pl(self):
224 def _pl(self):
225 try:
225 try:
226 fp = self._opendirstatefile()
226 fp = self._opendirstatefile()
227 st = fp.read(40)
227 st = fp.read(40)
228 fp.close()
228 fp.close()
229 l = len(st)
229 l = len(st)
230 if l == 40:
230 if l == 40:
231 return st[:20], st[20:40]
231 return st[:20], st[20:40]
232 elif l > 0 and l < 40:
232 elif l > 0 and l < 40:
233 raise error.Abort(_('working directory state appears damaged!'))
233 raise error.Abort(_('working directory state appears damaged!'))
234 except IOError as err:
234 except IOError as err:
235 if err.errno != errno.ENOENT:
235 if err.errno != errno.ENOENT:
236 raise
236 raise
237 return [nullid, nullid]
237 return [nullid, nullid]
238
238
239 @propertycache
239 @propertycache
240 def _dirs(self):
240 def _dirs(self):
241 return util.dirs(self._map, 'r')
241 return util.dirs(self._map, 'r')
242
242
243 def dirs(self):
243 def dirs(self):
244 return self._dirs
244 return self._dirs
245
245
246 @rootcache('.hgignore')
246 @rootcache('.hgignore')
247 def _ignore(self):
247 def _ignore(self):
248 files = self._ignorefiles()
248 files = self._ignorefiles()
249 if not files:
249 if not files:
250 return matchmod.never(self._root, '')
250 return matchmod.never(self._root, '')
251
251
252 pats = ['include:%s' % f for f in files]
252 pats = ['include:%s' % f for f in files]
253 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
253 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
254
254
255 @propertycache
255 @propertycache
256 def _slash(self):
256 def _slash(self):
257 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
257 return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
258
258
259 @propertycache
259 @propertycache
260 def _checklink(self):
260 def _checklink(self):
261 return util.checklink(self._root)
261 return util.checklink(self._root)
262
262
263 @propertycache
263 @propertycache
264 def _checkexec(self):
264 def _checkexec(self):
265 return util.checkexec(self._root)
265 return util.checkexec(self._root)
266
266
267 @propertycache
267 @propertycache
268 def _checkcase(self):
268 def _checkcase(self):
269 return not util.fscasesensitive(self._join('.hg'))
269 return not util.fscasesensitive(self._join('.hg'))
270
270
271 def _join(self, f):
271 def _join(self, f):
272 # much faster than os.path.join()
272 # much faster than os.path.join()
273 # it's safe because f is always a relative path
273 # it's safe because f is always a relative path
274 return self._rootdir + f
274 return self._rootdir + f
275
275
276 def flagfunc(self, buildfallback):
276 def flagfunc(self, buildfallback):
277 if self._checklink and self._checkexec:
277 if self._checklink and self._checkexec:
278 def f(x):
278 def f(x):
279 try:
279 try:
280 st = os.lstat(self._join(x))
280 st = os.lstat(self._join(x))
281 if util.statislink(st):
281 if util.statislink(st):
282 return 'l'
282 return 'l'
283 if util.statisexec(st):
283 if util.statisexec(st):
284 return 'x'
284 return 'x'
285 except OSError:
285 except OSError:
286 pass
286 pass
287 return ''
287 return ''
288 return f
288 return f
289
289
290 fallback = buildfallback()
290 fallback = buildfallback()
291 if self._checklink:
291 if self._checklink:
292 def f(x):
292 def f(x):
293 if os.path.islink(self._join(x)):
293 if os.path.islink(self._join(x)):
294 return 'l'
294 return 'l'
295 if 'x' in fallback(x):
295 if 'x' in fallback(x):
296 return 'x'
296 return 'x'
297 return ''
297 return ''
298 return f
298 return f
299 if self._checkexec:
299 if self._checkexec:
300 def f(x):
300 def f(x):
301 if 'l' in fallback(x):
301 if 'l' in fallback(x):
302 return 'l'
302 return 'l'
303 if util.isexec(self._join(x)):
303 if util.isexec(self._join(x)):
304 return 'x'
304 return 'x'
305 return ''
305 return ''
306 return f
306 return f
307 else:
307 else:
308 return fallback
308 return fallback
309
309
310 @propertycache
310 @propertycache
311 def _cwd(self):
311 def _cwd(self):
312 # internal config: ui.forcecwd
312 # internal config: ui.forcecwd
313 forcecwd = self._ui.config('ui', 'forcecwd')
313 forcecwd = self._ui.config('ui', 'forcecwd')
314 if forcecwd:
314 if forcecwd:
315 return forcecwd
315 return forcecwd
316 return pycompat.getcwd()
316 return pycompat.getcwd()
317
317
318 def getcwd(self):
318 def getcwd(self):
319 '''Return the path from which a canonical path is calculated.
319 '''Return the path from which a canonical path is calculated.
320
320
321 This path should be used to resolve file patterns or to convert
321 This path should be used to resolve file patterns or to convert
322 canonical paths back to file paths for display. It shouldn't be
322 canonical paths back to file paths for display. It shouldn't be
323 used to get real file paths. Use vfs functions instead.
323 used to get real file paths. Use vfs functions instead.
324 '''
324 '''
325 cwd = self._cwd
325 cwd = self._cwd
326 if cwd == self._root:
326 if cwd == self._root:
327 return ''
327 return ''
328 # self._root ends with a path separator if self._root is '/' or 'C:\'
328 # self._root ends with a path separator if self._root is '/' or 'C:\'
329 rootsep = self._root
329 rootsep = self._root
330 if not util.endswithsep(rootsep):
330 if not util.endswithsep(rootsep):
331 rootsep += pycompat.ossep
331 rootsep += pycompat.ossep
332 if cwd.startswith(rootsep):
332 if cwd.startswith(rootsep):
333 return cwd[len(rootsep):]
333 return cwd[len(rootsep):]
334 else:
334 else:
335 # we're outside the repo. return an absolute path.
335 # we're outside the repo. return an absolute path.
336 return cwd
336 return cwd
337
337
338 def pathto(self, f, cwd=None):
338 def pathto(self, f, cwd=None):
339 if cwd is None:
339 if cwd is None:
340 cwd = self.getcwd()
340 cwd = self.getcwd()
341 path = util.pathto(self._root, cwd, f)
341 path = util.pathto(self._root, cwd, f)
342 if self._slash:
342 if self._slash:
343 return util.pconvert(path)
343 return util.pconvert(path)
344 return path
344 return path
345
345
346 def __getitem__(self, key):
346 def __getitem__(self, key):
347 '''Return the current state of key (a filename) in the dirstate.
347 '''Return the current state of key (a filename) in the dirstate.
348
348
349 States are:
349 States are:
350 n normal
350 n normal
351 m needs merging
351 m needs merging
352 r marked for removal
352 r marked for removal
353 a marked for addition
353 a marked for addition
354 ? not tracked
354 ? not tracked
355 '''
355 '''
356 return self._map.get(key, ("?",))[0]
356 return self._map.get(key, ("?",))[0]
357
357
358 def __contains__(self, key):
358 def __contains__(self, key):
359 return key in self._map
359 return key in self._map
360
360
361 def __iter__(self):
361 def __iter__(self):
362 return iter(sorted(self._map))
362 return iter(sorted(self._map))
363
363
364 def items(self):
364 def items(self):
365 return self._map.iteritems()
365 return self._map.iteritems()
366
366
367 iteritems = items
367 iteritems = items
368
368
369 def parents(self):
369 def parents(self):
370 return [self._validate(p) for p in self._pl]
370 return [self._validate(p) for p in self._pl]
371
371
372 def p1(self):
372 def p1(self):
373 return self._validate(self._pl[0])
373 return self._validate(self._pl[0])
374
374
375 def p2(self):
375 def p2(self):
376 return self._validate(self._pl[1])
376 return self._validate(self._pl[1])
377
377
378 def branch(self):
378 def branch(self):
379 return encoding.tolocal(self._branch)
379 return encoding.tolocal(self._branch)
380
380
381 def setparents(self, p1, p2=nullid):
381 def setparents(self, p1, p2=nullid):
382 """Set dirstate parents to p1 and p2.
382 """Set dirstate parents to p1 and p2.
383
383
384 When moving from two parents to one, 'm' merged entries a
384 When moving from two parents to one, 'm' merged entries a
385 adjusted to normal and previous copy records discarded and
385 adjusted to normal and previous copy records discarded and
386 returned by the call.
386 returned by the call.
387
387
388 See localrepo.setparents()
388 See localrepo.setparents()
389 """
389 """
390 if self._parentwriters == 0:
390 if self._parentwriters == 0:
391 raise ValueError("cannot set dirstate parent without "
391 raise ValueError("cannot set dirstate parent without "
392 "calling dirstate.beginparentchange")
392 "calling dirstate.beginparentchange")
393
393
394 self._dirty = self._dirtypl = True
394 self._dirty = self._dirtypl = True
395 oldp2 = self._pl[1]
395 oldp2 = self._pl[1]
396 if self._origpl is None:
396 if self._origpl is None:
397 self._origpl = self._pl
397 self._origpl = self._pl
398 self._pl = p1, p2
398 self._pl = p1, p2
399 copies = {}
399 copies = {}
400 if oldp2 != nullid and p2 == nullid:
400 if oldp2 != nullid and p2 == nullid:
401 candidatefiles = self._nonnormalset.union(self._otherparentset)
401 candidatefiles = self._nonnormalset.union(self._otherparentset)
402 for f in candidatefiles:
402 for f in candidatefiles:
403 s = self._map.get(f)
403 s = self._map.get(f)
404 if s is None:
404 if s is None:
405 continue
405 continue
406
406
407 # Discard 'm' markers when moving away from a merge state
407 # Discard 'm' markers when moving away from a merge state
408 if s[0] == 'm':
408 if s[0] == 'm':
409 if f in self._copymap:
409 if f in self._copymap:
410 copies[f] = self._copymap[f]
410 copies[f] = self._copymap[f]
411 self.normallookup(f)
411 self.normallookup(f)
412 # Also fix up otherparent markers
412 # Also fix up otherparent markers
413 elif s[0] == 'n' and s[2] == -2:
413 elif s[0] == 'n' and s[2] == -2:
414 if f in self._copymap:
414 if f in self._copymap:
415 copies[f] = self._copymap[f]
415 copies[f] = self._copymap[f]
416 self.add(f)
416 self.add(f)
417 return copies
417 return copies
418
418
419 def setbranch(self, branch):
419 def setbranch(self, branch):
420 self._branch = encoding.fromlocal(branch)
420 self._branch = encoding.fromlocal(branch)
421 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
421 f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
422 try:
422 try:
423 f.write(self._branch + '\n')
423 f.write(self._branch + '\n')
424 f.close()
424 f.close()
425
425
426 # make sure filecache has the correct stat info for _branch after
426 # make sure filecache has the correct stat info for _branch after
427 # replacing the underlying file
427 # replacing the underlying file
428 ce = self._filecache['_branch']
428 ce = self._filecache['_branch']
429 if ce:
429 if ce:
430 ce.refresh()
430 ce.refresh()
431 except: # re-raises
431 except: # re-raises
432 f.discard()
432 f.discard()
433 raise
433 raise
434
434
435 def _opendirstatefile(self):
435 def _opendirstatefile(self):
436 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
436 fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
437 if self._pendingmode is not None and self._pendingmode != mode:
437 if self._pendingmode is not None and self._pendingmode != mode:
438 fp.close()
438 fp.close()
439 raise error.Abort(_('working directory state may be '
439 raise error.Abort(_('working directory state may be '
440 'changed parallelly'))
440 'changed parallelly'))
441 self._pendingmode = mode
441 self._pendingmode = mode
442 return fp
442 return fp
443
443
444 def _read(self):
444 def _read(self):
445 self._map = {}
445 self._map = {}
446 self._copymap = {}
446 self._copymap = {}
447 # ignore HG_PENDING because identity is used only for writing
447 # ignore HG_PENDING because identity is used only for writing
448 self._identity = util.filestat.frompath(
448 self._identity = util.filestat.frompath(
449 self._opener.join(self._filename))
449 self._opener.join(self._filename))
450 try:
450 try:
451 fp = self._opendirstatefile()
451 fp = self._opendirstatefile()
452 try:
452 try:
453 st = fp.read()
453 st = fp.read()
454 finally:
454 finally:
455 fp.close()
455 fp.close()
456 except IOError as err:
456 except IOError as err:
457 if err.errno != errno.ENOENT:
457 if err.errno != errno.ENOENT:
458 raise
458 raise
459 return
459 return
460 if not st:
460 if not st:
461 return
461 return
462
462
463 if util.safehasattr(parsers, 'dict_new_presized'):
463 if util.safehasattr(parsers, 'dict_new_presized'):
464 # Make an estimate of the number of files in the dirstate based on
464 # Make an estimate of the number of files in the dirstate based on
465 # its size. From a linear regression on a set of real-world repos,
465 # its size. From a linear regression on a set of real-world repos,
466 # all over 10,000 files, the size of a dirstate entry is 85
466 # all over 10,000 files, the size of a dirstate entry is 85
467 # bytes. The cost of resizing is significantly higher than the cost
467 # bytes. The cost of resizing is significantly higher than the cost
468 # of filling in a larger presized dict, so subtract 20% from the
468 # of filling in a larger presized dict, so subtract 20% from the
469 # size.
469 # size.
470 #
470 #
471 # This heuristic is imperfect in many ways, so in a future dirstate
471 # This heuristic is imperfect in many ways, so in a future dirstate
472 # format update it makes sense to just record the number of entries
472 # format update it makes sense to just record the number of entries
473 # on write.
473 # on write.
474 self._map = parsers.dict_new_presized(len(st) / 71)
474 self._map = parsers.dict_new_presized(len(st) / 71)
475
475
476 # Python's garbage collector triggers a GC each time a certain number
476 # Python's garbage collector triggers a GC each time a certain number
477 # of container objects (the number being defined by
477 # of container objects (the number being defined by
478 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
478 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
479 # for each file in the dirstate. The C version then immediately marks
479 # for each file in the dirstate. The C version then immediately marks
480 # them as not to be tracked by the collector. However, this has no
480 # them as not to be tracked by the collector. However, this has no
481 # effect on when GCs are triggered, only on what objects the GC looks
481 # effect on when GCs are triggered, only on what objects the GC looks
482 # into. This means that O(number of files) GCs are unavoidable.
482 # into. This means that O(number of files) GCs are unavoidable.
483 # Depending on when in the process's lifetime the dirstate is parsed,
483 # Depending on when in the process's lifetime the dirstate is parsed,
484 # this can get very expensive. As a workaround, disable GC while
484 # this can get very expensive. As a workaround, disable GC while
485 # parsing the dirstate.
485 # parsing the dirstate.
486 #
486 #
487 # (we cannot decorate the function directly since it is in a C module)
487 # (we cannot decorate the function directly since it is in a C module)
488 parse_dirstate = util.nogc(parsers.parse_dirstate)
488 parse_dirstate = util.nogc(parsers.parse_dirstate)
489 p = parse_dirstate(self._map, self._copymap, st)
489 p = parse_dirstate(self._map, self._copymap, st)
490 if not self._dirtypl:
490 if not self._dirtypl:
491 self._pl = p
491 self._pl = p
492
492
493 def invalidate(self):
493 def invalidate(self):
494 '''Causes the next access to reread the dirstate.
494 '''Causes the next access to reread the dirstate.
495
495
496 This is different from localrepo.invalidatedirstate() because it always
496 This is different from localrepo.invalidatedirstate() because it always
497 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
497 rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
498 check whether the dirstate has changed before rereading it.'''
498 check whether the dirstate has changed before rereading it.'''
499
499
500 for a in ("_map", "_copymap", "_identity",
500 for a in ("_map", "_copymap", "_identity",
501 "_filefoldmap", "_dirfoldmap", "_branch",
501 "_filefoldmap", "_dirfoldmap", "_branch",
502 "_pl", "_dirs", "_ignore", "_nonnormalset",
502 "_pl", "_dirs", "_ignore", "_nonnormalset",
503 "_otherparentset"):
503 "_otherparentset"):
504 if a in self.__dict__:
504 if a in self.__dict__:
505 delattr(self, a)
505 delattr(self, a)
506 self._lastnormaltime = 0
506 self._lastnormaltime = 0
507 self._dirty = False
507 self._dirty = False
508 self._updatedfiles.clear()
508 self._updatedfiles.clear()
509 self._parentwriters = 0
509 self._parentwriters = 0
510 self._origpl = None
510 self._origpl = None
511
511
512 def copy(self, source, dest):
512 def copy(self, source, dest):
513 """Mark dest as a copy of source. Unmark dest if source is None."""
513 """Mark dest as a copy of source. Unmark dest if source is None."""
514 if source == dest:
514 if source == dest:
515 return
515 return
516 self._dirty = True
516 self._dirty = True
517 if source is not None:
517 if source is not None:
518 self._copymap[dest] = source
518 self._copymap[dest] = source
519 self._updatedfiles.add(source)
519 self._updatedfiles.add(source)
520 self._updatedfiles.add(dest)
520 self._updatedfiles.add(dest)
521 elif dest in self._copymap:
521 elif dest in self._copymap:
522 del self._copymap[dest]
522 del self._copymap[dest]
523 self._updatedfiles.add(dest)
523 self._updatedfiles.add(dest)
524
524
525 def copied(self, file):
525 def copied(self, file):
526 return self._copymap.get(file, None)
526 return self._copymap.get(file, None)
527
527
528 def copies(self):
528 def copies(self):
529 return self._copymap
529 return self._copymap
530
530
531 def _droppath(self, f):
531 def _droppath(self, f):
532 if self[f] not in "?r" and "_dirs" in self.__dict__:
532 if self[f] not in "?r" and "_dirs" in self.__dict__:
533 self._dirs.delpath(f)
533 self._dirs.delpath(f)
534
534
535 if "_filefoldmap" in self.__dict__:
535 if "_filefoldmap" in self.__dict__:
536 normed = util.normcase(f)
536 normed = util.normcase(f)
537 if normed in self._filefoldmap:
537 if normed in self._filefoldmap:
538 del self._filefoldmap[normed]
538 del self._filefoldmap[normed]
539
539
540 self._updatedfiles.add(f)
540 self._updatedfiles.add(f)
541
541
542 def _addpath(self, f, state, mode, size, mtime):
542 def _addpath(self, f, state, mode, size, mtime):
543 oldstate = self[f]
543 oldstate = self[f]
544 if state == 'a' or oldstate == 'r':
544 if state == 'a' or oldstate == 'r':
545 scmutil.checkfilename(f)
545 scmutil.checkfilename(f)
546 if f in self._dirs:
546 if f in self._dirs:
547 raise error.Abort(_('directory %r already in dirstate') % f)
547 raise error.Abort(_('directory %r already in dirstate') % f)
548 # shadows
548 # shadows
549 for d in util.finddirs(f):
549 for d in util.finddirs(f):
550 if d in self._dirs:
550 if d in self._dirs:
551 break
551 break
552 if d in self._map and self[d] != 'r':
552 if d in self._map and self[d] != 'r':
553 raise error.Abort(
553 raise error.Abort(
554 _('file %r in dirstate clashes with %r') % (d, f))
554 _('file %r in dirstate clashes with %r') % (d, f))
555 if oldstate in "?r" and "_dirs" in self.__dict__:
555 if oldstate in "?r" and "_dirs" in self.__dict__:
556 self._dirs.addpath(f)
556 self._dirs.addpath(f)
557 self._dirty = True
557 self._dirty = True
558 self._updatedfiles.add(f)
558 self._updatedfiles.add(f)
559 self._map[f] = dirstatetuple(state, mode, size, mtime)
559 self._map[f] = dirstatetuple(state, mode, size, mtime)
560 if state != 'n' or mtime == -1:
560 if state != 'n' or mtime == -1:
561 self._nonnormalset.add(f)
561 self._nonnormalset.add(f)
562 if size == -2:
562 if size == -2:
563 self._otherparentset.add(f)
563 self._otherparentset.add(f)
564
564
565 def normal(self, f):
565 def normal(self, f):
566 '''Mark a file normal and clean.'''
566 '''Mark a file normal and clean.'''
567 s = os.lstat(self._join(f))
567 s = os.lstat(self._join(f))
568 mtime = s.st_mtime
568 mtime = s.st_mtime
569 self._addpath(f, 'n', s.st_mode,
569 self._addpath(f, 'n', s.st_mode,
570 s.st_size & _rangemask, mtime & _rangemask)
570 s.st_size & _rangemask, mtime & _rangemask)
571 if f in self._copymap:
571 if f in self._copymap:
572 del self._copymap[f]
572 del self._copymap[f]
573 if f in self._nonnormalset:
573 if f in self._nonnormalset:
574 self._nonnormalset.remove(f)
574 self._nonnormalset.remove(f)
575 if mtime > self._lastnormaltime:
575 if mtime > self._lastnormaltime:
576 # Remember the most recent modification timeslot for status(),
576 # Remember the most recent modification timeslot for status(),
577 # to make sure we won't miss future size-preserving file content
577 # to make sure we won't miss future size-preserving file content
578 # modifications that happen within the same timeslot.
578 # modifications that happen within the same timeslot.
579 self._lastnormaltime = mtime
579 self._lastnormaltime = mtime
580
580
581 def normallookup(self, f):
581 def normallookup(self, f):
582 '''Mark a file normal, but possibly dirty.'''
582 '''Mark a file normal, but possibly dirty.'''
583 if self._pl[1] != nullid and f in self._map:
583 if self._pl[1] != nullid and f in self._map:
584 # if there is a merge going on and the file was either
584 # if there is a merge going on and the file was either
585 # in state 'm' (-1) or coming from other parent (-2) before
585 # in state 'm' (-1) or coming from other parent (-2) before
586 # being removed, restore that state.
586 # being removed, restore that state.
587 entry = self._map[f]
587 entry = self._map[f]
588 if entry[0] == 'r' and entry[2] in (-1, -2):
588 if entry[0] == 'r' and entry[2] in (-1, -2):
589 source = self._copymap.get(f)
589 source = self._copymap.get(f)
590 if entry[2] == -1:
590 if entry[2] == -1:
591 self.merge(f)
591 self.merge(f)
592 elif entry[2] == -2:
592 elif entry[2] == -2:
593 self.otherparent(f)
593 self.otherparent(f)
594 if source:
594 if source:
595 self.copy(source, f)
595 self.copy(source, f)
596 return
596 return
597 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
597 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
598 return
598 return
599 self._addpath(f, 'n', 0, -1, -1)
599 self._addpath(f, 'n', 0, -1, -1)
600 if f in self._copymap:
600 if f in self._copymap:
601 del self._copymap[f]
601 del self._copymap[f]
602 if f in self._nonnormalset:
602 if f in self._nonnormalset:
603 self._nonnormalset.remove(f)
603 self._nonnormalset.remove(f)
604
604
605 def otherparent(self, f):
605 def otherparent(self, f):
606 '''Mark as coming from the other parent, always dirty.'''
606 '''Mark as coming from the other parent, always dirty.'''
607 if self._pl[1] == nullid:
607 if self._pl[1] == nullid:
608 raise error.Abort(_("setting %r to other parent "
608 raise error.Abort(_("setting %r to other parent "
609 "only allowed in merges") % f)
609 "only allowed in merges") % f)
610 if f in self and self[f] == 'n':
610 if f in self and self[f] == 'n':
611 # merge-like
611 # merge-like
612 self._addpath(f, 'm', 0, -2, -1)
612 self._addpath(f, 'm', 0, -2, -1)
613 else:
613 else:
614 # add-like
614 # add-like
615 self._addpath(f, 'n', 0, -2, -1)
615 self._addpath(f, 'n', 0, -2, -1)
616
616
617 if f in self._copymap:
617 if f in self._copymap:
618 del self._copymap[f]
618 del self._copymap[f]
619
619
620 def add(self, f):
620 def add(self, f):
621 '''Mark a file added.'''
621 '''Mark a file added.'''
622 self._addpath(f, 'a', 0, -1, -1)
622 self._addpath(f, 'a', 0, -1, -1)
623 if f in self._copymap:
623 if f in self._copymap:
624 del self._copymap[f]
624 del self._copymap[f]
625
625
626 def remove(self, f):
626 def remove(self, f):
627 '''Mark a file removed.'''
627 '''Mark a file removed.'''
628 self._dirty = True
628 self._dirty = True
629 self._droppath(f)
629 self._droppath(f)
630 size = 0
630 size = 0
631 if self._pl[1] != nullid and f in self._map:
631 if self._pl[1] != nullid and f in self._map:
632 # backup the previous state
632 # backup the previous state
633 entry = self._map[f]
633 entry = self._map[f]
634 if entry[0] == 'm': # merge
634 if entry[0] == 'm': # merge
635 size = -1
635 size = -1
636 elif entry[0] == 'n' and entry[2] == -2: # other parent
636 elif entry[0] == 'n' and entry[2] == -2: # other parent
637 size = -2
637 size = -2
638 self._otherparentset.add(f)
638 self._otherparentset.add(f)
639 self._map[f] = dirstatetuple('r', 0, size, 0)
639 self._map[f] = dirstatetuple('r', 0, size, 0)
640 self._nonnormalset.add(f)
640 self._nonnormalset.add(f)
641 if size == 0 and f in self._copymap:
641 if size == 0 and f in self._copymap:
642 del self._copymap[f]
642 del self._copymap[f]
643
643
644 def merge(self, f):
644 def merge(self, f):
645 '''Mark a file merged.'''
645 '''Mark a file merged.'''
646 if self._pl[1] == nullid:
646 if self._pl[1] == nullid:
647 return self.normallookup(f)
647 return self.normallookup(f)
648 return self.otherparent(f)
648 return self.otherparent(f)
649
649
650 def drop(self, f):
650 def drop(self, f):
651 '''Drop a file from the dirstate'''
651 '''Drop a file from the dirstate'''
652 if f in self._map:
652 if f in self._map:
653 self._dirty = True
653 self._dirty = True
654 self._droppath(f)
654 self._droppath(f)
655 del self._map[f]
655 del self._map[f]
656 if f in self._nonnormalset:
656 if f in self._nonnormalset:
657 self._nonnormalset.remove(f)
657 self._nonnormalset.remove(f)
658 if f in self._copymap:
658 if f in self._copymap:
659 del self._copymap[f]
659 del self._copymap[f]
660
660
661 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
661 def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
662 if exists is None:
662 if exists is None:
663 exists = os.path.lexists(os.path.join(self._root, path))
663 exists = os.path.lexists(os.path.join(self._root, path))
664 if not exists:
664 if not exists:
665 # Maybe a path component exists
665 # Maybe a path component exists
666 if not ignoremissing and '/' in path:
666 if not ignoremissing and '/' in path:
667 d, f = path.rsplit('/', 1)
667 d, f = path.rsplit('/', 1)
668 d = self._normalize(d, False, ignoremissing, None)
668 d = self._normalize(d, False, ignoremissing, None)
669 folded = d + "/" + f
669 folded = d + "/" + f
670 else:
670 else:
671 # No path components, preserve original case
671 # No path components, preserve original case
672 folded = path
672 folded = path
673 else:
673 else:
674 # recursively normalize leading directory components
674 # recursively normalize leading directory components
675 # against dirstate
675 # against dirstate
676 if '/' in normed:
676 if '/' in normed:
677 d, f = normed.rsplit('/', 1)
677 d, f = normed.rsplit('/', 1)
678 d = self._normalize(d, False, ignoremissing, True)
678 d = self._normalize(d, False, ignoremissing, True)
679 r = self._root + "/" + d
679 r = self._root + "/" + d
680 folded = d + "/" + util.fspath(f, r)
680 folded = d + "/" + util.fspath(f, r)
681 else:
681 else:
682 folded = util.fspath(normed, self._root)
682 folded = util.fspath(normed, self._root)
683 storemap[normed] = folded
683 storemap[normed] = folded
684
684
685 return folded
685 return folded
686
686
687 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
687 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
688 normed = util.normcase(path)
688 normed = util.normcase(path)
689 folded = self._filefoldmap.get(normed, None)
689 folded = self._filefoldmap.get(normed, None)
690 if folded is None:
690 if folded is None:
691 if isknown:
691 if isknown:
692 folded = path
692 folded = path
693 else:
693 else:
694 folded = self._discoverpath(path, normed, ignoremissing, exists,
694 folded = self._discoverpath(path, normed, ignoremissing, exists,
695 self._filefoldmap)
695 self._filefoldmap)
696 return folded
696 return folded
697
697
698 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
698 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
699 normed = util.normcase(path)
699 normed = util.normcase(path)
700 folded = self._filefoldmap.get(normed, None)
700 folded = self._filefoldmap.get(normed, None)
701 if folded is None:
701 if folded is None:
702 folded = self._dirfoldmap.get(normed, None)
702 folded = self._dirfoldmap.get(normed, None)
703 if folded is None:
703 if folded is None:
704 if isknown:
704 if isknown:
705 folded = path
705 folded = path
706 else:
706 else:
707 # store discovered result in dirfoldmap so that future
707 # store discovered result in dirfoldmap so that future
708 # normalizefile calls don't start matching directories
708 # normalizefile calls don't start matching directories
709 folded = self._discoverpath(path, normed, ignoremissing, exists,
709 folded = self._discoverpath(path, normed, ignoremissing, exists,
710 self._dirfoldmap)
710 self._dirfoldmap)
711 return folded
711 return folded
712
712
713 def normalize(self, path, isknown=False, ignoremissing=False):
713 def normalize(self, path, isknown=False, ignoremissing=False):
714 '''
714 '''
715 normalize the case of a pathname when on a casefolding filesystem
715 normalize the case of a pathname when on a casefolding filesystem
716
716
717 isknown specifies whether the filename came from walking the
717 isknown specifies whether the filename came from walking the
718 disk, to avoid extra filesystem access.
718 disk, to avoid extra filesystem access.
719
719
720 If ignoremissing is True, missing path are returned
720 If ignoremissing is True, missing path are returned
721 unchanged. Otherwise, we try harder to normalize possibly
721 unchanged. Otherwise, we try harder to normalize possibly
722 existing path components.
722 existing path components.
723
723
724 The normalized case is determined based on the following precedence:
724 The normalized case is determined based on the following precedence:
725
725
726 - version of name already stored in the dirstate
726 - version of name already stored in the dirstate
727 - version of name stored on disk
727 - version of name stored on disk
728 - version provided via command arguments
728 - version provided via command arguments
729 '''
729 '''
730
730
731 if self._checkcase:
731 if self._checkcase:
732 return self._normalize(path, isknown, ignoremissing)
732 return self._normalize(path, isknown, ignoremissing)
733 return path
733 return path
734
734
735 def clear(self):
735 def clear(self):
736 self._map = {}
736 self._map = {}
737 self._nonnormalset = set()
737 self._nonnormalset = set()
738 self._otherparentset = set()
738 self._otherparentset = set()
739 if "_dirs" in self.__dict__:
739 if "_dirs" in self.__dict__:
740 delattr(self, "_dirs")
740 delattr(self, "_dirs")
741 self._copymap = {}
741 self._copymap = {}
742 self._pl = [nullid, nullid]
742 self._pl = [nullid, nullid]
743 self._lastnormaltime = 0
743 self._lastnormaltime = 0
744 self._updatedfiles.clear()
744 self._updatedfiles.clear()
745 self._dirty = True
745 self._dirty = True
746
746
747 def rebuild(self, parent, allfiles, changedfiles=None):
747 def rebuild(self, parent, allfiles, changedfiles=None):
748 if changedfiles is None:
748 if changedfiles is None:
749 # Rebuild entire dirstate
749 # Rebuild entire dirstate
750 changedfiles = allfiles
750 changedfiles = allfiles
751 lastnormaltime = self._lastnormaltime
751 lastnormaltime = self._lastnormaltime
752 self.clear()
752 self.clear()
753 self._lastnormaltime = lastnormaltime
753 self._lastnormaltime = lastnormaltime
754
754
755 if self._origpl is None:
755 if self._origpl is None:
756 self._origpl = self._pl
756 self._origpl = self._pl
757 self._pl = (parent, nullid)
757 self._pl = (parent, nullid)
758 for f in changedfiles:
758 for f in changedfiles:
759 if f in allfiles:
759 if f in allfiles:
760 self.normallookup(f)
760 self.normallookup(f)
761 else:
761 else:
762 self.drop(f)
762 self.drop(f)
763
763
764 self._dirty = True
764 self._dirty = True
765
765
766 def identity(self):
766 def identity(self):
767 '''Return identity of dirstate itself to detect changing in storage
767 '''Return identity of dirstate itself to detect changing in storage
768
768
769 If identity of previous dirstate is equal to this, writing
769 If identity of previous dirstate is equal to this, writing
770 changes based on the former dirstate out can keep consistency.
770 changes based on the former dirstate out can keep consistency.
771 '''
771 '''
772 return self._identity
772 return self._identity
773
773
774 def write(self, tr):
774 def write(self, tr):
775 if not self._dirty:
775 if not self._dirty:
776 return
776 return
777
777
778 filename = self._filename
778 filename = self._filename
779 if tr:
779 if tr:
780 # 'dirstate.write()' is not only for writing in-memory
780 # 'dirstate.write()' is not only for writing in-memory
781 # changes out, but also for dropping ambiguous timestamp.
781 # changes out, but also for dropping ambiguous timestamp.
782 # delayed writing re-raise "ambiguous timestamp issue".
782 # delayed writing re-raise "ambiguous timestamp issue".
783 # See also the wiki page below for detail:
783 # See also the wiki page below for detail:
784 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
784 # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan
785
785
786 # emulate dropping timestamp in 'parsers.pack_dirstate'
786 # emulate dropping timestamp in 'parsers.pack_dirstate'
787 now = _getfsnow(self._opener)
787 now = _getfsnow(self._opener)
788 dmap = self._map
788 dmap = self._map
789 for f in self._updatedfiles:
789 for f in self._updatedfiles:
790 e = dmap.get(f)
790 e = dmap.get(f)
791 if e is not None and e[0] == 'n' and e[3] == now:
791 if e is not None and e[0] == 'n' and e[3] == now:
792 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
792 dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
793 self._nonnormalset.add(f)
793 self._nonnormalset.add(f)
794
794
795 # emulate that all 'dirstate.normal' results are written out
795 # emulate that all 'dirstate.normal' results are written out
796 self._lastnormaltime = 0
796 self._lastnormaltime = 0
797 self._updatedfiles.clear()
797 self._updatedfiles.clear()
798
798
799 # delay writing in-memory changes out
799 # delay writing in-memory changes out
800 tr.addfilegenerator('dirstate', (self._filename,),
800 tr.addfilegenerator('dirstate', (self._filename,),
801 self._writedirstate, location='plain')
801 self._writedirstate, location='plain')
802 return
802 return
803
803
804 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
804 st = self._opener(filename, "w", atomictemp=True, checkambig=True)
805 self._writedirstate(st)
805 self._writedirstate(st)
806
806
807 def addparentchangecallback(self, category, callback):
807 def addparentchangecallback(self, category, callback):
808 """add a callback to be called when the wd parents are changed
808 """add a callback to be called when the wd parents are changed
809
809
810 Callback will be called with the following arguments:
810 Callback will be called with the following arguments:
811 dirstate, (oldp1, oldp2), (newp1, newp2)
811 dirstate, (oldp1, oldp2), (newp1, newp2)
812
812
813 Category is a unique identifier to allow overwriting an old callback
813 Category is a unique identifier to allow overwriting an old callback
814 with a newer callback.
814 with a newer callback.
815 """
815 """
816 self._plchangecallbacks[category] = callback
816 self._plchangecallbacks[category] = callback
817
817
818 def _writedirstate(self, st):
818 def _writedirstate(self, st):
819 # notify callbacks about parents change
819 # notify callbacks about parents change
820 if self._origpl is not None and self._origpl != self._pl:
820 if self._origpl is not None and self._origpl != self._pl:
821 for c, callback in sorted(self._plchangecallbacks.iteritems()):
821 for c, callback in sorted(self._plchangecallbacks.iteritems()):
822 callback(self, self._origpl, self._pl)
822 callback(self, self._origpl, self._pl)
823 self._origpl = None
823 self._origpl = None
824 # use the modification time of the newly created temporary file as the
824 # use the modification time of the newly created temporary file as the
825 # filesystem's notion of 'now'
825 # filesystem's notion of 'now'
826 now = util.fstat(st).st_mtime & _rangemask
826 now = util.fstat(st).st_mtime & _rangemask
827
827
828 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
828 # enough 'delaywrite' prevents 'pack_dirstate' from dropping
829 # timestamp of each entries in dirstate, because of 'now > mtime'
829 # timestamp of each entries in dirstate, because of 'now > mtime'
830 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
830 delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
831 if delaywrite > 0:
831 if delaywrite > 0:
832 # do we have any files to delay for?
832 # do we have any files to delay for?
833 for f, e in self._map.iteritems():
833 for f, e in self._map.iteritems():
834 if e[0] == 'n' and e[3] == now:
834 if e[0] == 'n' and e[3] == now:
835 import time # to avoid useless import
835 import time # to avoid useless import
836 # rather than sleep n seconds, sleep until the next
836 # rather than sleep n seconds, sleep until the next
837 # multiple of n seconds
837 # multiple of n seconds
838 clock = time.time()
838 clock = time.time()
839 start = int(clock) - (int(clock) % delaywrite)
839 start = int(clock) - (int(clock) % delaywrite)
840 end = start + delaywrite
840 end = start + delaywrite
841 time.sleep(end - clock)
841 time.sleep(end - clock)
842 now = end # trust our estimate that the end is near now
842 now = end # trust our estimate that the end is near now
843 break
843 break
844
844
845 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
845 st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
846 self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
846 self._nonnormalset, self._otherparentset = nonnormalentries(self._map)
847 st.close()
847 st.close()
848 self._lastnormaltime = 0
848 self._lastnormaltime = 0
849 self._dirty = self._dirtypl = False
849 self._dirty = self._dirtypl = False
850
850
851 def _dirignore(self, f):
851 def _dirignore(self, f):
852 if f == '.':
852 if f == '.':
853 return False
853 return False
854 if self._ignore(f):
854 if self._ignore(f):
855 return True
855 return True
856 for p in util.finddirs(f):
856 for p in util.finddirs(f):
857 if self._ignore(p):
857 if self._ignore(p):
858 return True
858 return True
859 return False
859 return False
860
860
861 def _ignorefiles(self):
861 def _ignorefiles(self):
862 files = []
862 files = []
863 if os.path.exists(self._join('.hgignore')):
863 if os.path.exists(self._join('.hgignore')):
864 files.append(self._join('.hgignore'))
864 files.append(self._join('.hgignore'))
865 for name, path in self._ui.configitems("ui"):
865 for name, path in self._ui.configitems("ui"):
866 if name == 'ignore' or name.startswith('ignore.'):
866 if name == 'ignore' or name.startswith('ignore.'):
867 # we need to use os.path.join here rather than self._join
867 # we need to use os.path.join here rather than self._join
868 # because path is arbitrary and user-specified
868 # because path is arbitrary and user-specified
869 files.append(os.path.join(self._rootdir, util.expandpath(path)))
869 files.append(os.path.join(self._rootdir, util.expandpath(path)))
870 return files
870 return files
871
871
872 def _ignorefileandline(self, f):
872 def _ignorefileandline(self, f):
873 files = collections.deque(self._ignorefiles())
873 files = collections.deque(self._ignorefiles())
874 visited = set()
874 visited = set()
875 while files:
875 while files:
876 i = files.popleft()
876 i = files.popleft()
877 patterns = matchmod.readpatternfile(i, self._ui.warn,
877 patterns = matchmod.readpatternfile(i, self._ui.warn,
878 sourceinfo=True)
878 sourceinfo=True)
879 for pattern, lineno, line in patterns:
879 for pattern, lineno, line in patterns:
880 kind, p = matchmod._patsplit(pattern, 'glob')
880 kind, p = matchmod._patsplit(pattern, 'glob')
881 if kind == "subinclude":
881 if kind == "subinclude":
882 if p not in visited:
882 if p not in visited:
883 files.append(p)
883 files.append(p)
884 continue
884 continue
885 m = matchmod.match(self._root, '', [], [pattern],
885 m = matchmod.match(self._root, '', [], [pattern],
886 warn=self._ui.warn)
886 warn=self._ui.warn)
887 if m(f):
887 if m(f):
888 return (i, lineno, line)
888 return (i, lineno, line)
889 visited.add(i)
889 visited.add(i)
890 return (None, -1, "")
890 return (None, -1, "")
891
891
892 def _walkexplicit(self, match, subrepos):
892 def _walkexplicit(self, match, subrepos):
893 '''Get stat data about the files explicitly specified by match.
893 '''Get stat data about the files explicitly specified by match.
894
894
895 Return a triple (results, dirsfound, dirsnotfound).
895 Return a triple (results, dirsfound, dirsnotfound).
896 - results is a mapping from filename to stat result. It also contains
896 - results is a mapping from filename to stat result. It also contains
897 listings mapping subrepos and .hg to None.
897 listings mapping subrepos and .hg to None.
898 - dirsfound is a list of files found to be directories.
898 - dirsfound is a list of files found to be directories.
899 - dirsnotfound is a list of files that the dirstate thinks are
899 - dirsnotfound is a list of files that the dirstate thinks are
900 directories and that were not found.'''
900 directories and that were not found.'''
901
901
902 def badtype(mode):
902 def badtype(mode):
903 kind = _('unknown')
903 kind = _('unknown')
904 if stat.S_ISCHR(mode):
904 if stat.S_ISCHR(mode):
905 kind = _('character device')
905 kind = _('character device')
906 elif stat.S_ISBLK(mode):
906 elif stat.S_ISBLK(mode):
907 kind = _('block device')
907 kind = _('block device')
908 elif stat.S_ISFIFO(mode):
908 elif stat.S_ISFIFO(mode):
909 kind = _('fifo')
909 kind = _('fifo')
910 elif stat.S_ISSOCK(mode):
910 elif stat.S_ISSOCK(mode):
911 kind = _('socket')
911 kind = _('socket')
912 elif stat.S_ISDIR(mode):
912 elif stat.S_ISDIR(mode):
913 kind = _('directory')
913 kind = _('directory')
914 return _('unsupported file type (type is %s)') % kind
914 return _('unsupported file type (type is %s)') % kind
915
915
916 matchedir = match.explicitdir
916 matchedir = match.explicitdir
917 badfn = match.bad
917 badfn = match.bad
918 dmap = self._map
918 dmap = self._map
919 lstat = os.lstat
919 lstat = os.lstat
920 getkind = stat.S_IFMT
920 getkind = stat.S_IFMT
921 dirkind = stat.S_IFDIR
921 dirkind = stat.S_IFDIR
922 regkind = stat.S_IFREG
922 regkind = stat.S_IFREG
923 lnkkind = stat.S_IFLNK
923 lnkkind = stat.S_IFLNK
924 join = self._join
924 join = self._join
925 dirsfound = []
925 dirsfound = []
926 foundadd = dirsfound.append
926 foundadd = dirsfound.append
927 dirsnotfound = []
927 dirsnotfound = []
928 notfoundadd = dirsnotfound.append
928 notfoundadd = dirsnotfound.append
929
929
930 if not match.isexact() and self._checkcase:
930 if not match.isexact() and self._checkcase:
931 normalize = self._normalize
931 normalize = self._normalize
932 else:
932 else:
933 normalize = None
933 normalize = None
934
934
935 files = sorted(match.files())
935 files = sorted(match.files())
936 subrepos.sort()
936 subrepos.sort()
937 i, j = 0, 0
937 i, j = 0, 0
938 while i < len(files) and j < len(subrepos):
938 while i < len(files) and j < len(subrepos):
939 subpath = subrepos[j] + "/"
939 subpath = subrepos[j] + "/"
940 if files[i] < subpath:
940 if files[i] < subpath:
941 i += 1
941 i += 1
942 continue
942 continue
943 while i < len(files) and files[i].startswith(subpath):
943 while i < len(files) and files[i].startswith(subpath):
944 del files[i]
944 del files[i]
945 j += 1
945 j += 1
946
946
947 if not files or '.' in files:
947 if not files or '.' in files:
948 files = ['.']
948 files = ['.']
949 results = dict.fromkeys(subrepos)
949 results = dict.fromkeys(subrepos)
950 results['.hg'] = None
950 results['.hg'] = None
951
951
952 alldirs = None
952 alldirs = None
953 for ff in files:
953 for ff in files:
954 # constructing the foldmap is expensive, so don't do it for the
954 # constructing the foldmap is expensive, so don't do it for the
955 # common case where files is ['.']
955 # common case where files is ['.']
956 if normalize and ff != '.':
956 if normalize and ff != '.':
957 nf = normalize(ff, False, True)
957 nf = normalize(ff, False, True)
958 else:
958 else:
959 nf = ff
959 nf = ff
960 if nf in results:
960 if nf in results:
961 continue
961 continue
962
962
963 try:
963 try:
964 st = lstat(join(nf))
964 st = lstat(join(nf))
965 kind = getkind(st.st_mode)
965 kind = getkind(st.st_mode)
966 if kind == dirkind:
966 if kind == dirkind:
967 if nf in dmap:
967 if nf in dmap:
968 # file replaced by dir on disk but still in dirstate
968 # file replaced by dir on disk but still in dirstate
969 results[nf] = None
969 results[nf] = None
970 if matchedir:
970 if matchedir:
971 matchedir(nf)
971 matchedir(nf)
972 foundadd((nf, ff))
972 foundadd((nf, ff))
973 elif kind == regkind or kind == lnkkind:
973 elif kind == regkind or kind == lnkkind:
974 results[nf] = st
974 results[nf] = st
975 else:
975 else:
976 badfn(ff, badtype(kind))
976 badfn(ff, badtype(kind))
977 if nf in dmap:
977 if nf in dmap:
978 results[nf] = None
978 results[nf] = None
979 except OSError as inst: # nf not found on disk - it is dirstate only
979 except OSError as inst: # nf not found on disk - it is dirstate only
980 if nf in dmap: # does it exactly match a missing file?
980 if nf in dmap: # does it exactly match a missing file?
981 results[nf] = None
981 results[nf] = None
982 else: # does it match a missing directory?
982 else: # does it match a missing directory?
983 if alldirs is None:
983 if alldirs is None:
984 alldirs = util.dirs(dmap)
984 alldirs = util.dirs(dmap)
985 if nf in alldirs:
985 if nf in alldirs:
986 if matchedir:
986 if matchedir:
987 matchedir(nf)
987 matchedir(nf)
988 notfoundadd(nf)
988 notfoundadd(nf)
989 else:
989 else:
990 badfn(ff, inst.strerror)
990 badfn(ff, inst.strerror)
991
991
992 # Case insensitive filesystems cannot rely on lstat() failing to detect
992 # Case insensitive filesystems cannot rely on lstat() failing to detect
993 # a case-only rename. Prune the stat object for any file that does not
993 # a case-only rename. Prune the stat object for any file that does not
994 # match the case in the filesystem, if there are multiple files that
994 # match the case in the filesystem, if there are multiple files that
995 # normalize to the same path.
995 # normalize to the same path.
996 if match.isexact() and self._checkcase:
996 if match.isexact() and self._checkcase:
997 normed = {}
997 normed = {}
998
998
999 for f, st in results.iteritems():
999 for f, st in results.iteritems():
1000 if st is None:
1000 if st is None:
1001 continue
1001 continue
1002
1002
1003 nc = util.normcase(f)
1003 nc = util.normcase(f)
1004 paths = normed.get(nc)
1004 paths = normed.get(nc)
1005
1005
1006 if paths is None:
1006 if paths is None:
1007 paths = set()
1007 paths = set()
1008 normed[nc] = paths
1008 normed[nc] = paths
1009
1009
1010 paths.add(f)
1010 paths.add(f)
1011
1011
1012 for norm, paths in normed.iteritems():
1012 for norm, paths in normed.iteritems():
1013 if len(paths) > 1:
1013 if len(paths) > 1:
1014 for path in paths:
1014 for path in paths:
1015 folded = self._discoverpath(path, norm, True, None,
1015 folded = self._discoverpath(path, norm, True, None,
1016 self._dirfoldmap)
1016 self._dirfoldmap)
1017 if path != folded:
1017 if path != folded:
1018 results[path] = None
1018 results[path] = None
1019
1019
1020 return results, dirsfound, dirsnotfound
1020 return results, dirsfound, dirsnotfound
1021
1021
1022 def walk(self, match, subrepos, unknown, ignored, full=True):
1022 def walk(self, match, subrepos, unknown, ignored, full=True):
1023 '''
1023 '''
1024 Walk recursively through the directory tree, finding all files
1024 Walk recursively through the directory tree, finding all files
1025 matched by match.
1025 matched by match.
1026
1026
1027 If full is False, maybe skip some known-clean files.
1027 If full is False, maybe skip some known-clean files.
1028
1028
1029 Return a dict mapping filename to stat-like object (either
1029 Return a dict mapping filename to stat-like object (either
1030 mercurial.osutil.stat instance or return value of os.stat()).
1030 mercurial.osutil.stat instance or return value of os.stat()).
1031
1031
1032 '''
1032 '''
1033 # full is a flag that extensions that hook into walk can use -- this
1033 # full is a flag that extensions that hook into walk can use -- this
1034 # implementation doesn't use it at all. This satisfies the contract
1034 # implementation doesn't use it at all. This satisfies the contract
1035 # because we only guarantee a "maybe".
1035 # because we only guarantee a "maybe".
1036
1036
1037 if ignored:
1037 if ignored:
1038 ignore = util.never
1038 ignore = util.never
1039 dirignore = util.never
1039 dirignore = util.never
1040 elif unknown:
1040 elif unknown:
1041 ignore = self._ignore
1041 ignore = self._ignore
1042 dirignore = self._dirignore
1042 dirignore = self._dirignore
1043 else:
1043 else:
1044 # if not unknown and not ignored, drop dir recursion and step 2
1044 # if not unknown and not ignored, drop dir recursion and step 2
1045 ignore = util.always
1045 ignore = util.always
1046 dirignore = util.always
1046 dirignore = util.always
1047
1047
1048 matchfn = match.matchfn
1048 matchfn = match.matchfn
1049 matchalways = match.always()
1049 matchalways = match.always()
1050 matchtdir = match.traversedir
1050 matchtdir = match.traversedir
1051 dmap = self._map
1051 dmap = self._map
1052 listdir = util.listdir
1052 listdir = util.listdir
1053 lstat = os.lstat
1053 lstat = os.lstat
1054 dirkind = stat.S_IFDIR
1054 dirkind = stat.S_IFDIR
1055 regkind = stat.S_IFREG
1055 regkind = stat.S_IFREG
1056 lnkkind = stat.S_IFLNK
1056 lnkkind = stat.S_IFLNK
1057 join = self._join
1057 join = self._join
1058
1058
1059 exact = skipstep3 = False
1059 exact = skipstep3 = False
1060 if match.isexact(): # match.exact
1060 if match.isexact(): # match.exact
1061 exact = True
1061 exact = True
1062 dirignore = util.always # skip step 2
1062 dirignore = util.always # skip step 2
1063 elif match.prefix(): # match.match, no patterns
1063 elif match.prefix(): # match.match, no patterns
1064 skipstep3 = True
1064 skipstep3 = True
1065
1065
1066 if not exact and self._checkcase:
1066 if not exact and self._checkcase:
1067 normalize = self._normalize
1067 normalize = self._normalize
1068 normalizefile = self._normalizefile
1068 normalizefile = self._normalizefile
1069 skipstep3 = False
1069 skipstep3 = False
1070 else:
1070 else:
1071 normalize = self._normalize
1071 normalize = self._normalize
1072 normalizefile = None
1072 normalizefile = None
1073
1073
1074 # step 1: find all explicit files
1074 # step 1: find all explicit files
1075 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1075 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
1076
1076
1077 skipstep3 = skipstep3 and not (work or dirsnotfound)
1077 skipstep3 = skipstep3 and not (work or dirsnotfound)
1078 work = [d for d in work if not dirignore(d[0])]
1078 work = [d for d in work if not dirignore(d[0])]
1079
1079
1080 # step 2: visit subdirectories
1080 # step 2: visit subdirectories
1081 def traverse(work, alreadynormed):
1081 def traverse(work, alreadynormed):
1082 wadd = work.append
1082 wadd = work.append
1083 while work:
1083 while work:
1084 nd = work.pop()
1084 nd = work.pop()
1085 if not match.visitdir(nd):
1085 if not match.visitdir(nd):
1086 continue
1086 continue
1087 skip = None
1087 skip = None
1088 if nd == '.':
1088 if nd == '.':
1089 nd = ''
1089 nd = ''
1090 else:
1090 else:
1091 skip = '.hg'
1091 skip = '.hg'
1092 try:
1092 try:
1093 entries = listdir(join(nd), stat=True, skip=skip)
1093 entries = listdir(join(nd), stat=True, skip=skip)
1094 except OSError as inst:
1094 except OSError as inst:
1095 if inst.errno in (errno.EACCES, errno.ENOENT):
1095 if inst.errno in (errno.EACCES, errno.ENOENT):
1096 match.bad(self.pathto(nd), inst.strerror)
1096 match.bad(self.pathto(nd), inst.strerror)
1097 continue
1097 continue
1098 raise
1098 raise
1099 for f, kind, st in entries:
1099 for f, kind, st in entries:
1100 if normalizefile:
1100 if normalizefile:
1101 # even though f might be a directory, we're only
1101 # even though f might be a directory, we're only
1102 # interested in comparing it to files currently in the
1102 # interested in comparing it to files currently in the
1103 # dmap -- therefore normalizefile is enough
1103 # dmap -- therefore normalizefile is enough
1104 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1104 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1105 True)
1105 True)
1106 else:
1106 else:
1107 nf = nd and (nd + "/" + f) or f
1107 nf = nd and (nd + "/" + f) or f
1108 if nf not in results:
1108 if nf not in results:
1109 if kind == dirkind:
1109 if kind == dirkind:
1110 if not ignore(nf):
1110 if not ignore(nf):
1111 if matchtdir:
1111 if matchtdir:
1112 matchtdir(nf)
1112 matchtdir(nf)
1113 wadd(nf)
1113 wadd(nf)
1114 if nf in dmap and (matchalways or matchfn(nf)):
1114 if nf in dmap and (matchalways or matchfn(nf)):
1115 results[nf] = None
1115 results[nf] = None
1116 elif kind == regkind or kind == lnkkind:
1116 elif kind == regkind or kind == lnkkind:
1117 if nf in dmap:
1117 if nf in dmap:
1118 if matchalways or matchfn(nf):
1118 if matchalways or matchfn(nf):
1119 results[nf] = st
1119 results[nf] = st
1120 elif ((matchalways or matchfn(nf))
1120 elif ((matchalways or matchfn(nf))
1121 and not ignore(nf)):
1121 and not ignore(nf)):
1122 # unknown file -- normalize if necessary
1122 # unknown file -- normalize if necessary
1123 if not alreadynormed:
1123 if not alreadynormed:
1124 nf = normalize(nf, False, True)
1124 nf = normalize(nf, False, True)
1125 results[nf] = st
1125 results[nf] = st
1126 elif nf in dmap and (matchalways or matchfn(nf)):
1126 elif nf in dmap and (matchalways or matchfn(nf)):
1127 results[nf] = None
1127 results[nf] = None
1128
1128
1129 for nd, d in work:
1129 for nd, d in work:
1130 # alreadynormed means that processwork doesn't have to do any
1130 # alreadynormed means that processwork doesn't have to do any
1131 # expensive directory normalization
1131 # expensive directory normalization
1132 alreadynormed = not normalize or nd == d
1132 alreadynormed = not normalize or nd == d
1133 traverse([d], alreadynormed)
1133 traverse([d], alreadynormed)
1134
1134
1135 for s in subrepos:
1135 for s in subrepos:
1136 del results[s]
1136 del results[s]
1137 del results['.hg']
1137 del results['.hg']
1138
1138
1139 # step 3: visit remaining files from dmap
1139 # step 3: visit remaining files from dmap
1140 if not skipstep3 and not exact:
1140 if not skipstep3 and not exact:
1141 # If a dmap file is not in results yet, it was either
1141 # If a dmap file is not in results yet, it was either
1142 # a) not matching matchfn b) ignored, c) missing, or d) under a
1142 # a) not matching matchfn b) ignored, c) missing, or d) under a
1143 # symlink directory.
1143 # symlink directory.
1144 if not results and matchalways:
1144 if not results and matchalways:
1145 visit = [f for f in dmap]
1145 visit = [f for f in dmap]
1146 else:
1146 else:
1147 visit = [f for f in dmap if f not in results and matchfn(f)]
1147 visit = [f for f in dmap if f not in results and matchfn(f)]
1148 visit.sort()
1148 visit.sort()
1149
1149
1150 if unknown:
1150 if unknown:
1151 # unknown == True means we walked all dirs under the roots
1151 # unknown == True means we walked all dirs under the roots
1152 # that wasn't ignored, and everything that matched was stat'ed
1152 # that wasn't ignored, and everything that matched was stat'ed
1153 # and is already in results.
1153 # and is already in results.
1154 # The rest must thus be ignored or under a symlink.
1154 # The rest must thus be ignored or under a symlink.
1155 audit_path = pathutil.pathauditor(self._root)
1155 audit_path = pathutil.pathauditor(self._root, cached=True)
1156
1156
1157 for nf in iter(visit):
1157 for nf in iter(visit):
1158 # If a stat for the same file was already added with a
1158 # If a stat for the same file was already added with a
1159 # different case, don't add one for this, since that would
1159 # different case, don't add one for this, since that would
1160 # make it appear as if the file exists under both names
1160 # make it appear as if the file exists under both names
1161 # on disk.
1161 # on disk.
1162 if (normalizefile and
1162 if (normalizefile and
1163 normalizefile(nf, True, True) in results):
1163 normalizefile(nf, True, True) in results):
1164 results[nf] = None
1164 results[nf] = None
1165 # Report ignored items in the dmap as long as they are not
1165 # Report ignored items in the dmap as long as they are not
1166 # under a symlink directory.
1166 # under a symlink directory.
1167 elif audit_path.check(nf):
1167 elif audit_path.check(nf):
1168 try:
1168 try:
1169 results[nf] = lstat(join(nf))
1169 results[nf] = lstat(join(nf))
1170 # file was just ignored, no links, and exists
1170 # file was just ignored, no links, and exists
1171 except OSError:
1171 except OSError:
1172 # file doesn't exist
1172 # file doesn't exist
1173 results[nf] = None
1173 results[nf] = None
1174 else:
1174 else:
1175 # It's either missing or under a symlink directory
1175 # It's either missing or under a symlink directory
1176 # which we in this case report as missing
1176 # which we in this case report as missing
1177 results[nf] = None
1177 results[nf] = None
1178 else:
1178 else:
1179 # We may not have walked the full directory tree above,
1179 # We may not have walked the full directory tree above,
1180 # so stat and check everything we missed.
1180 # so stat and check everything we missed.
1181 iv = iter(visit)
1181 iv = iter(visit)
1182 for st in util.statfiles([join(i) for i in visit]):
1182 for st in util.statfiles([join(i) for i in visit]):
1183 results[next(iv)] = st
1183 results[next(iv)] = st
1184 return results
1184 return results
1185
1185
1186 def status(self, match, subrepos, ignored, clean, unknown):
1186 def status(self, match, subrepos, ignored, clean, unknown):
1187 '''Determine the status of the working copy relative to the
1187 '''Determine the status of the working copy relative to the
1188 dirstate and return a pair of (unsure, status), where status is of type
1188 dirstate and return a pair of (unsure, status), where status is of type
1189 scmutil.status and:
1189 scmutil.status and:
1190
1190
1191 unsure:
1191 unsure:
1192 files that might have been modified since the dirstate was
1192 files that might have been modified since the dirstate was
1193 written, but need to be read to be sure (size is the same
1193 written, but need to be read to be sure (size is the same
1194 but mtime differs)
1194 but mtime differs)
1195 status.modified:
1195 status.modified:
1196 files that have definitely been modified since the dirstate
1196 files that have definitely been modified since the dirstate
1197 was written (different size or mode)
1197 was written (different size or mode)
1198 status.clean:
1198 status.clean:
1199 files that have definitely not been modified since the
1199 files that have definitely not been modified since the
1200 dirstate was written
1200 dirstate was written
1201 '''
1201 '''
1202 listignored, listclean, listunknown = ignored, clean, unknown
1202 listignored, listclean, listunknown = ignored, clean, unknown
1203 lookup, modified, added, unknown, ignored = [], [], [], [], []
1203 lookup, modified, added, unknown, ignored = [], [], [], [], []
1204 removed, deleted, clean = [], [], []
1204 removed, deleted, clean = [], [], []
1205
1205
1206 dmap = self._map
1206 dmap = self._map
1207 ladd = lookup.append # aka "unsure"
1207 ladd = lookup.append # aka "unsure"
1208 madd = modified.append
1208 madd = modified.append
1209 aadd = added.append
1209 aadd = added.append
1210 uadd = unknown.append
1210 uadd = unknown.append
1211 iadd = ignored.append
1211 iadd = ignored.append
1212 radd = removed.append
1212 radd = removed.append
1213 dadd = deleted.append
1213 dadd = deleted.append
1214 cadd = clean.append
1214 cadd = clean.append
1215 mexact = match.exact
1215 mexact = match.exact
1216 dirignore = self._dirignore
1216 dirignore = self._dirignore
1217 checkexec = self._checkexec
1217 checkexec = self._checkexec
1218 copymap = self._copymap
1218 copymap = self._copymap
1219 lastnormaltime = self._lastnormaltime
1219 lastnormaltime = self._lastnormaltime
1220
1220
1221 # We need to do full walks when either
1221 # We need to do full walks when either
1222 # - we're listing all clean files, or
1222 # - we're listing all clean files, or
1223 # - match.traversedir does something, because match.traversedir should
1223 # - match.traversedir does something, because match.traversedir should
1224 # be called for every dir in the working dir
1224 # be called for every dir in the working dir
1225 full = listclean or match.traversedir is not None
1225 full = listclean or match.traversedir is not None
1226 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1226 for fn, st in self.walk(match, subrepos, listunknown, listignored,
1227 full=full).iteritems():
1227 full=full).iteritems():
1228 if fn not in dmap:
1228 if fn not in dmap:
1229 if (listignored or mexact(fn)) and dirignore(fn):
1229 if (listignored or mexact(fn)) and dirignore(fn):
1230 if listignored:
1230 if listignored:
1231 iadd(fn)
1231 iadd(fn)
1232 else:
1232 else:
1233 uadd(fn)
1233 uadd(fn)
1234 continue
1234 continue
1235
1235
1236 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1236 # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
1237 # written like that for performance reasons. dmap[fn] is not a
1237 # written like that for performance reasons. dmap[fn] is not a
1238 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1238 # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
1239 # opcode has fast paths when the value to be unpacked is a tuple or
1239 # opcode has fast paths when the value to be unpacked is a tuple or
1240 # a list, but falls back to creating a full-fledged iterator in
1240 # a list, but falls back to creating a full-fledged iterator in
1241 # general. That is much slower than simply accessing and storing the
1241 # general. That is much slower than simply accessing and storing the
1242 # tuple members one by one.
1242 # tuple members one by one.
1243 t = dmap[fn]
1243 t = dmap[fn]
1244 state = t[0]
1244 state = t[0]
1245 mode = t[1]
1245 mode = t[1]
1246 size = t[2]
1246 size = t[2]
1247 time = t[3]
1247 time = t[3]
1248
1248
1249 if not st and state in "nma":
1249 if not st and state in "nma":
1250 dadd(fn)
1250 dadd(fn)
1251 elif state == 'n':
1251 elif state == 'n':
1252 if (size >= 0 and
1252 if (size >= 0 and
1253 ((size != st.st_size and size != st.st_size & _rangemask)
1253 ((size != st.st_size and size != st.st_size & _rangemask)
1254 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1254 or ((mode ^ st.st_mode) & 0o100 and checkexec))
1255 or size == -2 # other parent
1255 or size == -2 # other parent
1256 or fn in copymap):
1256 or fn in copymap):
1257 madd(fn)
1257 madd(fn)
1258 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1258 elif time != st.st_mtime and time != st.st_mtime & _rangemask:
1259 ladd(fn)
1259 ladd(fn)
1260 elif st.st_mtime == lastnormaltime:
1260 elif st.st_mtime == lastnormaltime:
1261 # fn may have just been marked as normal and it may have
1261 # fn may have just been marked as normal and it may have
1262 # changed in the same second without changing its size.
1262 # changed in the same second without changing its size.
1263 # This can happen if we quickly do multiple commits.
1263 # This can happen if we quickly do multiple commits.
1264 # Force lookup, so we don't miss such a racy file change.
1264 # Force lookup, so we don't miss such a racy file change.
1265 ladd(fn)
1265 ladd(fn)
1266 elif listclean:
1266 elif listclean:
1267 cadd(fn)
1267 cadd(fn)
1268 elif state == 'm':
1268 elif state == 'm':
1269 madd(fn)
1269 madd(fn)
1270 elif state == 'a':
1270 elif state == 'a':
1271 aadd(fn)
1271 aadd(fn)
1272 elif state == 'r':
1272 elif state == 'r':
1273 radd(fn)
1273 radd(fn)
1274
1274
1275 return (lookup, scmutil.status(modified, added, removed, deleted,
1275 return (lookup, scmutil.status(modified, added, removed, deleted,
1276 unknown, ignored, clean))
1276 unknown, ignored, clean))
1277
1277
1278 def matches(self, match):
1278 def matches(self, match):
1279 '''
1279 '''
1280 return files in the dirstate (in whatever state) filtered by match
1280 return files in the dirstate (in whatever state) filtered by match
1281 '''
1281 '''
1282 dmap = self._map
1282 dmap = self._map
1283 if match.always():
1283 if match.always():
1284 return dmap.keys()
1284 return dmap.keys()
1285 files = match.files()
1285 files = match.files()
1286 if match.isexact():
1286 if match.isexact():
1287 # fast path -- filter the other way around, since typically files is
1287 # fast path -- filter the other way around, since typically files is
1288 # much smaller than dmap
1288 # much smaller than dmap
1289 return [f for f in files if f in dmap]
1289 return [f for f in files if f in dmap]
1290 if match.prefix() and all(fn in dmap for fn in files):
1290 if match.prefix() and all(fn in dmap for fn in files):
1291 # fast path -- all the values are known to be files, so just return
1291 # fast path -- all the values are known to be files, so just return
1292 # that
1292 # that
1293 return list(files)
1293 return list(files)
1294 return [f for f in dmap if match(f)]
1294 return [f for f in dmap if match(f)]
1295
1295
1296 def _actualfilename(self, tr):
1296 def _actualfilename(self, tr):
1297 if tr:
1297 if tr:
1298 return self._pendingfilename
1298 return self._pendingfilename
1299 else:
1299 else:
1300 return self._filename
1300 return self._filename
1301
1301
1302 def savebackup(self, tr, backupname):
1302 def savebackup(self, tr, backupname):
1303 '''Save current dirstate into backup file'''
1303 '''Save current dirstate into backup file'''
1304 filename = self._actualfilename(tr)
1304 filename = self._actualfilename(tr)
1305 assert backupname != filename
1305 assert backupname != filename
1306
1306
1307 # use '_writedirstate' instead of 'write' to write changes certainly,
1307 # use '_writedirstate' instead of 'write' to write changes certainly,
1308 # because the latter omits writing out if transaction is running.
1308 # because the latter omits writing out if transaction is running.
1309 # output file will be used to create backup of dirstate at this point.
1309 # output file will be used to create backup of dirstate at this point.
1310 if self._dirty or not self._opener.exists(filename):
1310 if self._dirty or not self._opener.exists(filename):
1311 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1311 self._writedirstate(self._opener(filename, "w", atomictemp=True,
1312 checkambig=True))
1312 checkambig=True))
1313
1313
1314 if tr:
1314 if tr:
1315 # ensure that subsequent tr.writepending returns True for
1315 # ensure that subsequent tr.writepending returns True for
1316 # changes written out above, even if dirstate is never
1316 # changes written out above, even if dirstate is never
1317 # changed after this
1317 # changed after this
1318 tr.addfilegenerator('dirstate', (self._filename,),
1318 tr.addfilegenerator('dirstate', (self._filename,),
1319 self._writedirstate, location='plain')
1319 self._writedirstate, location='plain')
1320
1320
1321 # ensure that pending file written above is unlinked at
1321 # ensure that pending file written above is unlinked at
1322 # failure, even if tr.writepending isn't invoked until the
1322 # failure, even if tr.writepending isn't invoked until the
1323 # end of this transaction
1323 # end of this transaction
1324 tr.registertmp(filename, location='plain')
1324 tr.registertmp(filename, location='plain')
1325
1325
1326 self._opener.tryunlink(backupname)
1326 self._opener.tryunlink(backupname)
1327 # hardlink backup is okay because _writedirstate is always called
1327 # hardlink backup is okay because _writedirstate is always called
1328 # with an "atomictemp=True" file.
1328 # with an "atomictemp=True" file.
1329 util.copyfile(self._opener.join(filename),
1329 util.copyfile(self._opener.join(filename),
1330 self._opener.join(backupname), hardlink=True)
1330 self._opener.join(backupname), hardlink=True)
1331
1331
1332 def restorebackup(self, tr, backupname):
1332 def restorebackup(self, tr, backupname):
1333 '''Restore dirstate by backup file'''
1333 '''Restore dirstate by backup file'''
1334 # this "invalidate()" prevents "wlock.release()" from writing
1334 # this "invalidate()" prevents "wlock.release()" from writing
1335 # changes of dirstate out after restoring from backup file
1335 # changes of dirstate out after restoring from backup file
1336 self.invalidate()
1336 self.invalidate()
1337 filename = self._actualfilename(tr)
1337 filename = self._actualfilename(tr)
1338 self._opener.rename(backupname, filename, checkambig=True)
1338 self._opener.rename(backupname, filename, checkambig=True)
1339
1339
1340 def clearbackup(self, tr, backupname):
1340 def clearbackup(self, tr, backupname):
1341 '''Clear backup file'''
1341 '''Clear backup file'''
1342 self._opener.unlink(backupname)
1342 self._opener.unlink(backupname)
@@ -1,2264 +1,2265 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import inspect
12 import inspect
13 import os
13 import os
14 import random
14 import random
15 import time
15 import time
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 short,
22 short,
23 )
23 )
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 branchmap,
26 branchmap,
27 bundle2,
27 bundle2,
28 changegroup,
28 changegroup,
29 changelog,
29 changelog,
30 color,
30 color,
31 context,
31 context,
32 dirstate,
32 dirstate,
33 dirstateguard,
33 dirstateguard,
34 encoding,
34 encoding,
35 error,
35 error,
36 exchange,
36 exchange,
37 extensions,
37 extensions,
38 filelog,
38 filelog,
39 hook,
39 hook,
40 lock as lockmod,
40 lock as lockmod,
41 manifest,
41 manifest,
42 match as matchmod,
42 match as matchmod,
43 merge as mergemod,
43 merge as mergemod,
44 mergeutil,
44 mergeutil,
45 namespaces,
45 namespaces,
46 obsolete,
46 obsolete,
47 pathutil,
47 pathutil,
48 peer,
48 peer,
49 phases,
49 phases,
50 pushkey,
50 pushkey,
51 pycompat,
51 pycompat,
52 repoview,
52 repoview,
53 revset,
53 revset,
54 revsetlang,
54 revsetlang,
55 scmutil,
55 scmutil,
56 sparse,
56 sparse,
57 store,
57 store,
58 subrepo,
58 subrepo,
59 tags as tagsmod,
59 tags as tagsmod,
60 transaction,
60 transaction,
61 txnutil,
61 txnutil,
62 util,
62 util,
63 vfs as vfsmod,
63 vfs as vfsmod,
64 )
64 )
65
65
66 release = lockmod.release
66 release = lockmod.release
67 urlerr = util.urlerr
67 urlerr = util.urlerr
68 urlreq = util.urlreq
68 urlreq = util.urlreq
69
69
70 # set of (path, vfs-location) tuples. vfs-location is:
70 # set of (path, vfs-location) tuples. vfs-location is:
71 # - 'plain for vfs relative paths
71 # - 'plain for vfs relative paths
72 # - '' for svfs relative paths
72 # - '' for svfs relative paths
73 _cachedfiles = set()
73 _cachedfiles = set()
74
74
75 class _basefilecache(scmutil.filecache):
75 class _basefilecache(scmutil.filecache):
76 """All filecache usage on repo are done for logic that should be unfiltered
76 """All filecache usage on repo are done for logic that should be unfiltered
77 """
77 """
78 def __get__(self, repo, type=None):
78 def __get__(self, repo, type=None):
79 if repo is None:
79 if repo is None:
80 return self
80 return self
81 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
81 return super(_basefilecache, self).__get__(repo.unfiltered(), type)
82 def __set__(self, repo, value):
82 def __set__(self, repo, value):
83 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
83 return super(_basefilecache, self).__set__(repo.unfiltered(), value)
84 def __delete__(self, repo):
84 def __delete__(self, repo):
85 return super(_basefilecache, self).__delete__(repo.unfiltered())
85 return super(_basefilecache, self).__delete__(repo.unfiltered())
86
86
87 class repofilecache(_basefilecache):
87 class repofilecache(_basefilecache):
88 """filecache for files in .hg but outside of .hg/store"""
88 """filecache for files in .hg but outside of .hg/store"""
89 def __init__(self, *paths):
89 def __init__(self, *paths):
90 super(repofilecache, self).__init__(*paths)
90 super(repofilecache, self).__init__(*paths)
91 for path in paths:
91 for path in paths:
92 _cachedfiles.add((path, 'plain'))
92 _cachedfiles.add((path, 'plain'))
93
93
94 def join(self, obj, fname):
94 def join(self, obj, fname):
95 return obj.vfs.join(fname)
95 return obj.vfs.join(fname)
96
96
97 class storecache(_basefilecache):
97 class storecache(_basefilecache):
98 """filecache for files in the store"""
98 """filecache for files in the store"""
99 def __init__(self, *paths):
99 def __init__(self, *paths):
100 super(storecache, self).__init__(*paths)
100 super(storecache, self).__init__(*paths)
101 for path in paths:
101 for path in paths:
102 _cachedfiles.add((path, ''))
102 _cachedfiles.add((path, ''))
103
103
104 def join(self, obj, fname):
104 def join(self, obj, fname):
105 return obj.sjoin(fname)
105 return obj.sjoin(fname)
106
106
107 def isfilecached(repo, name):
107 def isfilecached(repo, name):
108 """check if a repo has already cached "name" filecache-ed property
108 """check if a repo has already cached "name" filecache-ed property
109
109
110 This returns (cachedobj-or-None, iscached) tuple.
110 This returns (cachedobj-or-None, iscached) tuple.
111 """
111 """
112 cacheentry = repo.unfiltered()._filecache.get(name, None)
112 cacheentry = repo.unfiltered()._filecache.get(name, None)
113 if not cacheentry:
113 if not cacheentry:
114 return None, False
114 return None, False
115 return cacheentry.obj, True
115 return cacheentry.obj, True
116
116
117 class unfilteredpropertycache(util.propertycache):
117 class unfilteredpropertycache(util.propertycache):
118 """propertycache that apply to unfiltered repo only"""
118 """propertycache that apply to unfiltered repo only"""
119
119
120 def __get__(self, repo, type=None):
120 def __get__(self, repo, type=None):
121 unfi = repo.unfiltered()
121 unfi = repo.unfiltered()
122 if unfi is repo:
122 if unfi is repo:
123 return super(unfilteredpropertycache, self).__get__(unfi)
123 return super(unfilteredpropertycache, self).__get__(unfi)
124 return getattr(unfi, self.name)
124 return getattr(unfi, self.name)
125
125
126 class filteredpropertycache(util.propertycache):
126 class filteredpropertycache(util.propertycache):
127 """propertycache that must take filtering in account"""
127 """propertycache that must take filtering in account"""
128
128
129 def cachevalue(self, obj, value):
129 def cachevalue(self, obj, value):
130 object.__setattr__(obj, self.name, value)
130 object.__setattr__(obj, self.name, value)
131
131
132
132
133 def hasunfilteredcache(repo, name):
133 def hasunfilteredcache(repo, name):
134 """check if a repo has an unfilteredpropertycache value for <name>"""
134 """check if a repo has an unfilteredpropertycache value for <name>"""
135 return name in vars(repo.unfiltered())
135 return name in vars(repo.unfiltered())
136
136
137 def unfilteredmethod(orig):
137 def unfilteredmethod(orig):
138 """decorate method that always need to be run on unfiltered version"""
138 """decorate method that always need to be run on unfiltered version"""
139 def wrapper(repo, *args, **kwargs):
139 def wrapper(repo, *args, **kwargs):
140 return orig(repo.unfiltered(), *args, **kwargs)
140 return orig(repo.unfiltered(), *args, **kwargs)
141 return wrapper
141 return wrapper
142
142
143 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
143 moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
144 'unbundle'}
144 'unbundle'}
145 legacycaps = moderncaps.union({'changegroupsubset'})
145 legacycaps = moderncaps.union({'changegroupsubset'})
146
146
147 class localpeer(peer.peerrepository):
147 class localpeer(peer.peerrepository):
148 '''peer for a local repo; reflects only the most recent API'''
148 '''peer for a local repo; reflects only the most recent API'''
149
149
150 def __init__(self, repo, caps=None):
150 def __init__(self, repo, caps=None):
151 if caps is None:
151 if caps is None:
152 caps = moderncaps.copy()
152 caps = moderncaps.copy()
153 peer.peerrepository.__init__(self)
153 peer.peerrepository.__init__(self)
154 self._repo = repo.filtered('served')
154 self._repo = repo.filtered('served')
155 self.ui = repo.ui
155 self.ui = repo.ui
156 self._caps = repo._restrictcapabilities(caps)
156 self._caps = repo._restrictcapabilities(caps)
157
157
158 def close(self):
158 def close(self):
159 self._repo.close()
159 self._repo.close()
160
160
161 def _capabilities(self):
161 def _capabilities(self):
162 return self._caps
162 return self._caps
163
163
164 def local(self):
164 def local(self):
165 return self._repo
165 return self._repo
166
166
167 def canpush(self):
167 def canpush(self):
168 return True
168 return True
169
169
170 def url(self):
170 def url(self):
171 return self._repo.url()
171 return self._repo.url()
172
172
173 def lookup(self, key):
173 def lookup(self, key):
174 return self._repo.lookup(key)
174 return self._repo.lookup(key)
175
175
176 def branchmap(self):
176 def branchmap(self):
177 return self._repo.branchmap()
177 return self._repo.branchmap()
178
178
179 def heads(self):
179 def heads(self):
180 return self._repo.heads()
180 return self._repo.heads()
181
181
182 def known(self, nodes):
182 def known(self, nodes):
183 return self._repo.known(nodes)
183 return self._repo.known(nodes)
184
184
185 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
185 def getbundle(self, source, heads=None, common=None, bundlecaps=None,
186 **kwargs):
186 **kwargs):
187 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
187 chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
188 common=common, bundlecaps=bundlecaps,
188 common=common, bundlecaps=bundlecaps,
189 **kwargs)
189 **kwargs)
190 cb = util.chunkbuffer(chunks)
190 cb = util.chunkbuffer(chunks)
191
191
192 if exchange.bundle2requested(bundlecaps):
192 if exchange.bundle2requested(bundlecaps):
193 # When requesting a bundle2, getbundle returns a stream to make the
193 # When requesting a bundle2, getbundle returns a stream to make the
194 # wire level function happier. We need to build a proper object
194 # wire level function happier. We need to build a proper object
195 # from it in local peer.
195 # from it in local peer.
196 return bundle2.getunbundler(self.ui, cb)
196 return bundle2.getunbundler(self.ui, cb)
197 else:
197 else:
198 return changegroup.getunbundler('01', cb, None)
198 return changegroup.getunbundler('01', cb, None)
199
199
200 # TODO We might want to move the next two calls into legacypeer and add
200 # TODO We might want to move the next two calls into legacypeer and add
201 # unbundle instead.
201 # unbundle instead.
202
202
203 def unbundle(self, cg, heads, url):
203 def unbundle(self, cg, heads, url):
204 """apply a bundle on a repo
204 """apply a bundle on a repo
205
205
206 This function handles the repo locking itself."""
206 This function handles the repo locking itself."""
207 try:
207 try:
208 try:
208 try:
209 cg = exchange.readbundle(self.ui, cg, None)
209 cg = exchange.readbundle(self.ui, cg, None)
210 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
210 ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
211 if util.safehasattr(ret, 'getchunks'):
211 if util.safehasattr(ret, 'getchunks'):
212 # This is a bundle20 object, turn it into an unbundler.
212 # This is a bundle20 object, turn it into an unbundler.
213 # This little dance should be dropped eventually when the
213 # This little dance should be dropped eventually when the
214 # API is finally improved.
214 # API is finally improved.
215 stream = util.chunkbuffer(ret.getchunks())
215 stream = util.chunkbuffer(ret.getchunks())
216 ret = bundle2.getunbundler(self.ui, stream)
216 ret = bundle2.getunbundler(self.ui, stream)
217 return ret
217 return ret
218 except Exception as exc:
218 except Exception as exc:
219 # If the exception contains output salvaged from a bundle2
219 # If the exception contains output salvaged from a bundle2
220 # reply, we need to make sure it is printed before continuing
220 # reply, we need to make sure it is printed before continuing
221 # to fail. So we build a bundle2 with such output and consume
221 # to fail. So we build a bundle2 with such output and consume
222 # it directly.
222 # it directly.
223 #
223 #
224 # This is not very elegant but allows a "simple" solution for
224 # This is not very elegant but allows a "simple" solution for
225 # issue4594
225 # issue4594
226 output = getattr(exc, '_bundle2salvagedoutput', ())
226 output = getattr(exc, '_bundle2salvagedoutput', ())
227 if output:
227 if output:
228 bundler = bundle2.bundle20(self._repo.ui)
228 bundler = bundle2.bundle20(self._repo.ui)
229 for out in output:
229 for out in output:
230 bundler.addpart(out)
230 bundler.addpart(out)
231 stream = util.chunkbuffer(bundler.getchunks())
231 stream = util.chunkbuffer(bundler.getchunks())
232 b = bundle2.getunbundler(self.ui, stream)
232 b = bundle2.getunbundler(self.ui, stream)
233 bundle2.processbundle(self._repo, b)
233 bundle2.processbundle(self._repo, b)
234 raise
234 raise
235 except error.PushRaced as exc:
235 except error.PushRaced as exc:
236 raise error.ResponseError(_('push failed:'), str(exc))
236 raise error.ResponseError(_('push failed:'), str(exc))
237
237
238 def pushkey(self, namespace, key, old, new):
238 def pushkey(self, namespace, key, old, new):
239 return self._repo.pushkey(namespace, key, old, new)
239 return self._repo.pushkey(namespace, key, old, new)
240
240
241 def listkeys(self, namespace):
241 def listkeys(self, namespace):
242 return self._repo.listkeys(namespace)
242 return self._repo.listkeys(namespace)
243
243
244 def debugwireargs(self, one, two, three=None, four=None, five=None):
244 def debugwireargs(self, one, two, three=None, four=None, five=None):
245 '''used to test argument passing over the wire'''
245 '''used to test argument passing over the wire'''
246 return "%s %s %s %s %s" % (one, two, three, four, five)
246 return "%s %s %s %s %s" % (one, two, three, four, five)
247
247
248 class locallegacypeer(localpeer):
248 class locallegacypeer(localpeer):
249 '''peer extension which implements legacy methods too; used for tests with
249 '''peer extension which implements legacy methods too; used for tests with
250 restricted capabilities'''
250 restricted capabilities'''
251
251
252 def __init__(self, repo):
252 def __init__(self, repo):
253 localpeer.__init__(self, repo, caps=legacycaps)
253 localpeer.__init__(self, repo, caps=legacycaps)
254
254
255 def branches(self, nodes):
255 def branches(self, nodes):
256 return self._repo.branches(nodes)
256 return self._repo.branches(nodes)
257
257
258 def between(self, pairs):
258 def between(self, pairs):
259 return self._repo.between(pairs)
259 return self._repo.between(pairs)
260
260
261 def changegroup(self, basenodes, source):
261 def changegroup(self, basenodes, source):
262 return changegroup.changegroup(self._repo, basenodes, source)
262 return changegroup.changegroup(self._repo, basenodes, source)
263
263
264 def changegroupsubset(self, bases, heads, source):
264 def changegroupsubset(self, bases, heads, source):
265 return changegroup.changegroupsubset(self._repo, bases, heads, source)
265 return changegroup.changegroupsubset(self._repo, bases, heads, source)
266
266
267 # Increment the sub-version when the revlog v2 format changes to lock out old
267 # Increment the sub-version when the revlog v2 format changes to lock out old
268 # clients.
268 # clients.
269 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
269 REVLOGV2_REQUIREMENT = 'exp-revlogv2.0'
270
270
271 class localrepository(object):
271 class localrepository(object):
272
272
273 supportedformats = {
273 supportedformats = {
274 'revlogv1',
274 'revlogv1',
275 'generaldelta',
275 'generaldelta',
276 'treemanifest',
276 'treemanifest',
277 'manifestv2',
277 'manifestv2',
278 REVLOGV2_REQUIREMENT,
278 REVLOGV2_REQUIREMENT,
279 }
279 }
280 _basesupported = supportedformats | {
280 _basesupported = supportedformats | {
281 'store',
281 'store',
282 'fncache',
282 'fncache',
283 'shared',
283 'shared',
284 'relshared',
284 'relshared',
285 'dotencode',
285 'dotencode',
286 'exp-sparse',
286 'exp-sparse',
287 }
287 }
288 openerreqs = {
288 openerreqs = {
289 'revlogv1',
289 'revlogv1',
290 'generaldelta',
290 'generaldelta',
291 'treemanifest',
291 'treemanifest',
292 'manifestv2',
292 'manifestv2',
293 }
293 }
294
294
295 # a list of (ui, featureset) functions.
295 # a list of (ui, featureset) functions.
296 # only functions defined in module of enabled extensions are invoked
296 # only functions defined in module of enabled extensions are invoked
297 featuresetupfuncs = set()
297 featuresetupfuncs = set()
298
298
299 # list of prefix for file which can be written without 'wlock'
299 # list of prefix for file which can be written without 'wlock'
300 # Extensions should extend this list when needed
300 # Extensions should extend this list when needed
301 _wlockfreeprefix = {
301 _wlockfreeprefix = {
302 # We migh consider requiring 'wlock' for the next
302 # We migh consider requiring 'wlock' for the next
303 # two, but pretty much all the existing code assume
303 # two, but pretty much all the existing code assume
304 # wlock is not needed so we keep them excluded for
304 # wlock is not needed so we keep them excluded for
305 # now.
305 # now.
306 'hgrc',
306 'hgrc',
307 'requires',
307 'requires',
308 # XXX cache is a complicatged business someone
308 # XXX cache is a complicatged business someone
309 # should investigate this in depth at some point
309 # should investigate this in depth at some point
310 'cache/',
310 'cache/',
311 # XXX shouldn't be dirstate covered by the wlock?
311 # XXX shouldn't be dirstate covered by the wlock?
312 'dirstate',
312 'dirstate',
313 # XXX bisect was still a bit too messy at the time
313 # XXX bisect was still a bit too messy at the time
314 # this changeset was introduced. Someone should fix
314 # this changeset was introduced. Someone should fix
315 # the remainig bit and drop this line
315 # the remainig bit and drop this line
316 'bisect.state',
316 'bisect.state',
317 }
317 }
318
318
319 def __init__(self, baseui, path, create=False):
319 def __init__(self, baseui, path, create=False):
320 self.requirements = set()
320 self.requirements = set()
321 self.filtername = None
321 self.filtername = None
322 # wvfs: rooted at the repository root, used to access the working copy
322 # wvfs: rooted at the repository root, used to access the working copy
323 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
323 self.wvfs = vfsmod.vfs(path, expandpath=True, realpath=True)
324 # vfs: rooted at .hg, used to access repo files outside of .hg/store
324 # vfs: rooted at .hg, used to access repo files outside of .hg/store
325 self.vfs = None
325 self.vfs = None
326 # svfs: usually rooted at .hg/store, used to access repository history
326 # svfs: usually rooted at .hg/store, used to access repository history
327 # If this is a shared repository, this vfs may point to another
327 # If this is a shared repository, this vfs may point to another
328 # repository's .hg/store directory.
328 # repository's .hg/store directory.
329 self.svfs = None
329 self.svfs = None
330 self.root = self.wvfs.base
330 self.root = self.wvfs.base
331 self.path = self.wvfs.join(".hg")
331 self.path = self.wvfs.join(".hg")
332 self.origroot = path
332 self.origroot = path
333 # These auditor are not used by the vfs,
333 # These auditor are not used by the vfs,
334 # only used when writing this comment: basectx.match
334 # only used when writing this comment: basectx.match
335 self.auditor = pathutil.pathauditor(self.root, self._checknested)
335 self.auditor = pathutil.pathauditor(self.root, self._checknested)
336 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
336 self.nofsauditor = pathutil.pathauditor(self.root, self._checknested,
337 realfs=False)
337 realfs=False, cached=True)
338 self.baseui = baseui
338 self.baseui = baseui
339 self.ui = baseui.copy()
339 self.ui = baseui.copy()
340 self.ui.copy = baseui.copy # prevent copying repo configuration
340 self.ui.copy = baseui.copy # prevent copying repo configuration
341 self.vfs = vfsmod.vfs(self.path)
341 self.vfs = vfsmod.vfs(self.path, cacheaudited=True)
342 if (self.ui.configbool('devel', 'all-warnings') or
342 if (self.ui.configbool('devel', 'all-warnings') or
343 self.ui.configbool('devel', 'check-locks')):
343 self.ui.configbool('devel', 'check-locks')):
344 self.vfs.audit = self._getvfsward(self.vfs.audit)
344 self.vfs.audit = self._getvfsward(self.vfs.audit)
345 # A list of callback to shape the phase if no data were found.
345 # A list of callback to shape the phase if no data were found.
346 # Callback are in the form: func(repo, roots) --> processed root.
346 # Callback are in the form: func(repo, roots) --> processed root.
347 # This list it to be filled by extension during repo setup
347 # This list it to be filled by extension during repo setup
348 self._phasedefaults = []
348 self._phasedefaults = []
349 try:
349 try:
350 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
350 self.ui.readconfig(self.vfs.join("hgrc"), self.root)
351 self._loadextensions()
351 self._loadextensions()
352 except IOError:
352 except IOError:
353 pass
353 pass
354
354
355 if self.featuresetupfuncs:
355 if self.featuresetupfuncs:
356 self.supported = set(self._basesupported) # use private copy
356 self.supported = set(self._basesupported) # use private copy
357 extmods = set(m.__name__ for n, m
357 extmods = set(m.__name__ for n, m
358 in extensions.extensions(self.ui))
358 in extensions.extensions(self.ui))
359 for setupfunc in self.featuresetupfuncs:
359 for setupfunc in self.featuresetupfuncs:
360 if setupfunc.__module__ in extmods:
360 if setupfunc.__module__ in extmods:
361 setupfunc(self.ui, self.supported)
361 setupfunc(self.ui, self.supported)
362 else:
362 else:
363 self.supported = self._basesupported
363 self.supported = self._basesupported
364 color.setup(self.ui)
364 color.setup(self.ui)
365
365
366 # Add compression engines.
366 # Add compression engines.
367 for name in util.compengines:
367 for name in util.compengines:
368 engine = util.compengines[name]
368 engine = util.compengines[name]
369 if engine.revlogheader():
369 if engine.revlogheader():
370 self.supported.add('exp-compression-%s' % name)
370 self.supported.add('exp-compression-%s' % name)
371
371
372 if not self.vfs.isdir():
372 if not self.vfs.isdir():
373 if create:
373 if create:
374 self.requirements = newreporequirements(self)
374 self.requirements = newreporequirements(self)
375
375
376 if not self.wvfs.exists():
376 if not self.wvfs.exists():
377 self.wvfs.makedirs()
377 self.wvfs.makedirs()
378 self.vfs.makedir(notindexed=True)
378 self.vfs.makedir(notindexed=True)
379
379
380 if 'store' in self.requirements:
380 if 'store' in self.requirements:
381 self.vfs.mkdir("store")
381 self.vfs.mkdir("store")
382
382
383 # create an invalid changelog
383 # create an invalid changelog
384 self.vfs.append(
384 self.vfs.append(
385 "00changelog.i",
385 "00changelog.i",
386 '\0\0\0\2' # represents revlogv2
386 '\0\0\0\2' # represents revlogv2
387 ' dummy changelog to prevent using the old repo layout'
387 ' dummy changelog to prevent using the old repo layout'
388 )
388 )
389 else:
389 else:
390 raise error.RepoError(_("repository %s not found") % path)
390 raise error.RepoError(_("repository %s not found") % path)
391 elif create:
391 elif create:
392 raise error.RepoError(_("repository %s already exists") % path)
392 raise error.RepoError(_("repository %s already exists") % path)
393 else:
393 else:
394 try:
394 try:
395 self.requirements = scmutil.readrequires(
395 self.requirements = scmutil.readrequires(
396 self.vfs, self.supported)
396 self.vfs, self.supported)
397 except IOError as inst:
397 except IOError as inst:
398 if inst.errno != errno.ENOENT:
398 if inst.errno != errno.ENOENT:
399 raise
399 raise
400
400
401 cachepath = self.vfs.join('cache')
401 cachepath = self.vfs.join('cache')
402 self.sharedpath = self.path
402 self.sharedpath = self.path
403 try:
403 try:
404 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
404 sharedpath = self.vfs.read("sharedpath").rstrip('\n')
405 if 'relshared' in self.requirements:
405 if 'relshared' in self.requirements:
406 sharedpath = self.vfs.join(sharedpath)
406 sharedpath = self.vfs.join(sharedpath)
407 vfs = vfsmod.vfs(sharedpath, realpath=True)
407 vfs = vfsmod.vfs(sharedpath, realpath=True)
408 cachepath = vfs.join('cache')
408 cachepath = vfs.join('cache')
409 s = vfs.base
409 s = vfs.base
410 if not vfs.exists():
410 if not vfs.exists():
411 raise error.RepoError(
411 raise error.RepoError(
412 _('.hg/sharedpath points to nonexistent directory %s') % s)
412 _('.hg/sharedpath points to nonexistent directory %s') % s)
413 self.sharedpath = s
413 self.sharedpath = s
414 except IOError as inst:
414 except IOError as inst:
415 if inst.errno != errno.ENOENT:
415 if inst.errno != errno.ENOENT:
416 raise
416 raise
417
417
418 if 'exp-sparse' in self.requirements and not sparse.enabled:
418 if 'exp-sparse' in self.requirements and not sparse.enabled:
419 raise error.RepoError(_('repository is using sparse feature but '
419 raise error.RepoError(_('repository is using sparse feature but '
420 'sparse is not enabled; enable the '
420 'sparse is not enabled; enable the '
421 '"sparse" extensions to access'))
421 '"sparse" extensions to access'))
422
422
423 self.store = store.store(
423 self.store = store.store(
424 self.requirements, self.sharedpath, vfsmod.vfs)
424 self.requirements, self.sharedpath,
425 lambda base: vfsmod.vfs(base, cacheaudited=True))
425 self.spath = self.store.path
426 self.spath = self.store.path
426 self.svfs = self.store.vfs
427 self.svfs = self.store.vfs
427 self.sjoin = self.store.join
428 self.sjoin = self.store.join
428 self.vfs.createmode = self.store.createmode
429 self.vfs.createmode = self.store.createmode
429 self.cachevfs = vfsmod.vfs(cachepath)
430 self.cachevfs = vfsmod.vfs(cachepath, cacheaudited=True)
430 self.cachevfs.createmode = self.store.createmode
431 self.cachevfs.createmode = self.store.createmode
431 if (self.ui.configbool('devel', 'all-warnings') or
432 if (self.ui.configbool('devel', 'all-warnings') or
432 self.ui.configbool('devel', 'check-locks')):
433 self.ui.configbool('devel', 'check-locks')):
433 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
434 if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
434 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
435 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
435 else: # standard vfs
436 else: # standard vfs
436 self.svfs.audit = self._getsvfsward(self.svfs.audit)
437 self.svfs.audit = self._getsvfsward(self.svfs.audit)
437 self._applyopenerreqs()
438 self._applyopenerreqs()
438 if create:
439 if create:
439 self._writerequirements()
440 self._writerequirements()
440
441
441 self._dirstatevalidatewarned = False
442 self._dirstatevalidatewarned = False
442
443
443 self._branchcaches = {}
444 self._branchcaches = {}
444 self._revbranchcache = None
445 self._revbranchcache = None
445 self.filterpats = {}
446 self.filterpats = {}
446 self._datafilters = {}
447 self._datafilters = {}
447 self._transref = self._lockref = self._wlockref = None
448 self._transref = self._lockref = self._wlockref = None
448
449
449 # A cache for various files under .hg/ that tracks file changes,
450 # A cache for various files under .hg/ that tracks file changes,
450 # (used by the filecache decorator)
451 # (used by the filecache decorator)
451 #
452 #
452 # Maps a property name to its util.filecacheentry
453 # Maps a property name to its util.filecacheentry
453 self._filecache = {}
454 self._filecache = {}
454
455
455 # hold sets of revision to be filtered
456 # hold sets of revision to be filtered
456 # should be cleared when something might have changed the filter value:
457 # should be cleared when something might have changed the filter value:
457 # - new changesets,
458 # - new changesets,
458 # - phase change,
459 # - phase change,
459 # - new obsolescence marker,
460 # - new obsolescence marker,
460 # - working directory parent change,
461 # - working directory parent change,
461 # - bookmark changes
462 # - bookmark changes
462 self.filteredrevcache = {}
463 self.filteredrevcache = {}
463
464
464 # post-dirstate-status hooks
465 # post-dirstate-status hooks
465 self._postdsstatus = []
466 self._postdsstatus = []
466
467
467 # Cache of types representing filtered repos.
468 # Cache of types representing filtered repos.
468 self._filteredrepotypes = weakref.WeakKeyDictionary()
469 self._filteredrepotypes = weakref.WeakKeyDictionary()
469
470
470 # generic mapping between names and nodes
471 # generic mapping between names and nodes
471 self.names = namespaces.namespaces()
472 self.names = namespaces.namespaces()
472
473
473 # Key to signature value.
474 # Key to signature value.
474 self._sparsesignaturecache = {}
475 self._sparsesignaturecache = {}
475 # Signature to cached matcher instance.
476 # Signature to cached matcher instance.
476 self._sparsematchercache = {}
477 self._sparsematchercache = {}
477
478
478 def _getvfsward(self, origfunc):
479 def _getvfsward(self, origfunc):
479 """build a ward for self.vfs"""
480 """build a ward for self.vfs"""
480 rref = weakref.ref(self)
481 rref = weakref.ref(self)
481 def checkvfs(path, mode=None):
482 def checkvfs(path, mode=None):
482 ret = origfunc(path, mode=mode)
483 ret = origfunc(path, mode=mode)
483 repo = rref()
484 repo = rref()
484 if (repo is None
485 if (repo is None
485 or not util.safehasattr(repo, '_wlockref')
486 or not util.safehasattr(repo, '_wlockref')
486 or not util.safehasattr(repo, '_lockref')):
487 or not util.safehasattr(repo, '_lockref')):
487 return
488 return
488 if mode in (None, 'r', 'rb'):
489 if mode in (None, 'r', 'rb'):
489 return
490 return
490 if path.startswith(repo.path):
491 if path.startswith(repo.path):
491 # truncate name relative to the repository (.hg)
492 # truncate name relative to the repository (.hg)
492 path = path[len(repo.path) + 1:]
493 path = path[len(repo.path) + 1:]
493 if path.startswith('cache/'):
494 if path.startswith('cache/'):
494 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
495 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
495 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
496 repo.ui.develwarn(msg % path, stacklevel=2, config="cache-vfs")
496 if path.startswith('journal.'):
497 if path.startswith('journal.'):
497 # journal is covered by 'lock'
498 # journal is covered by 'lock'
498 if repo._currentlock(repo._lockref) is None:
499 if repo._currentlock(repo._lockref) is None:
499 repo.ui.develwarn('write with no lock: "%s"' % path,
500 repo.ui.develwarn('write with no lock: "%s"' % path,
500 stacklevel=2, config='check-locks')
501 stacklevel=2, config='check-locks')
501 elif repo._currentlock(repo._wlockref) is None:
502 elif repo._currentlock(repo._wlockref) is None:
502 # rest of vfs files are covered by 'wlock'
503 # rest of vfs files are covered by 'wlock'
503 #
504 #
504 # exclude special files
505 # exclude special files
505 for prefix in self._wlockfreeprefix:
506 for prefix in self._wlockfreeprefix:
506 if path.startswith(prefix):
507 if path.startswith(prefix):
507 return
508 return
508 repo.ui.develwarn('write with no wlock: "%s"' % path,
509 repo.ui.develwarn('write with no wlock: "%s"' % path,
509 stacklevel=2, config='check-locks')
510 stacklevel=2, config='check-locks')
510 return ret
511 return ret
511 return checkvfs
512 return checkvfs
512
513
513 def _getsvfsward(self, origfunc):
514 def _getsvfsward(self, origfunc):
514 """build a ward for self.svfs"""
515 """build a ward for self.svfs"""
515 rref = weakref.ref(self)
516 rref = weakref.ref(self)
516 def checksvfs(path, mode=None):
517 def checksvfs(path, mode=None):
517 ret = origfunc(path, mode=mode)
518 ret = origfunc(path, mode=mode)
518 repo = rref()
519 repo = rref()
519 if repo is None or not util.safehasattr(repo, '_lockref'):
520 if repo is None or not util.safehasattr(repo, '_lockref'):
520 return
521 return
521 if mode in (None, 'r', 'rb'):
522 if mode in (None, 'r', 'rb'):
522 return
523 return
523 if path.startswith(repo.sharedpath):
524 if path.startswith(repo.sharedpath):
524 # truncate name relative to the repository (.hg)
525 # truncate name relative to the repository (.hg)
525 path = path[len(repo.sharedpath) + 1:]
526 path = path[len(repo.sharedpath) + 1:]
526 if repo._currentlock(repo._lockref) is None:
527 if repo._currentlock(repo._lockref) is None:
527 repo.ui.develwarn('write with no lock: "%s"' % path,
528 repo.ui.develwarn('write with no lock: "%s"' % path,
528 stacklevel=3)
529 stacklevel=3)
529 return ret
530 return ret
530 return checksvfs
531 return checksvfs
531
532
532 def close(self):
533 def close(self):
533 self._writecaches()
534 self._writecaches()
534
535
535 def _loadextensions(self):
536 def _loadextensions(self):
536 extensions.loadall(self.ui)
537 extensions.loadall(self.ui)
537
538
538 def _writecaches(self):
539 def _writecaches(self):
539 if self._revbranchcache:
540 if self._revbranchcache:
540 self._revbranchcache.write()
541 self._revbranchcache.write()
541
542
542 def _restrictcapabilities(self, caps):
543 def _restrictcapabilities(self, caps):
543 if self.ui.configbool('experimental', 'bundle2-advertise'):
544 if self.ui.configbool('experimental', 'bundle2-advertise'):
544 caps = set(caps)
545 caps = set(caps)
545 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
546 capsblob = bundle2.encodecaps(bundle2.getrepocaps(self))
546 caps.add('bundle2=' + urlreq.quote(capsblob))
547 caps.add('bundle2=' + urlreq.quote(capsblob))
547 return caps
548 return caps
548
549
549 def _applyopenerreqs(self):
550 def _applyopenerreqs(self):
550 self.svfs.options = dict((r, 1) for r in self.requirements
551 self.svfs.options = dict((r, 1) for r in self.requirements
551 if r in self.openerreqs)
552 if r in self.openerreqs)
552 # experimental config: format.chunkcachesize
553 # experimental config: format.chunkcachesize
553 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
554 chunkcachesize = self.ui.configint('format', 'chunkcachesize')
554 if chunkcachesize is not None:
555 if chunkcachesize is not None:
555 self.svfs.options['chunkcachesize'] = chunkcachesize
556 self.svfs.options['chunkcachesize'] = chunkcachesize
556 # experimental config: format.maxchainlen
557 # experimental config: format.maxchainlen
557 maxchainlen = self.ui.configint('format', 'maxchainlen')
558 maxchainlen = self.ui.configint('format', 'maxchainlen')
558 if maxchainlen is not None:
559 if maxchainlen is not None:
559 self.svfs.options['maxchainlen'] = maxchainlen
560 self.svfs.options['maxchainlen'] = maxchainlen
560 # experimental config: format.manifestcachesize
561 # experimental config: format.manifestcachesize
561 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
562 manifestcachesize = self.ui.configint('format', 'manifestcachesize')
562 if manifestcachesize is not None:
563 if manifestcachesize is not None:
563 self.svfs.options['manifestcachesize'] = manifestcachesize
564 self.svfs.options['manifestcachesize'] = manifestcachesize
564 # experimental config: format.aggressivemergedeltas
565 # experimental config: format.aggressivemergedeltas
565 aggressivemergedeltas = self.ui.configbool('format',
566 aggressivemergedeltas = self.ui.configbool('format',
566 'aggressivemergedeltas')
567 'aggressivemergedeltas')
567 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
568 self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas
568 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
569 self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui)
569 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
570 chainspan = self.ui.configbytes('experimental', 'maxdeltachainspan', -1)
570 if 0 <= chainspan:
571 if 0 <= chainspan:
571 self.svfs.options['maxdeltachainspan'] = chainspan
572 self.svfs.options['maxdeltachainspan'] = chainspan
572
573
573 for r in self.requirements:
574 for r in self.requirements:
574 if r.startswith('exp-compression-'):
575 if r.startswith('exp-compression-'):
575 self.svfs.options['compengine'] = r[len('exp-compression-'):]
576 self.svfs.options['compengine'] = r[len('exp-compression-'):]
576
577
577 # TODO move "revlogv2" to openerreqs once finalized.
578 # TODO move "revlogv2" to openerreqs once finalized.
578 if REVLOGV2_REQUIREMENT in self.requirements:
579 if REVLOGV2_REQUIREMENT in self.requirements:
579 self.svfs.options['revlogv2'] = True
580 self.svfs.options['revlogv2'] = True
580
581
581 def _writerequirements(self):
582 def _writerequirements(self):
582 scmutil.writerequires(self.vfs, self.requirements)
583 scmutil.writerequires(self.vfs, self.requirements)
583
584
584 def _checknested(self, path):
585 def _checknested(self, path):
585 """Determine if path is a legal nested repository."""
586 """Determine if path is a legal nested repository."""
586 if not path.startswith(self.root):
587 if not path.startswith(self.root):
587 return False
588 return False
588 subpath = path[len(self.root) + 1:]
589 subpath = path[len(self.root) + 1:]
589 normsubpath = util.pconvert(subpath)
590 normsubpath = util.pconvert(subpath)
590
591
591 # XXX: Checking against the current working copy is wrong in
592 # XXX: Checking against the current working copy is wrong in
592 # the sense that it can reject things like
593 # the sense that it can reject things like
593 #
594 #
594 # $ hg cat -r 10 sub/x.txt
595 # $ hg cat -r 10 sub/x.txt
595 #
596 #
596 # if sub/ is no longer a subrepository in the working copy
597 # if sub/ is no longer a subrepository in the working copy
597 # parent revision.
598 # parent revision.
598 #
599 #
599 # However, it can of course also allow things that would have
600 # However, it can of course also allow things that would have
600 # been rejected before, such as the above cat command if sub/
601 # been rejected before, such as the above cat command if sub/
601 # is a subrepository now, but was a normal directory before.
602 # is a subrepository now, but was a normal directory before.
602 # The old path auditor would have rejected by mistake since it
603 # The old path auditor would have rejected by mistake since it
603 # panics when it sees sub/.hg/.
604 # panics when it sees sub/.hg/.
604 #
605 #
605 # All in all, checking against the working copy seems sensible
606 # All in all, checking against the working copy seems sensible
606 # since we want to prevent access to nested repositories on
607 # since we want to prevent access to nested repositories on
607 # the filesystem *now*.
608 # the filesystem *now*.
608 ctx = self[None]
609 ctx = self[None]
609 parts = util.splitpath(subpath)
610 parts = util.splitpath(subpath)
610 while parts:
611 while parts:
611 prefix = '/'.join(parts)
612 prefix = '/'.join(parts)
612 if prefix in ctx.substate:
613 if prefix in ctx.substate:
613 if prefix == normsubpath:
614 if prefix == normsubpath:
614 return True
615 return True
615 else:
616 else:
616 sub = ctx.sub(prefix)
617 sub = ctx.sub(prefix)
617 return sub.checknested(subpath[len(prefix) + 1:])
618 return sub.checknested(subpath[len(prefix) + 1:])
618 else:
619 else:
619 parts.pop()
620 parts.pop()
620 return False
621 return False
621
622
622 def peer(self):
623 def peer(self):
623 return localpeer(self) # not cached to avoid reference cycle
624 return localpeer(self) # not cached to avoid reference cycle
624
625
625 def unfiltered(self):
626 def unfiltered(self):
626 """Return unfiltered version of the repository
627 """Return unfiltered version of the repository
627
628
628 Intended to be overwritten by filtered repo."""
629 Intended to be overwritten by filtered repo."""
629 return self
630 return self
630
631
631 def filtered(self, name):
632 def filtered(self, name):
632 """Return a filtered version of a repository"""
633 """Return a filtered version of a repository"""
633 # Python <3.4 easily leaks types via __mro__. See
634 # Python <3.4 easily leaks types via __mro__. See
634 # https://bugs.python.org/issue17950. We cache dynamically
635 # https://bugs.python.org/issue17950. We cache dynamically
635 # created types so this method doesn't leak on every
636 # created types so this method doesn't leak on every
636 # invocation.
637 # invocation.
637
638
638 key = self.unfiltered().__class__
639 key = self.unfiltered().__class__
639 if key not in self._filteredrepotypes:
640 if key not in self._filteredrepotypes:
640 # Build a new type with the repoview mixin and the base
641 # Build a new type with the repoview mixin and the base
641 # class of this repo. Give it a name containing the
642 # class of this repo. Give it a name containing the
642 # filter name to aid debugging.
643 # filter name to aid debugging.
643 bases = (repoview.repoview, key)
644 bases = (repoview.repoview, key)
644 cls = type(r'%sfilteredrepo' % name, bases, {})
645 cls = type(r'%sfilteredrepo' % name, bases, {})
645 self._filteredrepotypes[key] = cls
646 self._filteredrepotypes[key] = cls
646
647
647 return self._filteredrepotypes[key](self, name)
648 return self._filteredrepotypes[key](self, name)
648
649
649 @repofilecache('bookmarks', 'bookmarks.current')
650 @repofilecache('bookmarks', 'bookmarks.current')
650 def _bookmarks(self):
651 def _bookmarks(self):
651 return bookmarks.bmstore(self)
652 return bookmarks.bmstore(self)
652
653
653 @property
654 @property
654 def _activebookmark(self):
655 def _activebookmark(self):
655 return self._bookmarks.active
656 return self._bookmarks.active
656
657
657 # _phaserevs and _phasesets depend on changelog. what we need is to
658 # _phaserevs and _phasesets depend on changelog. what we need is to
658 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
659 # call _phasecache.invalidate() if '00changelog.i' was changed, but it
659 # can't be easily expressed in filecache mechanism.
660 # can't be easily expressed in filecache mechanism.
660 @storecache('phaseroots', '00changelog.i')
661 @storecache('phaseroots', '00changelog.i')
661 def _phasecache(self):
662 def _phasecache(self):
662 return phases.phasecache(self, self._phasedefaults)
663 return phases.phasecache(self, self._phasedefaults)
663
664
664 @storecache('obsstore')
665 @storecache('obsstore')
665 def obsstore(self):
666 def obsstore(self):
666 return obsolete.makestore(self.ui, self)
667 return obsolete.makestore(self.ui, self)
667
668
668 @storecache('00changelog.i')
669 @storecache('00changelog.i')
669 def changelog(self):
670 def changelog(self):
670 return changelog.changelog(self.svfs,
671 return changelog.changelog(self.svfs,
671 trypending=txnutil.mayhavepending(self.root))
672 trypending=txnutil.mayhavepending(self.root))
672
673
673 def _constructmanifest(self):
674 def _constructmanifest(self):
674 # This is a temporary function while we migrate from manifest to
675 # This is a temporary function while we migrate from manifest to
675 # manifestlog. It allows bundlerepo and unionrepo to intercept the
676 # manifestlog. It allows bundlerepo and unionrepo to intercept the
676 # manifest creation.
677 # manifest creation.
677 return manifest.manifestrevlog(self.svfs)
678 return manifest.manifestrevlog(self.svfs)
678
679
679 @storecache('00manifest.i')
680 @storecache('00manifest.i')
680 def manifestlog(self):
681 def manifestlog(self):
681 return manifest.manifestlog(self.svfs, self)
682 return manifest.manifestlog(self.svfs, self)
682
683
683 @repofilecache('dirstate')
684 @repofilecache('dirstate')
684 def dirstate(self):
685 def dirstate(self):
685 sparsematchfn = lambda: sparse.matcher(self)
686 sparsematchfn = lambda: sparse.matcher(self)
686
687
687 return dirstate.dirstate(self.vfs, self.ui, self.root,
688 return dirstate.dirstate(self.vfs, self.ui, self.root,
688 self._dirstatevalidate, sparsematchfn)
689 self._dirstatevalidate, sparsematchfn)
689
690
690 def _dirstatevalidate(self, node):
691 def _dirstatevalidate(self, node):
691 try:
692 try:
692 self.changelog.rev(node)
693 self.changelog.rev(node)
693 return node
694 return node
694 except error.LookupError:
695 except error.LookupError:
695 if not self._dirstatevalidatewarned:
696 if not self._dirstatevalidatewarned:
696 self._dirstatevalidatewarned = True
697 self._dirstatevalidatewarned = True
697 self.ui.warn(_("warning: ignoring unknown"
698 self.ui.warn(_("warning: ignoring unknown"
698 " working parent %s!\n") % short(node))
699 " working parent %s!\n") % short(node))
699 return nullid
700 return nullid
700
701
701 def __getitem__(self, changeid):
702 def __getitem__(self, changeid):
702 if changeid is None:
703 if changeid is None:
703 return context.workingctx(self)
704 return context.workingctx(self)
704 if isinstance(changeid, slice):
705 if isinstance(changeid, slice):
705 # wdirrev isn't contiguous so the slice shouldn't include it
706 # wdirrev isn't contiguous so the slice shouldn't include it
706 return [context.changectx(self, i)
707 return [context.changectx(self, i)
707 for i in xrange(*changeid.indices(len(self)))
708 for i in xrange(*changeid.indices(len(self)))
708 if i not in self.changelog.filteredrevs]
709 if i not in self.changelog.filteredrevs]
709 try:
710 try:
710 return context.changectx(self, changeid)
711 return context.changectx(self, changeid)
711 except error.WdirUnsupported:
712 except error.WdirUnsupported:
712 return context.workingctx(self)
713 return context.workingctx(self)
713
714
714 def __contains__(self, changeid):
715 def __contains__(self, changeid):
715 """True if the given changeid exists
716 """True if the given changeid exists
716
717
717 error.LookupError is raised if an ambiguous node specified.
718 error.LookupError is raised if an ambiguous node specified.
718 """
719 """
719 try:
720 try:
720 self[changeid]
721 self[changeid]
721 return True
722 return True
722 except error.RepoLookupError:
723 except error.RepoLookupError:
723 return False
724 return False
724
725
725 def __nonzero__(self):
726 def __nonzero__(self):
726 return True
727 return True
727
728
728 __bool__ = __nonzero__
729 __bool__ = __nonzero__
729
730
730 def __len__(self):
731 def __len__(self):
731 return len(self.changelog)
732 return len(self.changelog)
732
733
733 def __iter__(self):
734 def __iter__(self):
734 return iter(self.changelog)
735 return iter(self.changelog)
735
736
736 def revs(self, expr, *args):
737 def revs(self, expr, *args):
737 '''Find revisions matching a revset.
738 '''Find revisions matching a revset.
738
739
739 The revset is specified as a string ``expr`` that may contain
740 The revset is specified as a string ``expr`` that may contain
740 %-formatting to escape certain types. See ``revsetlang.formatspec``.
741 %-formatting to escape certain types. See ``revsetlang.formatspec``.
741
742
742 Revset aliases from the configuration are not expanded. To expand
743 Revset aliases from the configuration are not expanded. To expand
743 user aliases, consider calling ``scmutil.revrange()`` or
744 user aliases, consider calling ``scmutil.revrange()`` or
744 ``repo.anyrevs([expr], user=True)``.
745 ``repo.anyrevs([expr], user=True)``.
745
746
746 Returns a revset.abstractsmartset, which is a list-like interface
747 Returns a revset.abstractsmartset, which is a list-like interface
747 that contains integer revisions.
748 that contains integer revisions.
748 '''
749 '''
749 expr = revsetlang.formatspec(expr, *args)
750 expr = revsetlang.formatspec(expr, *args)
750 m = revset.match(None, expr)
751 m = revset.match(None, expr)
751 return m(self)
752 return m(self)
752
753
753 def set(self, expr, *args):
754 def set(self, expr, *args):
754 '''Find revisions matching a revset and emit changectx instances.
755 '''Find revisions matching a revset and emit changectx instances.
755
756
756 This is a convenience wrapper around ``revs()`` that iterates the
757 This is a convenience wrapper around ``revs()`` that iterates the
757 result and is a generator of changectx instances.
758 result and is a generator of changectx instances.
758
759
759 Revset aliases from the configuration are not expanded. To expand
760 Revset aliases from the configuration are not expanded. To expand
760 user aliases, consider calling ``scmutil.revrange()``.
761 user aliases, consider calling ``scmutil.revrange()``.
761 '''
762 '''
762 for r in self.revs(expr, *args):
763 for r in self.revs(expr, *args):
763 yield self[r]
764 yield self[r]
764
765
765 def anyrevs(self, specs, user=False, localalias=None):
766 def anyrevs(self, specs, user=False, localalias=None):
766 '''Find revisions matching one of the given revsets.
767 '''Find revisions matching one of the given revsets.
767
768
768 Revset aliases from the configuration are not expanded by default. To
769 Revset aliases from the configuration are not expanded by default. To
769 expand user aliases, specify ``user=True``. To provide some local
770 expand user aliases, specify ``user=True``. To provide some local
770 definitions overriding user aliases, set ``localalias`` to
771 definitions overriding user aliases, set ``localalias`` to
771 ``{name: definitionstring}``.
772 ``{name: definitionstring}``.
772 '''
773 '''
773 if user:
774 if user:
774 m = revset.matchany(self.ui, specs, repo=self,
775 m = revset.matchany(self.ui, specs, repo=self,
775 localalias=localalias)
776 localalias=localalias)
776 else:
777 else:
777 m = revset.matchany(None, specs, localalias=localalias)
778 m = revset.matchany(None, specs, localalias=localalias)
778 return m(self)
779 return m(self)
779
780
780 def url(self):
781 def url(self):
781 return 'file:' + self.root
782 return 'file:' + self.root
782
783
783 def hook(self, name, throw=False, **args):
784 def hook(self, name, throw=False, **args):
784 """Call a hook, passing this repo instance.
785 """Call a hook, passing this repo instance.
785
786
786 This a convenience method to aid invoking hooks. Extensions likely
787 This a convenience method to aid invoking hooks. Extensions likely
787 won't call this unless they have registered a custom hook or are
788 won't call this unless they have registered a custom hook or are
788 replacing code that is expected to call a hook.
789 replacing code that is expected to call a hook.
789 """
790 """
790 return hook.hook(self.ui, self, name, throw, **args)
791 return hook.hook(self.ui, self, name, throw, **args)
791
792
792 @filteredpropertycache
793 @filteredpropertycache
793 def _tagscache(self):
794 def _tagscache(self):
794 '''Returns a tagscache object that contains various tags related
795 '''Returns a tagscache object that contains various tags related
795 caches.'''
796 caches.'''
796
797
797 # This simplifies its cache management by having one decorated
798 # This simplifies its cache management by having one decorated
798 # function (this one) and the rest simply fetch things from it.
799 # function (this one) and the rest simply fetch things from it.
799 class tagscache(object):
800 class tagscache(object):
800 def __init__(self):
801 def __init__(self):
801 # These two define the set of tags for this repository. tags
802 # These two define the set of tags for this repository. tags
802 # maps tag name to node; tagtypes maps tag name to 'global' or
803 # maps tag name to node; tagtypes maps tag name to 'global' or
803 # 'local'. (Global tags are defined by .hgtags across all
804 # 'local'. (Global tags are defined by .hgtags across all
804 # heads, and local tags are defined in .hg/localtags.)
805 # heads, and local tags are defined in .hg/localtags.)
805 # They constitute the in-memory cache of tags.
806 # They constitute the in-memory cache of tags.
806 self.tags = self.tagtypes = None
807 self.tags = self.tagtypes = None
807
808
808 self.nodetagscache = self.tagslist = None
809 self.nodetagscache = self.tagslist = None
809
810
810 cache = tagscache()
811 cache = tagscache()
811 cache.tags, cache.tagtypes = self._findtags()
812 cache.tags, cache.tagtypes = self._findtags()
812
813
813 return cache
814 return cache
814
815
815 def tags(self):
816 def tags(self):
816 '''return a mapping of tag to node'''
817 '''return a mapping of tag to node'''
817 t = {}
818 t = {}
818 if self.changelog.filteredrevs:
819 if self.changelog.filteredrevs:
819 tags, tt = self._findtags()
820 tags, tt = self._findtags()
820 else:
821 else:
821 tags = self._tagscache.tags
822 tags = self._tagscache.tags
822 for k, v in tags.iteritems():
823 for k, v in tags.iteritems():
823 try:
824 try:
824 # ignore tags to unknown nodes
825 # ignore tags to unknown nodes
825 self.changelog.rev(v)
826 self.changelog.rev(v)
826 t[k] = v
827 t[k] = v
827 except (error.LookupError, ValueError):
828 except (error.LookupError, ValueError):
828 pass
829 pass
829 return t
830 return t
830
831
831 def _findtags(self):
832 def _findtags(self):
832 '''Do the hard work of finding tags. Return a pair of dicts
833 '''Do the hard work of finding tags. Return a pair of dicts
833 (tags, tagtypes) where tags maps tag name to node, and tagtypes
834 (tags, tagtypes) where tags maps tag name to node, and tagtypes
834 maps tag name to a string like \'global\' or \'local\'.
835 maps tag name to a string like \'global\' or \'local\'.
835 Subclasses or extensions are free to add their own tags, but
836 Subclasses or extensions are free to add their own tags, but
836 should be aware that the returned dicts will be retained for the
837 should be aware that the returned dicts will be retained for the
837 duration of the localrepo object.'''
838 duration of the localrepo object.'''
838
839
839 # XXX what tagtype should subclasses/extensions use? Currently
840 # XXX what tagtype should subclasses/extensions use? Currently
840 # mq and bookmarks add tags, but do not set the tagtype at all.
841 # mq and bookmarks add tags, but do not set the tagtype at all.
841 # Should each extension invent its own tag type? Should there
842 # Should each extension invent its own tag type? Should there
842 # be one tagtype for all such "virtual" tags? Or is the status
843 # be one tagtype for all such "virtual" tags? Or is the status
843 # quo fine?
844 # quo fine?
844
845
845
846
846 # map tag name to (node, hist)
847 # map tag name to (node, hist)
847 alltags = tagsmod.findglobaltags(self.ui, self)
848 alltags = tagsmod.findglobaltags(self.ui, self)
848 # map tag name to tag type
849 # map tag name to tag type
849 tagtypes = dict((tag, 'global') for tag in alltags)
850 tagtypes = dict((tag, 'global') for tag in alltags)
850
851
851 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
852 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
852
853
853 # Build the return dicts. Have to re-encode tag names because
854 # Build the return dicts. Have to re-encode tag names because
854 # the tags module always uses UTF-8 (in order not to lose info
855 # the tags module always uses UTF-8 (in order not to lose info
855 # writing to the cache), but the rest of Mercurial wants them in
856 # writing to the cache), but the rest of Mercurial wants them in
856 # local encoding.
857 # local encoding.
857 tags = {}
858 tags = {}
858 for (name, (node, hist)) in alltags.iteritems():
859 for (name, (node, hist)) in alltags.iteritems():
859 if node != nullid:
860 if node != nullid:
860 tags[encoding.tolocal(name)] = node
861 tags[encoding.tolocal(name)] = node
861 tags['tip'] = self.changelog.tip()
862 tags['tip'] = self.changelog.tip()
862 tagtypes = dict([(encoding.tolocal(name), value)
863 tagtypes = dict([(encoding.tolocal(name), value)
863 for (name, value) in tagtypes.iteritems()])
864 for (name, value) in tagtypes.iteritems()])
864 return (tags, tagtypes)
865 return (tags, tagtypes)
865
866
866 def tagtype(self, tagname):
867 def tagtype(self, tagname):
867 '''
868 '''
868 return the type of the given tag. result can be:
869 return the type of the given tag. result can be:
869
870
870 'local' : a local tag
871 'local' : a local tag
871 'global' : a global tag
872 'global' : a global tag
872 None : tag does not exist
873 None : tag does not exist
873 '''
874 '''
874
875
875 return self._tagscache.tagtypes.get(tagname)
876 return self._tagscache.tagtypes.get(tagname)
876
877
877 def tagslist(self):
878 def tagslist(self):
878 '''return a list of tags ordered by revision'''
879 '''return a list of tags ordered by revision'''
879 if not self._tagscache.tagslist:
880 if not self._tagscache.tagslist:
880 l = []
881 l = []
881 for t, n in self.tags().iteritems():
882 for t, n in self.tags().iteritems():
882 l.append((self.changelog.rev(n), t, n))
883 l.append((self.changelog.rev(n), t, n))
883 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
884 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
884
885
885 return self._tagscache.tagslist
886 return self._tagscache.tagslist
886
887
887 def nodetags(self, node):
888 def nodetags(self, node):
888 '''return the tags associated with a node'''
889 '''return the tags associated with a node'''
889 if not self._tagscache.nodetagscache:
890 if not self._tagscache.nodetagscache:
890 nodetagscache = {}
891 nodetagscache = {}
891 for t, n in self._tagscache.tags.iteritems():
892 for t, n in self._tagscache.tags.iteritems():
892 nodetagscache.setdefault(n, []).append(t)
893 nodetagscache.setdefault(n, []).append(t)
893 for tags in nodetagscache.itervalues():
894 for tags in nodetagscache.itervalues():
894 tags.sort()
895 tags.sort()
895 self._tagscache.nodetagscache = nodetagscache
896 self._tagscache.nodetagscache = nodetagscache
896 return self._tagscache.nodetagscache.get(node, [])
897 return self._tagscache.nodetagscache.get(node, [])
897
898
898 def nodebookmarks(self, node):
899 def nodebookmarks(self, node):
899 """return the list of bookmarks pointing to the specified node"""
900 """return the list of bookmarks pointing to the specified node"""
900 marks = []
901 marks = []
901 for bookmark, n in self._bookmarks.iteritems():
902 for bookmark, n in self._bookmarks.iteritems():
902 if n == node:
903 if n == node:
903 marks.append(bookmark)
904 marks.append(bookmark)
904 return sorted(marks)
905 return sorted(marks)
905
906
906 def branchmap(self):
907 def branchmap(self):
907 '''returns a dictionary {branch: [branchheads]} with branchheads
908 '''returns a dictionary {branch: [branchheads]} with branchheads
908 ordered by increasing revision number'''
909 ordered by increasing revision number'''
909 branchmap.updatecache(self)
910 branchmap.updatecache(self)
910 return self._branchcaches[self.filtername]
911 return self._branchcaches[self.filtername]
911
912
912 @unfilteredmethod
913 @unfilteredmethod
913 def revbranchcache(self):
914 def revbranchcache(self):
914 if not self._revbranchcache:
915 if not self._revbranchcache:
915 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
916 self._revbranchcache = branchmap.revbranchcache(self.unfiltered())
916 return self._revbranchcache
917 return self._revbranchcache
917
918
918 def branchtip(self, branch, ignoremissing=False):
919 def branchtip(self, branch, ignoremissing=False):
919 '''return the tip node for a given branch
920 '''return the tip node for a given branch
920
921
921 If ignoremissing is True, then this method will not raise an error.
922 If ignoremissing is True, then this method will not raise an error.
922 This is helpful for callers that only expect None for a missing branch
923 This is helpful for callers that only expect None for a missing branch
923 (e.g. namespace).
924 (e.g. namespace).
924
925
925 '''
926 '''
926 try:
927 try:
927 return self.branchmap().branchtip(branch)
928 return self.branchmap().branchtip(branch)
928 except KeyError:
929 except KeyError:
929 if not ignoremissing:
930 if not ignoremissing:
930 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
931 raise error.RepoLookupError(_("unknown branch '%s'") % branch)
931 else:
932 else:
932 pass
933 pass
933
934
934 def lookup(self, key):
935 def lookup(self, key):
935 return self[key].node()
936 return self[key].node()
936
937
937 def lookupbranch(self, key, remote=None):
938 def lookupbranch(self, key, remote=None):
938 repo = remote or self
939 repo = remote or self
939 if key in repo.branchmap():
940 if key in repo.branchmap():
940 return key
941 return key
941
942
942 repo = (remote and remote.local()) and remote or self
943 repo = (remote and remote.local()) and remote or self
943 return repo[key].branch()
944 return repo[key].branch()
944
945
945 def known(self, nodes):
946 def known(self, nodes):
946 cl = self.changelog
947 cl = self.changelog
947 nm = cl.nodemap
948 nm = cl.nodemap
948 filtered = cl.filteredrevs
949 filtered = cl.filteredrevs
949 result = []
950 result = []
950 for n in nodes:
951 for n in nodes:
951 r = nm.get(n)
952 r = nm.get(n)
952 resp = not (r is None or r in filtered)
953 resp = not (r is None or r in filtered)
953 result.append(resp)
954 result.append(resp)
954 return result
955 return result
955
956
956 def local(self):
957 def local(self):
957 return self
958 return self
958
959
959 def publishing(self):
960 def publishing(self):
960 # it's safe (and desirable) to trust the publish flag unconditionally
961 # it's safe (and desirable) to trust the publish flag unconditionally
961 # so that we don't finalize changes shared between users via ssh or nfs
962 # so that we don't finalize changes shared between users via ssh or nfs
962 return self.ui.configbool('phases', 'publish', untrusted=True)
963 return self.ui.configbool('phases', 'publish', untrusted=True)
963
964
964 def cancopy(self):
965 def cancopy(self):
965 # so statichttprepo's override of local() works
966 # so statichttprepo's override of local() works
966 if not self.local():
967 if not self.local():
967 return False
968 return False
968 if not self.publishing():
969 if not self.publishing():
969 return True
970 return True
970 # if publishing we can't copy if there is filtered content
971 # if publishing we can't copy if there is filtered content
971 return not self.filtered('visible').changelog.filteredrevs
972 return not self.filtered('visible').changelog.filteredrevs
972
973
973 def shared(self):
974 def shared(self):
974 '''the type of shared repository (None if not shared)'''
975 '''the type of shared repository (None if not shared)'''
975 if self.sharedpath != self.path:
976 if self.sharedpath != self.path:
976 return 'store'
977 return 'store'
977 return None
978 return None
978
979
979 def wjoin(self, f, *insidef):
980 def wjoin(self, f, *insidef):
980 return self.vfs.reljoin(self.root, f, *insidef)
981 return self.vfs.reljoin(self.root, f, *insidef)
981
982
982 def file(self, f):
983 def file(self, f):
983 if f[0] == '/':
984 if f[0] == '/':
984 f = f[1:]
985 f = f[1:]
985 return filelog.filelog(self.svfs, f)
986 return filelog.filelog(self.svfs, f)
986
987
987 def changectx(self, changeid):
988 def changectx(self, changeid):
988 return self[changeid]
989 return self[changeid]
989
990
990 def setparents(self, p1, p2=nullid):
991 def setparents(self, p1, p2=nullid):
991 with self.dirstate.parentchange():
992 with self.dirstate.parentchange():
992 copies = self.dirstate.setparents(p1, p2)
993 copies = self.dirstate.setparents(p1, p2)
993 pctx = self[p1]
994 pctx = self[p1]
994 if copies:
995 if copies:
995 # Adjust copy records, the dirstate cannot do it, it
996 # Adjust copy records, the dirstate cannot do it, it
996 # requires access to parents manifests. Preserve them
997 # requires access to parents manifests. Preserve them
997 # only for entries added to first parent.
998 # only for entries added to first parent.
998 for f in copies:
999 for f in copies:
999 if f not in pctx and copies[f] in pctx:
1000 if f not in pctx and copies[f] in pctx:
1000 self.dirstate.copy(copies[f], f)
1001 self.dirstate.copy(copies[f], f)
1001 if p2 == nullid:
1002 if p2 == nullid:
1002 for f, s in sorted(self.dirstate.copies().items()):
1003 for f, s in sorted(self.dirstate.copies().items()):
1003 if f not in pctx and s not in pctx:
1004 if f not in pctx and s not in pctx:
1004 self.dirstate.copy(None, f)
1005 self.dirstate.copy(None, f)
1005
1006
1006 def filectx(self, path, changeid=None, fileid=None):
1007 def filectx(self, path, changeid=None, fileid=None):
1007 """changeid can be a changeset revision, node, or tag.
1008 """changeid can be a changeset revision, node, or tag.
1008 fileid can be a file revision or node."""
1009 fileid can be a file revision or node."""
1009 return context.filectx(self, path, changeid, fileid)
1010 return context.filectx(self, path, changeid, fileid)
1010
1011
1011 def getcwd(self):
1012 def getcwd(self):
1012 return self.dirstate.getcwd()
1013 return self.dirstate.getcwd()
1013
1014
1014 def pathto(self, f, cwd=None):
1015 def pathto(self, f, cwd=None):
1015 return self.dirstate.pathto(f, cwd)
1016 return self.dirstate.pathto(f, cwd)
1016
1017
1017 def _loadfilter(self, filter):
1018 def _loadfilter(self, filter):
1018 if filter not in self.filterpats:
1019 if filter not in self.filterpats:
1019 l = []
1020 l = []
1020 for pat, cmd in self.ui.configitems(filter):
1021 for pat, cmd in self.ui.configitems(filter):
1021 if cmd == '!':
1022 if cmd == '!':
1022 continue
1023 continue
1023 mf = matchmod.match(self.root, '', [pat])
1024 mf = matchmod.match(self.root, '', [pat])
1024 fn = None
1025 fn = None
1025 params = cmd
1026 params = cmd
1026 for name, filterfn in self._datafilters.iteritems():
1027 for name, filterfn in self._datafilters.iteritems():
1027 if cmd.startswith(name):
1028 if cmd.startswith(name):
1028 fn = filterfn
1029 fn = filterfn
1029 params = cmd[len(name):].lstrip()
1030 params = cmd[len(name):].lstrip()
1030 break
1031 break
1031 if not fn:
1032 if not fn:
1032 fn = lambda s, c, **kwargs: util.filter(s, c)
1033 fn = lambda s, c, **kwargs: util.filter(s, c)
1033 # Wrap old filters not supporting keyword arguments
1034 # Wrap old filters not supporting keyword arguments
1034 if not inspect.getargspec(fn)[2]:
1035 if not inspect.getargspec(fn)[2]:
1035 oldfn = fn
1036 oldfn = fn
1036 fn = lambda s, c, **kwargs: oldfn(s, c)
1037 fn = lambda s, c, **kwargs: oldfn(s, c)
1037 l.append((mf, fn, params))
1038 l.append((mf, fn, params))
1038 self.filterpats[filter] = l
1039 self.filterpats[filter] = l
1039 return self.filterpats[filter]
1040 return self.filterpats[filter]
1040
1041
1041 def _filter(self, filterpats, filename, data):
1042 def _filter(self, filterpats, filename, data):
1042 for mf, fn, cmd in filterpats:
1043 for mf, fn, cmd in filterpats:
1043 if mf(filename):
1044 if mf(filename):
1044 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1045 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
1045 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1046 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
1046 break
1047 break
1047
1048
1048 return data
1049 return data
1049
1050
1050 @unfilteredpropertycache
1051 @unfilteredpropertycache
1051 def _encodefilterpats(self):
1052 def _encodefilterpats(self):
1052 return self._loadfilter('encode')
1053 return self._loadfilter('encode')
1053
1054
1054 @unfilteredpropertycache
1055 @unfilteredpropertycache
1055 def _decodefilterpats(self):
1056 def _decodefilterpats(self):
1056 return self._loadfilter('decode')
1057 return self._loadfilter('decode')
1057
1058
1058 def adddatafilter(self, name, filter):
1059 def adddatafilter(self, name, filter):
1059 self._datafilters[name] = filter
1060 self._datafilters[name] = filter
1060
1061
1061 def wread(self, filename):
1062 def wread(self, filename):
1062 if self.wvfs.islink(filename):
1063 if self.wvfs.islink(filename):
1063 data = self.wvfs.readlink(filename)
1064 data = self.wvfs.readlink(filename)
1064 else:
1065 else:
1065 data = self.wvfs.read(filename)
1066 data = self.wvfs.read(filename)
1066 return self._filter(self._encodefilterpats, filename, data)
1067 return self._filter(self._encodefilterpats, filename, data)
1067
1068
1068 def wwrite(self, filename, data, flags, backgroundclose=False):
1069 def wwrite(self, filename, data, flags, backgroundclose=False):
1069 """write ``data`` into ``filename`` in the working directory
1070 """write ``data`` into ``filename`` in the working directory
1070
1071
1071 This returns length of written (maybe decoded) data.
1072 This returns length of written (maybe decoded) data.
1072 """
1073 """
1073 data = self._filter(self._decodefilterpats, filename, data)
1074 data = self._filter(self._decodefilterpats, filename, data)
1074 if 'l' in flags:
1075 if 'l' in flags:
1075 self.wvfs.symlink(data, filename)
1076 self.wvfs.symlink(data, filename)
1076 else:
1077 else:
1077 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1078 self.wvfs.write(filename, data, backgroundclose=backgroundclose)
1078 if 'x' in flags:
1079 if 'x' in flags:
1079 self.wvfs.setflags(filename, False, True)
1080 self.wvfs.setflags(filename, False, True)
1080 return len(data)
1081 return len(data)
1081
1082
1082 def wwritedata(self, filename, data):
1083 def wwritedata(self, filename, data):
1083 return self._filter(self._decodefilterpats, filename, data)
1084 return self._filter(self._decodefilterpats, filename, data)
1084
1085
1085 def currenttransaction(self):
1086 def currenttransaction(self):
1086 """return the current transaction or None if non exists"""
1087 """return the current transaction or None if non exists"""
1087 if self._transref:
1088 if self._transref:
1088 tr = self._transref()
1089 tr = self._transref()
1089 else:
1090 else:
1090 tr = None
1091 tr = None
1091
1092
1092 if tr and tr.running():
1093 if tr and tr.running():
1093 return tr
1094 return tr
1094 return None
1095 return None
1095
1096
1096 def transaction(self, desc, report=None):
1097 def transaction(self, desc, report=None):
1097 if (self.ui.configbool('devel', 'all-warnings')
1098 if (self.ui.configbool('devel', 'all-warnings')
1098 or self.ui.configbool('devel', 'check-locks')):
1099 or self.ui.configbool('devel', 'check-locks')):
1099 if self._currentlock(self._lockref) is None:
1100 if self._currentlock(self._lockref) is None:
1100 raise error.ProgrammingError('transaction requires locking')
1101 raise error.ProgrammingError('transaction requires locking')
1101 tr = self.currenttransaction()
1102 tr = self.currenttransaction()
1102 if tr is not None:
1103 if tr is not None:
1103 scmutil.registersummarycallback(self, tr, desc)
1104 scmutil.registersummarycallback(self, tr, desc)
1104 return tr.nest()
1105 return tr.nest()
1105
1106
1106 # abort here if the journal already exists
1107 # abort here if the journal already exists
1107 if self.svfs.exists("journal"):
1108 if self.svfs.exists("journal"):
1108 raise error.RepoError(
1109 raise error.RepoError(
1109 _("abandoned transaction found"),
1110 _("abandoned transaction found"),
1110 hint=_("run 'hg recover' to clean up transaction"))
1111 hint=_("run 'hg recover' to clean up transaction"))
1111
1112
1112 idbase = "%.40f#%f" % (random.random(), time.time())
1113 idbase = "%.40f#%f" % (random.random(), time.time())
1113 ha = hex(hashlib.sha1(idbase).digest())
1114 ha = hex(hashlib.sha1(idbase).digest())
1114 txnid = 'TXN:' + ha
1115 txnid = 'TXN:' + ha
1115 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1116 self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid)
1116
1117
1117 self._writejournal(desc)
1118 self._writejournal(desc)
1118 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1119 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
1119 if report:
1120 if report:
1120 rp = report
1121 rp = report
1121 else:
1122 else:
1122 rp = self.ui.warn
1123 rp = self.ui.warn
1123 vfsmap = {'plain': self.vfs} # root of .hg/
1124 vfsmap = {'plain': self.vfs} # root of .hg/
1124 # we must avoid cyclic reference between repo and transaction.
1125 # we must avoid cyclic reference between repo and transaction.
1125 reporef = weakref.ref(self)
1126 reporef = weakref.ref(self)
1126 # Code to track tag movement
1127 # Code to track tag movement
1127 #
1128 #
1128 # Since tags are all handled as file content, it is actually quite hard
1129 # Since tags are all handled as file content, it is actually quite hard
1129 # to track these movement from a code perspective. So we fallback to a
1130 # to track these movement from a code perspective. So we fallback to a
1130 # tracking at the repository level. One could envision to track changes
1131 # tracking at the repository level. One could envision to track changes
1131 # to the '.hgtags' file through changegroup apply but that fails to
1132 # to the '.hgtags' file through changegroup apply but that fails to
1132 # cope with case where transaction expose new heads without changegroup
1133 # cope with case where transaction expose new heads without changegroup
1133 # being involved (eg: phase movement).
1134 # being involved (eg: phase movement).
1134 #
1135 #
1135 # For now, We gate the feature behind a flag since this likely comes
1136 # For now, We gate the feature behind a flag since this likely comes
1136 # with performance impacts. The current code run more often than needed
1137 # with performance impacts. The current code run more often than needed
1137 # and do not use caches as much as it could. The current focus is on
1138 # and do not use caches as much as it could. The current focus is on
1138 # the behavior of the feature so we disable it by default. The flag
1139 # the behavior of the feature so we disable it by default. The flag
1139 # will be removed when we are happy with the performance impact.
1140 # will be removed when we are happy with the performance impact.
1140 #
1141 #
1141 # Once this feature is no longer experimental move the following
1142 # Once this feature is no longer experimental move the following
1142 # documentation to the appropriate help section:
1143 # documentation to the appropriate help section:
1143 #
1144 #
1144 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1145 # The ``HG_TAG_MOVED`` variable will be set if the transaction touched
1145 # tags (new or changed or deleted tags). In addition the details of
1146 # tags (new or changed or deleted tags). In addition the details of
1146 # these changes are made available in a file at:
1147 # these changes are made available in a file at:
1147 # ``REPOROOT/.hg/changes/tags.changes``.
1148 # ``REPOROOT/.hg/changes/tags.changes``.
1148 # Make sure you check for HG_TAG_MOVED before reading that file as it
1149 # Make sure you check for HG_TAG_MOVED before reading that file as it
1149 # might exist from a previous transaction even if no tag were touched
1150 # might exist from a previous transaction even if no tag were touched
1150 # in this one. Changes are recorded in a line base format::
1151 # in this one. Changes are recorded in a line base format::
1151 #
1152 #
1152 # <action> <hex-node> <tag-name>\n
1153 # <action> <hex-node> <tag-name>\n
1153 #
1154 #
1154 # Actions are defined as follow:
1155 # Actions are defined as follow:
1155 # "-R": tag is removed,
1156 # "-R": tag is removed,
1156 # "+A": tag is added,
1157 # "+A": tag is added,
1157 # "-M": tag is moved (old value),
1158 # "-M": tag is moved (old value),
1158 # "+M": tag is moved (new value),
1159 # "+M": tag is moved (new value),
1159 tracktags = lambda x: None
1160 tracktags = lambda x: None
1160 # experimental config: experimental.hook-track-tags
1161 # experimental config: experimental.hook-track-tags
1161 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1162 shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
1162 if desc != 'strip' and shouldtracktags:
1163 if desc != 'strip' and shouldtracktags:
1163 oldheads = self.changelog.headrevs()
1164 oldheads = self.changelog.headrevs()
1164 def tracktags(tr2):
1165 def tracktags(tr2):
1165 repo = reporef()
1166 repo = reporef()
1166 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1167 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
1167 newheads = repo.changelog.headrevs()
1168 newheads = repo.changelog.headrevs()
1168 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1169 newfnodes = tagsmod.fnoderevs(repo.ui, repo, newheads)
1169 # notes: we compare lists here.
1170 # notes: we compare lists here.
1170 # As we do it only once buiding set would not be cheaper
1171 # As we do it only once buiding set would not be cheaper
1171 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1172 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
1172 if changes:
1173 if changes:
1173 tr2.hookargs['tag_moved'] = '1'
1174 tr2.hookargs['tag_moved'] = '1'
1174 with repo.vfs('changes/tags.changes', 'w',
1175 with repo.vfs('changes/tags.changes', 'w',
1175 atomictemp=True) as changesfile:
1176 atomictemp=True) as changesfile:
1176 # note: we do not register the file to the transaction
1177 # note: we do not register the file to the transaction
1177 # because we needs it to still exist on the transaction
1178 # because we needs it to still exist on the transaction
1178 # is close (for txnclose hooks)
1179 # is close (for txnclose hooks)
1179 tagsmod.writediff(changesfile, changes)
1180 tagsmod.writediff(changesfile, changes)
1180 def validate(tr2):
1181 def validate(tr2):
1181 """will run pre-closing hooks"""
1182 """will run pre-closing hooks"""
1182 # XXX the transaction API is a bit lacking here so we take a hacky
1183 # XXX the transaction API is a bit lacking here so we take a hacky
1183 # path for now
1184 # path for now
1184 #
1185 #
1185 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1186 # We cannot add this as a "pending" hooks since the 'tr.hookargs'
1186 # dict is copied before these run. In addition we needs the data
1187 # dict is copied before these run. In addition we needs the data
1187 # available to in memory hooks too.
1188 # available to in memory hooks too.
1188 #
1189 #
1189 # Moreover, we also need to make sure this runs before txnclose
1190 # Moreover, we also need to make sure this runs before txnclose
1190 # hooks and there is no "pending" mechanism that would execute
1191 # hooks and there is no "pending" mechanism that would execute
1191 # logic only if hooks are about to run.
1192 # logic only if hooks are about to run.
1192 #
1193 #
1193 # Fixing this limitation of the transaction is also needed to track
1194 # Fixing this limitation of the transaction is also needed to track
1194 # other families of changes (bookmarks, phases, obsolescence).
1195 # other families of changes (bookmarks, phases, obsolescence).
1195 #
1196 #
1196 # This will have to be fixed before we remove the experimental
1197 # This will have to be fixed before we remove the experimental
1197 # gating.
1198 # gating.
1198 tracktags(tr2)
1199 tracktags(tr2)
1199 reporef().hook('pretxnclose', throw=True,
1200 reporef().hook('pretxnclose', throw=True,
1200 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1201 txnname=desc, **pycompat.strkwargs(tr.hookargs))
1201 def releasefn(tr, success):
1202 def releasefn(tr, success):
1202 repo = reporef()
1203 repo = reporef()
1203 if success:
1204 if success:
1204 # this should be explicitly invoked here, because
1205 # this should be explicitly invoked here, because
1205 # in-memory changes aren't written out at closing
1206 # in-memory changes aren't written out at closing
1206 # transaction, if tr.addfilegenerator (via
1207 # transaction, if tr.addfilegenerator (via
1207 # dirstate.write or so) isn't invoked while
1208 # dirstate.write or so) isn't invoked while
1208 # transaction running
1209 # transaction running
1209 repo.dirstate.write(None)
1210 repo.dirstate.write(None)
1210 else:
1211 else:
1211 # discard all changes (including ones already written
1212 # discard all changes (including ones already written
1212 # out) in this transaction
1213 # out) in this transaction
1213 repo.dirstate.restorebackup(None, 'journal.dirstate')
1214 repo.dirstate.restorebackup(None, 'journal.dirstate')
1214
1215
1215 repo.invalidate(clearfilecache=True)
1216 repo.invalidate(clearfilecache=True)
1216
1217
1217 tr = transaction.transaction(rp, self.svfs, vfsmap,
1218 tr = transaction.transaction(rp, self.svfs, vfsmap,
1218 "journal",
1219 "journal",
1219 "undo",
1220 "undo",
1220 aftertrans(renames),
1221 aftertrans(renames),
1221 self.store.createmode,
1222 self.store.createmode,
1222 validator=validate,
1223 validator=validate,
1223 releasefn=releasefn,
1224 releasefn=releasefn,
1224 checkambigfiles=_cachedfiles)
1225 checkambigfiles=_cachedfiles)
1225 tr.changes['revs'] = set()
1226 tr.changes['revs'] = set()
1226 tr.changes['obsmarkers'] = set()
1227 tr.changes['obsmarkers'] = set()
1227 tr.changes['phases'] = {}
1228 tr.changes['phases'] = {}
1228 tr.changes['bookmarks'] = {}
1229 tr.changes['bookmarks'] = {}
1229
1230
1230 tr.hookargs['txnid'] = txnid
1231 tr.hookargs['txnid'] = txnid
1231 # note: writing the fncache only during finalize mean that the file is
1232 # note: writing the fncache only during finalize mean that the file is
1232 # outdated when running hooks. As fncache is used for streaming clone,
1233 # outdated when running hooks. As fncache is used for streaming clone,
1233 # this is not expected to break anything that happen during the hooks.
1234 # this is not expected to break anything that happen during the hooks.
1234 tr.addfinalize('flush-fncache', self.store.write)
1235 tr.addfinalize('flush-fncache', self.store.write)
1235 def txnclosehook(tr2):
1236 def txnclosehook(tr2):
1236 """To be run if transaction is successful, will schedule a hook run
1237 """To be run if transaction is successful, will schedule a hook run
1237 """
1238 """
1238 # Don't reference tr2 in hook() so we don't hold a reference.
1239 # Don't reference tr2 in hook() so we don't hold a reference.
1239 # This reduces memory consumption when there are multiple
1240 # This reduces memory consumption when there are multiple
1240 # transactions per lock. This can likely go away if issue5045
1241 # transactions per lock. This can likely go away if issue5045
1241 # fixes the function accumulation.
1242 # fixes the function accumulation.
1242 hookargs = tr2.hookargs
1243 hookargs = tr2.hookargs
1243
1244
1244 def hook():
1245 def hook():
1245 reporef().hook('txnclose', throw=False, txnname=desc,
1246 reporef().hook('txnclose', throw=False, txnname=desc,
1246 **pycompat.strkwargs(hookargs))
1247 **pycompat.strkwargs(hookargs))
1247 reporef()._afterlock(hook)
1248 reporef()._afterlock(hook)
1248 tr.addfinalize('txnclose-hook', txnclosehook)
1249 tr.addfinalize('txnclose-hook', txnclosehook)
1249 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1250 tr.addpostclose('warms-cache', self._buildcacheupdater(tr))
1250 def txnaborthook(tr2):
1251 def txnaborthook(tr2):
1251 """To be run if transaction is aborted
1252 """To be run if transaction is aborted
1252 """
1253 """
1253 reporef().hook('txnabort', throw=False, txnname=desc,
1254 reporef().hook('txnabort', throw=False, txnname=desc,
1254 **tr2.hookargs)
1255 **tr2.hookargs)
1255 tr.addabort('txnabort-hook', txnaborthook)
1256 tr.addabort('txnabort-hook', txnaborthook)
1256 # avoid eager cache invalidation. in-memory data should be identical
1257 # avoid eager cache invalidation. in-memory data should be identical
1257 # to stored data if transaction has no error.
1258 # to stored data if transaction has no error.
1258 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1259 tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats)
1259 self._transref = weakref.ref(tr)
1260 self._transref = weakref.ref(tr)
1260 scmutil.registersummarycallback(self, tr, desc)
1261 scmutil.registersummarycallback(self, tr, desc)
1261 return tr
1262 return tr
1262
1263
1263 def _journalfiles(self):
1264 def _journalfiles(self):
1264 return ((self.svfs, 'journal'),
1265 return ((self.svfs, 'journal'),
1265 (self.vfs, 'journal.dirstate'),
1266 (self.vfs, 'journal.dirstate'),
1266 (self.vfs, 'journal.branch'),
1267 (self.vfs, 'journal.branch'),
1267 (self.vfs, 'journal.desc'),
1268 (self.vfs, 'journal.desc'),
1268 (self.vfs, 'journal.bookmarks'),
1269 (self.vfs, 'journal.bookmarks'),
1269 (self.svfs, 'journal.phaseroots'))
1270 (self.svfs, 'journal.phaseroots'))
1270
1271
1271 def undofiles(self):
1272 def undofiles(self):
1272 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1273 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
1273
1274
1274 @unfilteredmethod
1275 @unfilteredmethod
1275 def _writejournal(self, desc):
1276 def _writejournal(self, desc):
1276 self.dirstate.savebackup(None, 'journal.dirstate')
1277 self.dirstate.savebackup(None, 'journal.dirstate')
1277 self.vfs.write("journal.branch",
1278 self.vfs.write("journal.branch",
1278 encoding.fromlocal(self.dirstate.branch()))
1279 encoding.fromlocal(self.dirstate.branch()))
1279 self.vfs.write("journal.desc",
1280 self.vfs.write("journal.desc",
1280 "%d\n%s\n" % (len(self), desc))
1281 "%d\n%s\n" % (len(self), desc))
1281 self.vfs.write("journal.bookmarks",
1282 self.vfs.write("journal.bookmarks",
1282 self.vfs.tryread("bookmarks"))
1283 self.vfs.tryread("bookmarks"))
1283 self.svfs.write("journal.phaseroots",
1284 self.svfs.write("journal.phaseroots",
1284 self.svfs.tryread("phaseroots"))
1285 self.svfs.tryread("phaseroots"))
1285
1286
1286 def recover(self):
1287 def recover(self):
1287 with self.lock():
1288 with self.lock():
1288 if self.svfs.exists("journal"):
1289 if self.svfs.exists("journal"):
1289 self.ui.status(_("rolling back interrupted transaction\n"))
1290 self.ui.status(_("rolling back interrupted transaction\n"))
1290 vfsmap = {'': self.svfs,
1291 vfsmap = {'': self.svfs,
1291 'plain': self.vfs,}
1292 'plain': self.vfs,}
1292 transaction.rollback(self.svfs, vfsmap, "journal",
1293 transaction.rollback(self.svfs, vfsmap, "journal",
1293 self.ui.warn,
1294 self.ui.warn,
1294 checkambigfiles=_cachedfiles)
1295 checkambigfiles=_cachedfiles)
1295 self.invalidate()
1296 self.invalidate()
1296 return True
1297 return True
1297 else:
1298 else:
1298 self.ui.warn(_("no interrupted transaction available\n"))
1299 self.ui.warn(_("no interrupted transaction available\n"))
1299 return False
1300 return False
1300
1301
1301 def rollback(self, dryrun=False, force=False):
1302 def rollback(self, dryrun=False, force=False):
1302 wlock = lock = dsguard = None
1303 wlock = lock = dsguard = None
1303 try:
1304 try:
1304 wlock = self.wlock()
1305 wlock = self.wlock()
1305 lock = self.lock()
1306 lock = self.lock()
1306 if self.svfs.exists("undo"):
1307 if self.svfs.exists("undo"):
1307 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1308 dsguard = dirstateguard.dirstateguard(self, 'rollback')
1308
1309
1309 return self._rollback(dryrun, force, dsguard)
1310 return self._rollback(dryrun, force, dsguard)
1310 else:
1311 else:
1311 self.ui.warn(_("no rollback information available\n"))
1312 self.ui.warn(_("no rollback information available\n"))
1312 return 1
1313 return 1
1313 finally:
1314 finally:
1314 release(dsguard, lock, wlock)
1315 release(dsguard, lock, wlock)
1315
1316
1316 @unfilteredmethod # Until we get smarter cache management
1317 @unfilteredmethod # Until we get smarter cache management
1317 def _rollback(self, dryrun, force, dsguard):
1318 def _rollback(self, dryrun, force, dsguard):
1318 ui = self.ui
1319 ui = self.ui
1319 try:
1320 try:
1320 args = self.vfs.read('undo.desc').splitlines()
1321 args = self.vfs.read('undo.desc').splitlines()
1321 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1322 (oldlen, desc, detail) = (int(args[0]), args[1], None)
1322 if len(args) >= 3:
1323 if len(args) >= 3:
1323 detail = args[2]
1324 detail = args[2]
1324 oldtip = oldlen - 1
1325 oldtip = oldlen - 1
1325
1326
1326 if detail and ui.verbose:
1327 if detail and ui.verbose:
1327 msg = (_('repository tip rolled back to revision %d'
1328 msg = (_('repository tip rolled back to revision %d'
1328 ' (undo %s: %s)\n')
1329 ' (undo %s: %s)\n')
1329 % (oldtip, desc, detail))
1330 % (oldtip, desc, detail))
1330 else:
1331 else:
1331 msg = (_('repository tip rolled back to revision %d'
1332 msg = (_('repository tip rolled back to revision %d'
1332 ' (undo %s)\n')
1333 ' (undo %s)\n')
1333 % (oldtip, desc))
1334 % (oldtip, desc))
1334 except IOError:
1335 except IOError:
1335 msg = _('rolling back unknown transaction\n')
1336 msg = _('rolling back unknown transaction\n')
1336 desc = None
1337 desc = None
1337
1338
1338 if not force and self['.'] != self['tip'] and desc == 'commit':
1339 if not force and self['.'] != self['tip'] and desc == 'commit':
1339 raise error.Abort(
1340 raise error.Abort(
1340 _('rollback of last commit while not checked out '
1341 _('rollback of last commit while not checked out '
1341 'may lose data'), hint=_('use -f to force'))
1342 'may lose data'), hint=_('use -f to force'))
1342
1343
1343 ui.status(msg)
1344 ui.status(msg)
1344 if dryrun:
1345 if dryrun:
1345 return 0
1346 return 0
1346
1347
1347 parents = self.dirstate.parents()
1348 parents = self.dirstate.parents()
1348 self.destroying()
1349 self.destroying()
1349 vfsmap = {'plain': self.vfs, '': self.svfs}
1350 vfsmap = {'plain': self.vfs, '': self.svfs}
1350 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1351 transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
1351 checkambigfiles=_cachedfiles)
1352 checkambigfiles=_cachedfiles)
1352 if self.vfs.exists('undo.bookmarks'):
1353 if self.vfs.exists('undo.bookmarks'):
1353 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1354 self.vfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
1354 if self.svfs.exists('undo.phaseroots'):
1355 if self.svfs.exists('undo.phaseroots'):
1355 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1356 self.svfs.rename('undo.phaseroots', 'phaseroots', checkambig=True)
1356 self.invalidate()
1357 self.invalidate()
1357
1358
1358 parentgone = (parents[0] not in self.changelog.nodemap or
1359 parentgone = (parents[0] not in self.changelog.nodemap or
1359 parents[1] not in self.changelog.nodemap)
1360 parents[1] not in self.changelog.nodemap)
1360 if parentgone:
1361 if parentgone:
1361 # prevent dirstateguard from overwriting already restored one
1362 # prevent dirstateguard from overwriting already restored one
1362 dsguard.close()
1363 dsguard.close()
1363
1364
1364 self.dirstate.restorebackup(None, 'undo.dirstate')
1365 self.dirstate.restorebackup(None, 'undo.dirstate')
1365 try:
1366 try:
1366 branch = self.vfs.read('undo.branch')
1367 branch = self.vfs.read('undo.branch')
1367 self.dirstate.setbranch(encoding.tolocal(branch))
1368 self.dirstate.setbranch(encoding.tolocal(branch))
1368 except IOError:
1369 except IOError:
1369 ui.warn(_('named branch could not be reset: '
1370 ui.warn(_('named branch could not be reset: '
1370 'current branch is still \'%s\'\n')
1371 'current branch is still \'%s\'\n')
1371 % self.dirstate.branch())
1372 % self.dirstate.branch())
1372
1373
1373 parents = tuple([p.rev() for p in self[None].parents()])
1374 parents = tuple([p.rev() for p in self[None].parents()])
1374 if len(parents) > 1:
1375 if len(parents) > 1:
1375 ui.status(_('working directory now based on '
1376 ui.status(_('working directory now based on '
1376 'revisions %d and %d\n') % parents)
1377 'revisions %d and %d\n') % parents)
1377 else:
1378 else:
1378 ui.status(_('working directory now based on '
1379 ui.status(_('working directory now based on '
1379 'revision %d\n') % parents)
1380 'revision %d\n') % parents)
1380 mergemod.mergestate.clean(self, self['.'].node())
1381 mergemod.mergestate.clean(self, self['.'].node())
1381
1382
1382 # TODO: if we know which new heads may result from this rollback, pass
1383 # TODO: if we know which new heads may result from this rollback, pass
1383 # them to destroy(), which will prevent the branchhead cache from being
1384 # them to destroy(), which will prevent the branchhead cache from being
1384 # invalidated.
1385 # invalidated.
1385 self.destroyed()
1386 self.destroyed()
1386 return 0
1387 return 0
1387
1388
1388 def _buildcacheupdater(self, newtransaction):
1389 def _buildcacheupdater(self, newtransaction):
1389 """called during transaction to build the callback updating cache
1390 """called during transaction to build the callback updating cache
1390
1391
1391 Lives on the repository to help extension who might want to augment
1392 Lives on the repository to help extension who might want to augment
1392 this logic. For this purpose, the created transaction is passed to the
1393 this logic. For this purpose, the created transaction is passed to the
1393 method.
1394 method.
1394 """
1395 """
1395 # we must avoid cyclic reference between repo and transaction.
1396 # we must avoid cyclic reference between repo and transaction.
1396 reporef = weakref.ref(self)
1397 reporef = weakref.ref(self)
1397 def updater(tr):
1398 def updater(tr):
1398 repo = reporef()
1399 repo = reporef()
1399 repo.updatecaches(tr)
1400 repo.updatecaches(tr)
1400 return updater
1401 return updater
1401
1402
1402 @unfilteredmethod
1403 @unfilteredmethod
1403 def updatecaches(self, tr=None):
1404 def updatecaches(self, tr=None):
1404 """warm appropriate caches
1405 """warm appropriate caches
1405
1406
1406 If this function is called after a transaction closed. The transaction
1407 If this function is called after a transaction closed. The transaction
1407 will be available in the 'tr' argument. This can be used to selectively
1408 will be available in the 'tr' argument. This can be used to selectively
1408 update caches relevant to the changes in that transaction.
1409 update caches relevant to the changes in that transaction.
1409 """
1410 """
1410 if tr is not None and tr.hookargs.get('source') == 'strip':
1411 if tr is not None and tr.hookargs.get('source') == 'strip':
1411 # During strip, many caches are invalid but
1412 # During strip, many caches are invalid but
1412 # later call to `destroyed` will refresh them.
1413 # later call to `destroyed` will refresh them.
1413 return
1414 return
1414
1415
1415 if tr is None or tr.changes['revs']:
1416 if tr is None or tr.changes['revs']:
1416 # updating the unfiltered branchmap should refresh all the others,
1417 # updating the unfiltered branchmap should refresh all the others,
1417 self.ui.debug('updating the branch cache\n')
1418 self.ui.debug('updating the branch cache\n')
1418 branchmap.updatecache(self.filtered('served'))
1419 branchmap.updatecache(self.filtered('served'))
1419
1420
1420 def invalidatecaches(self):
1421 def invalidatecaches(self):
1421
1422
1422 if '_tagscache' in vars(self):
1423 if '_tagscache' in vars(self):
1423 # can't use delattr on proxy
1424 # can't use delattr on proxy
1424 del self.__dict__['_tagscache']
1425 del self.__dict__['_tagscache']
1425
1426
1426 self.unfiltered()._branchcaches.clear()
1427 self.unfiltered()._branchcaches.clear()
1427 self.invalidatevolatilesets()
1428 self.invalidatevolatilesets()
1428 self._sparsesignaturecache.clear()
1429 self._sparsesignaturecache.clear()
1429
1430
1430 def invalidatevolatilesets(self):
1431 def invalidatevolatilesets(self):
1431 self.filteredrevcache.clear()
1432 self.filteredrevcache.clear()
1432 obsolete.clearobscaches(self)
1433 obsolete.clearobscaches(self)
1433
1434
1434 def invalidatedirstate(self):
1435 def invalidatedirstate(self):
1435 '''Invalidates the dirstate, causing the next call to dirstate
1436 '''Invalidates the dirstate, causing the next call to dirstate
1436 to check if it was modified since the last time it was read,
1437 to check if it was modified since the last time it was read,
1437 rereading it if it has.
1438 rereading it if it has.
1438
1439
1439 This is different to dirstate.invalidate() that it doesn't always
1440 This is different to dirstate.invalidate() that it doesn't always
1440 rereads the dirstate. Use dirstate.invalidate() if you want to
1441 rereads the dirstate. Use dirstate.invalidate() if you want to
1441 explicitly read the dirstate again (i.e. restoring it to a previous
1442 explicitly read the dirstate again (i.e. restoring it to a previous
1442 known good state).'''
1443 known good state).'''
1443 if hasunfilteredcache(self, 'dirstate'):
1444 if hasunfilteredcache(self, 'dirstate'):
1444 for k in self.dirstate._filecache:
1445 for k in self.dirstate._filecache:
1445 try:
1446 try:
1446 delattr(self.dirstate, k)
1447 delattr(self.dirstate, k)
1447 except AttributeError:
1448 except AttributeError:
1448 pass
1449 pass
1449 delattr(self.unfiltered(), 'dirstate')
1450 delattr(self.unfiltered(), 'dirstate')
1450
1451
1451 def invalidate(self, clearfilecache=False):
1452 def invalidate(self, clearfilecache=False):
1452 '''Invalidates both store and non-store parts other than dirstate
1453 '''Invalidates both store and non-store parts other than dirstate
1453
1454
1454 If a transaction is running, invalidation of store is omitted,
1455 If a transaction is running, invalidation of store is omitted,
1455 because discarding in-memory changes might cause inconsistency
1456 because discarding in-memory changes might cause inconsistency
1456 (e.g. incomplete fncache causes unintentional failure, but
1457 (e.g. incomplete fncache causes unintentional failure, but
1457 redundant one doesn't).
1458 redundant one doesn't).
1458 '''
1459 '''
1459 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1460 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1460 for k in list(self._filecache.keys()):
1461 for k in list(self._filecache.keys()):
1461 # dirstate is invalidated separately in invalidatedirstate()
1462 # dirstate is invalidated separately in invalidatedirstate()
1462 if k == 'dirstate':
1463 if k == 'dirstate':
1463 continue
1464 continue
1464 if (k == 'changelog' and
1465 if (k == 'changelog' and
1465 self.currenttransaction() and
1466 self.currenttransaction() and
1466 self.changelog._delayed):
1467 self.changelog._delayed):
1467 # The changelog object may store unwritten revisions. We don't
1468 # The changelog object may store unwritten revisions. We don't
1468 # want to lose them.
1469 # want to lose them.
1469 # TODO: Solve the problem instead of working around it.
1470 # TODO: Solve the problem instead of working around it.
1470 continue
1471 continue
1471
1472
1472 if clearfilecache:
1473 if clearfilecache:
1473 del self._filecache[k]
1474 del self._filecache[k]
1474 try:
1475 try:
1475 delattr(unfiltered, k)
1476 delattr(unfiltered, k)
1476 except AttributeError:
1477 except AttributeError:
1477 pass
1478 pass
1478 self.invalidatecaches()
1479 self.invalidatecaches()
1479 if not self.currenttransaction():
1480 if not self.currenttransaction():
1480 # TODO: Changing contents of store outside transaction
1481 # TODO: Changing contents of store outside transaction
1481 # causes inconsistency. We should make in-memory store
1482 # causes inconsistency. We should make in-memory store
1482 # changes detectable, and abort if changed.
1483 # changes detectable, and abort if changed.
1483 self.store.invalidatecaches()
1484 self.store.invalidatecaches()
1484
1485
1485 def invalidateall(self):
1486 def invalidateall(self):
1486 '''Fully invalidates both store and non-store parts, causing the
1487 '''Fully invalidates both store and non-store parts, causing the
1487 subsequent operation to reread any outside changes.'''
1488 subsequent operation to reread any outside changes.'''
1488 # extension should hook this to invalidate its caches
1489 # extension should hook this to invalidate its caches
1489 self.invalidate()
1490 self.invalidate()
1490 self.invalidatedirstate()
1491 self.invalidatedirstate()
1491
1492
1492 @unfilteredmethod
1493 @unfilteredmethod
1493 def _refreshfilecachestats(self, tr):
1494 def _refreshfilecachestats(self, tr):
1494 """Reload stats of cached files so that they are flagged as valid"""
1495 """Reload stats of cached files so that they are flagged as valid"""
1495 for k, ce in self._filecache.items():
1496 for k, ce in self._filecache.items():
1496 if k == 'dirstate' or k not in self.__dict__:
1497 if k == 'dirstate' or k not in self.__dict__:
1497 continue
1498 continue
1498 ce.refresh()
1499 ce.refresh()
1499
1500
1500 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1501 def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
1501 inheritchecker=None, parentenvvar=None):
1502 inheritchecker=None, parentenvvar=None):
1502 parentlock = None
1503 parentlock = None
1503 # the contents of parentenvvar are used by the underlying lock to
1504 # the contents of parentenvvar are used by the underlying lock to
1504 # determine whether it can be inherited
1505 # determine whether it can be inherited
1505 if parentenvvar is not None:
1506 if parentenvvar is not None:
1506 parentlock = encoding.environ.get(parentenvvar)
1507 parentlock = encoding.environ.get(parentenvvar)
1507 try:
1508 try:
1508 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1509 l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn,
1509 acquirefn=acquirefn, desc=desc,
1510 acquirefn=acquirefn, desc=desc,
1510 inheritchecker=inheritchecker,
1511 inheritchecker=inheritchecker,
1511 parentlock=parentlock)
1512 parentlock=parentlock)
1512 except error.LockHeld as inst:
1513 except error.LockHeld as inst:
1513 if not wait:
1514 if not wait:
1514 raise
1515 raise
1515 # show more details for new-style locks
1516 # show more details for new-style locks
1516 if ':' in inst.locker:
1517 if ':' in inst.locker:
1517 host, pid = inst.locker.split(":", 1)
1518 host, pid = inst.locker.split(":", 1)
1518 self.ui.warn(
1519 self.ui.warn(
1519 _("waiting for lock on %s held by process %r "
1520 _("waiting for lock on %s held by process %r "
1520 "on host %r\n") % (desc, pid, host))
1521 "on host %r\n") % (desc, pid, host))
1521 else:
1522 else:
1522 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1523 self.ui.warn(_("waiting for lock on %s held by %r\n") %
1523 (desc, inst.locker))
1524 (desc, inst.locker))
1524 # default to 600 seconds timeout
1525 # default to 600 seconds timeout
1525 l = lockmod.lock(vfs, lockname,
1526 l = lockmod.lock(vfs, lockname,
1526 int(self.ui.config("ui", "timeout")),
1527 int(self.ui.config("ui", "timeout")),
1527 releasefn=releasefn, acquirefn=acquirefn,
1528 releasefn=releasefn, acquirefn=acquirefn,
1528 desc=desc)
1529 desc=desc)
1529 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1530 self.ui.warn(_("got lock after %s seconds\n") % l.delay)
1530 return l
1531 return l
1531
1532
1532 def _afterlock(self, callback):
1533 def _afterlock(self, callback):
1533 """add a callback to be run when the repository is fully unlocked
1534 """add a callback to be run when the repository is fully unlocked
1534
1535
1535 The callback will be executed when the outermost lock is released
1536 The callback will be executed when the outermost lock is released
1536 (with wlock being higher level than 'lock')."""
1537 (with wlock being higher level than 'lock')."""
1537 for ref in (self._wlockref, self._lockref):
1538 for ref in (self._wlockref, self._lockref):
1538 l = ref and ref()
1539 l = ref and ref()
1539 if l and l.held:
1540 if l and l.held:
1540 l.postrelease.append(callback)
1541 l.postrelease.append(callback)
1541 break
1542 break
1542 else: # no lock have been found.
1543 else: # no lock have been found.
1543 callback()
1544 callback()
1544
1545
1545 def lock(self, wait=True):
1546 def lock(self, wait=True):
1546 '''Lock the repository store (.hg/store) and return a weak reference
1547 '''Lock the repository store (.hg/store) and return a weak reference
1547 to the lock. Use this before modifying the store (e.g. committing or
1548 to the lock. Use this before modifying the store (e.g. committing or
1548 stripping). If you are opening a transaction, get a lock as well.)
1549 stripping). If you are opening a transaction, get a lock as well.)
1549
1550
1550 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1551 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1551 'wlock' first to avoid a dead-lock hazard.'''
1552 'wlock' first to avoid a dead-lock hazard.'''
1552 l = self._currentlock(self._lockref)
1553 l = self._currentlock(self._lockref)
1553 if l is not None:
1554 if l is not None:
1554 l.lock()
1555 l.lock()
1555 return l
1556 return l
1556
1557
1557 l = self._lock(self.svfs, "lock", wait, None,
1558 l = self._lock(self.svfs, "lock", wait, None,
1558 self.invalidate, _('repository %s') % self.origroot)
1559 self.invalidate, _('repository %s') % self.origroot)
1559 self._lockref = weakref.ref(l)
1560 self._lockref = weakref.ref(l)
1560 return l
1561 return l
1561
1562
1562 def _wlockchecktransaction(self):
1563 def _wlockchecktransaction(self):
1563 if self.currenttransaction() is not None:
1564 if self.currenttransaction() is not None:
1564 raise error.LockInheritanceContractViolation(
1565 raise error.LockInheritanceContractViolation(
1565 'wlock cannot be inherited in the middle of a transaction')
1566 'wlock cannot be inherited in the middle of a transaction')
1566
1567
1567 def wlock(self, wait=True):
1568 def wlock(self, wait=True):
1568 '''Lock the non-store parts of the repository (everything under
1569 '''Lock the non-store parts of the repository (everything under
1569 .hg except .hg/store) and return a weak reference to the lock.
1570 .hg except .hg/store) and return a weak reference to the lock.
1570
1571
1571 Use this before modifying files in .hg.
1572 Use this before modifying files in .hg.
1572
1573
1573 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1574 If both 'lock' and 'wlock' must be acquired, ensure you always acquires
1574 'wlock' first to avoid a dead-lock hazard.'''
1575 'wlock' first to avoid a dead-lock hazard.'''
1575 l = self._wlockref and self._wlockref()
1576 l = self._wlockref and self._wlockref()
1576 if l is not None and l.held:
1577 if l is not None and l.held:
1577 l.lock()
1578 l.lock()
1578 return l
1579 return l
1579
1580
1580 # We do not need to check for non-waiting lock acquisition. Such
1581 # We do not need to check for non-waiting lock acquisition. Such
1581 # acquisition would not cause dead-lock as they would just fail.
1582 # acquisition would not cause dead-lock as they would just fail.
1582 if wait and (self.ui.configbool('devel', 'all-warnings')
1583 if wait and (self.ui.configbool('devel', 'all-warnings')
1583 or self.ui.configbool('devel', 'check-locks')):
1584 or self.ui.configbool('devel', 'check-locks')):
1584 if self._currentlock(self._lockref) is not None:
1585 if self._currentlock(self._lockref) is not None:
1585 self.ui.develwarn('"wlock" acquired after "lock"')
1586 self.ui.develwarn('"wlock" acquired after "lock"')
1586
1587
1587 def unlock():
1588 def unlock():
1588 if self.dirstate.pendingparentchange():
1589 if self.dirstate.pendingparentchange():
1589 self.dirstate.invalidate()
1590 self.dirstate.invalidate()
1590 else:
1591 else:
1591 self.dirstate.write(None)
1592 self.dirstate.write(None)
1592
1593
1593 self._filecache['dirstate'].refresh()
1594 self._filecache['dirstate'].refresh()
1594
1595
1595 l = self._lock(self.vfs, "wlock", wait, unlock,
1596 l = self._lock(self.vfs, "wlock", wait, unlock,
1596 self.invalidatedirstate, _('working directory of %s') %
1597 self.invalidatedirstate, _('working directory of %s') %
1597 self.origroot,
1598 self.origroot,
1598 inheritchecker=self._wlockchecktransaction,
1599 inheritchecker=self._wlockchecktransaction,
1599 parentenvvar='HG_WLOCK_LOCKER')
1600 parentenvvar='HG_WLOCK_LOCKER')
1600 self._wlockref = weakref.ref(l)
1601 self._wlockref = weakref.ref(l)
1601 return l
1602 return l
1602
1603
1603 def _currentlock(self, lockref):
1604 def _currentlock(self, lockref):
1604 """Returns the lock if it's held, or None if it's not."""
1605 """Returns the lock if it's held, or None if it's not."""
1605 if lockref is None:
1606 if lockref is None:
1606 return None
1607 return None
1607 l = lockref()
1608 l = lockref()
1608 if l is None or not l.held:
1609 if l is None or not l.held:
1609 return None
1610 return None
1610 return l
1611 return l
1611
1612
1612 def currentwlock(self):
1613 def currentwlock(self):
1613 """Returns the wlock if it's held, or None if it's not."""
1614 """Returns the wlock if it's held, or None if it's not."""
1614 return self._currentlock(self._wlockref)
1615 return self._currentlock(self._wlockref)
1615
1616
1616 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1617 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
1617 """
1618 """
1618 commit an individual file as part of a larger transaction
1619 commit an individual file as part of a larger transaction
1619 """
1620 """
1620
1621
1621 fname = fctx.path()
1622 fname = fctx.path()
1622 fparent1 = manifest1.get(fname, nullid)
1623 fparent1 = manifest1.get(fname, nullid)
1623 fparent2 = manifest2.get(fname, nullid)
1624 fparent2 = manifest2.get(fname, nullid)
1624 if isinstance(fctx, context.filectx):
1625 if isinstance(fctx, context.filectx):
1625 node = fctx.filenode()
1626 node = fctx.filenode()
1626 if node in [fparent1, fparent2]:
1627 if node in [fparent1, fparent2]:
1627 self.ui.debug('reusing %s filelog entry\n' % fname)
1628 self.ui.debug('reusing %s filelog entry\n' % fname)
1628 if manifest1.flags(fname) != fctx.flags():
1629 if manifest1.flags(fname) != fctx.flags():
1629 changelist.append(fname)
1630 changelist.append(fname)
1630 return node
1631 return node
1631
1632
1632 flog = self.file(fname)
1633 flog = self.file(fname)
1633 meta = {}
1634 meta = {}
1634 copy = fctx.renamed()
1635 copy = fctx.renamed()
1635 if copy and copy[0] != fname:
1636 if copy and copy[0] != fname:
1636 # Mark the new revision of this file as a copy of another
1637 # Mark the new revision of this file as a copy of another
1637 # file. This copy data will effectively act as a parent
1638 # file. This copy data will effectively act as a parent
1638 # of this new revision. If this is a merge, the first
1639 # of this new revision. If this is a merge, the first
1639 # parent will be the nullid (meaning "look up the copy data")
1640 # parent will be the nullid (meaning "look up the copy data")
1640 # and the second one will be the other parent. For example:
1641 # and the second one will be the other parent. For example:
1641 #
1642 #
1642 # 0 --- 1 --- 3 rev1 changes file foo
1643 # 0 --- 1 --- 3 rev1 changes file foo
1643 # \ / rev2 renames foo to bar and changes it
1644 # \ / rev2 renames foo to bar and changes it
1644 # \- 2 -/ rev3 should have bar with all changes and
1645 # \- 2 -/ rev3 should have bar with all changes and
1645 # should record that bar descends from
1646 # should record that bar descends from
1646 # bar in rev2 and foo in rev1
1647 # bar in rev2 and foo in rev1
1647 #
1648 #
1648 # this allows this merge to succeed:
1649 # this allows this merge to succeed:
1649 #
1650 #
1650 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1651 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
1651 # \ / merging rev3 and rev4 should use bar@rev2
1652 # \ / merging rev3 and rev4 should use bar@rev2
1652 # \- 2 --- 4 as the merge base
1653 # \- 2 --- 4 as the merge base
1653 #
1654 #
1654
1655
1655 cfname = copy[0]
1656 cfname = copy[0]
1656 crev = manifest1.get(cfname)
1657 crev = manifest1.get(cfname)
1657 newfparent = fparent2
1658 newfparent = fparent2
1658
1659
1659 if manifest2: # branch merge
1660 if manifest2: # branch merge
1660 if fparent2 == nullid or crev is None: # copied on remote side
1661 if fparent2 == nullid or crev is None: # copied on remote side
1661 if cfname in manifest2:
1662 if cfname in manifest2:
1662 crev = manifest2[cfname]
1663 crev = manifest2[cfname]
1663 newfparent = fparent1
1664 newfparent = fparent1
1664
1665
1665 # Here, we used to search backwards through history to try to find
1666 # Here, we used to search backwards through history to try to find
1666 # where the file copy came from if the source of a copy was not in
1667 # where the file copy came from if the source of a copy was not in
1667 # the parent directory. However, this doesn't actually make sense to
1668 # the parent directory. However, this doesn't actually make sense to
1668 # do (what does a copy from something not in your working copy even
1669 # do (what does a copy from something not in your working copy even
1669 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1670 # mean?) and it causes bugs (eg, issue4476). Instead, we will warn
1670 # the user that copy information was dropped, so if they didn't
1671 # the user that copy information was dropped, so if they didn't
1671 # expect this outcome it can be fixed, but this is the correct
1672 # expect this outcome it can be fixed, but this is the correct
1672 # behavior in this circumstance.
1673 # behavior in this circumstance.
1673
1674
1674 if crev:
1675 if crev:
1675 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1676 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
1676 meta["copy"] = cfname
1677 meta["copy"] = cfname
1677 meta["copyrev"] = hex(crev)
1678 meta["copyrev"] = hex(crev)
1678 fparent1, fparent2 = nullid, newfparent
1679 fparent1, fparent2 = nullid, newfparent
1679 else:
1680 else:
1680 self.ui.warn(_("warning: can't find ancestor for '%s' "
1681 self.ui.warn(_("warning: can't find ancestor for '%s' "
1681 "copied from '%s'!\n") % (fname, cfname))
1682 "copied from '%s'!\n") % (fname, cfname))
1682
1683
1683 elif fparent1 == nullid:
1684 elif fparent1 == nullid:
1684 fparent1, fparent2 = fparent2, nullid
1685 fparent1, fparent2 = fparent2, nullid
1685 elif fparent2 != nullid:
1686 elif fparent2 != nullid:
1686 # is one parent an ancestor of the other?
1687 # is one parent an ancestor of the other?
1687 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1688 fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
1688 if fparent1 in fparentancestors:
1689 if fparent1 in fparentancestors:
1689 fparent1, fparent2 = fparent2, nullid
1690 fparent1, fparent2 = fparent2, nullid
1690 elif fparent2 in fparentancestors:
1691 elif fparent2 in fparentancestors:
1691 fparent2 = nullid
1692 fparent2 = nullid
1692
1693
1693 # is the file changed?
1694 # is the file changed?
1694 text = fctx.data()
1695 text = fctx.data()
1695 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1696 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
1696 changelist.append(fname)
1697 changelist.append(fname)
1697 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1698 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
1698 # are just the flags changed during merge?
1699 # are just the flags changed during merge?
1699 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1700 elif fname in manifest1 and manifest1.flags(fname) != fctx.flags():
1700 changelist.append(fname)
1701 changelist.append(fname)
1701
1702
1702 return fparent1
1703 return fparent1
1703
1704
1704 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1705 def checkcommitpatterns(self, wctx, vdirs, match, status, fail):
1705 """check for commit arguments that aren't committable"""
1706 """check for commit arguments that aren't committable"""
1706 if match.isexact() or match.prefix():
1707 if match.isexact() or match.prefix():
1707 matched = set(status.modified + status.added + status.removed)
1708 matched = set(status.modified + status.added + status.removed)
1708
1709
1709 for f in match.files():
1710 for f in match.files():
1710 f = self.dirstate.normalize(f)
1711 f = self.dirstate.normalize(f)
1711 if f == '.' or f in matched or f in wctx.substate:
1712 if f == '.' or f in matched or f in wctx.substate:
1712 continue
1713 continue
1713 if f in status.deleted:
1714 if f in status.deleted:
1714 fail(f, _('file not found!'))
1715 fail(f, _('file not found!'))
1715 if f in vdirs: # visited directory
1716 if f in vdirs: # visited directory
1716 d = f + '/'
1717 d = f + '/'
1717 for mf in matched:
1718 for mf in matched:
1718 if mf.startswith(d):
1719 if mf.startswith(d):
1719 break
1720 break
1720 else:
1721 else:
1721 fail(f, _("no match under directory!"))
1722 fail(f, _("no match under directory!"))
1722 elif f not in self.dirstate:
1723 elif f not in self.dirstate:
1723 fail(f, _("file not tracked!"))
1724 fail(f, _("file not tracked!"))
1724
1725
1725 @unfilteredmethod
1726 @unfilteredmethod
1726 def commit(self, text="", user=None, date=None, match=None, force=False,
1727 def commit(self, text="", user=None, date=None, match=None, force=False,
1727 editor=False, extra=None):
1728 editor=False, extra=None):
1728 """Add a new revision to current repository.
1729 """Add a new revision to current repository.
1729
1730
1730 Revision information is gathered from the working directory,
1731 Revision information is gathered from the working directory,
1731 match can be used to filter the committed files. If editor is
1732 match can be used to filter the committed files. If editor is
1732 supplied, it is called to get a commit message.
1733 supplied, it is called to get a commit message.
1733 """
1734 """
1734 if extra is None:
1735 if extra is None:
1735 extra = {}
1736 extra = {}
1736
1737
1737 def fail(f, msg):
1738 def fail(f, msg):
1738 raise error.Abort('%s: %s' % (f, msg))
1739 raise error.Abort('%s: %s' % (f, msg))
1739
1740
1740 if not match:
1741 if not match:
1741 match = matchmod.always(self.root, '')
1742 match = matchmod.always(self.root, '')
1742
1743
1743 if not force:
1744 if not force:
1744 vdirs = []
1745 vdirs = []
1745 match.explicitdir = vdirs.append
1746 match.explicitdir = vdirs.append
1746 match.bad = fail
1747 match.bad = fail
1747
1748
1748 wlock = lock = tr = None
1749 wlock = lock = tr = None
1749 try:
1750 try:
1750 wlock = self.wlock()
1751 wlock = self.wlock()
1751 lock = self.lock() # for recent changelog (see issue4368)
1752 lock = self.lock() # for recent changelog (see issue4368)
1752
1753
1753 wctx = self[None]
1754 wctx = self[None]
1754 merge = len(wctx.parents()) > 1
1755 merge = len(wctx.parents()) > 1
1755
1756
1756 if not force and merge and not match.always():
1757 if not force and merge and not match.always():
1757 raise error.Abort(_('cannot partially commit a merge '
1758 raise error.Abort(_('cannot partially commit a merge '
1758 '(do not specify files or patterns)'))
1759 '(do not specify files or patterns)'))
1759
1760
1760 status = self.status(match=match, clean=force)
1761 status = self.status(match=match, clean=force)
1761 if force:
1762 if force:
1762 status.modified.extend(status.clean) # mq may commit clean files
1763 status.modified.extend(status.clean) # mq may commit clean files
1763
1764
1764 # check subrepos
1765 # check subrepos
1765 subs = []
1766 subs = []
1766 commitsubs = set()
1767 commitsubs = set()
1767 newstate = wctx.substate.copy()
1768 newstate = wctx.substate.copy()
1768 # only manage subrepos and .hgsubstate if .hgsub is present
1769 # only manage subrepos and .hgsubstate if .hgsub is present
1769 if '.hgsub' in wctx:
1770 if '.hgsub' in wctx:
1770 # we'll decide whether to track this ourselves, thanks
1771 # we'll decide whether to track this ourselves, thanks
1771 for c in status.modified, status.added, status.removed:
1772 for c in status.modified, status.added, status.removed:
1772 if '.hgsubstate' in c:
1773 if '.hgsubstate' in c:
1773 c.remove('.hgsubstate')
1774 c.remove('.hgsubstate')
1774
1775
1775 # compare current state to last committed state
1776 # compare current state to last committed state
1776 # build new substate based on last committed state
1777 # build new substate based on last committed state
1777 oldstate = wctx.p1().substate
1778 oldstate = wctx.p1().substate
1778 for s in sorted(newstate.keys()):
1779 for s in sorted(newstate.keys()):
1779 if not match(s):
1780 if not match(s):
1780 # ignore working copy, use old state if present
1781 # ignore working copy, use old state if present
1781 if s in oldstate:
1782 if s in oldstate:
1782 newstate[s] = oldstate[s]
1783 newstate[s] = oldstate[s]
1783 continue
1784 continue
1784 if not force:
1785 if not force:
1785 raise error.Abort(
1786 raise error.Abort(
1786 _("commit with new subrepo %s excluded") % s)
1787 _("commit with new subrepo %s excluded") % s)
1787 dirtyreason = wctx.sub(s).dirtyreason(True)
1788 dirtyreason = wctx.sub(s).dirtyreason(True)
1788 if dirtyreason:
1789 if dirtyreason:
1789 if not self.ui.configbool('ui', 'commitsubrepos'):
1790 if not self.ui.configbool('ui', 'commitsubrepos'):
1790 raise error.Abort(dirtyreason,
1791 raise error.Abort(dirtyreason,
1791 hint=_("use --subrepos for recursive commit"))
1792 hint=_("use --subrepos for recursive commit"))
1792 subs.append(s)
1793 subs.append(s)
1793 commitsubs.add(s)
1794 commitsubs.add(s)
1794 else:
1795 else:
1795 bs = wctx.sub(s).basestate()
1796 bs = wctx.sub(s).basestate()
1796 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1797 newstate[s] = (newstate[s][0], bs, newstate[s][2])
1797 if oldstate.get(s, (None, None, None))[1] != bs:
1798 if oldstate.get(s, (None, None, None))[1] != bs:
1798 subs.append(s)
1799 subs.append(s)
1799
1800
1800 # check for removed subrepos
1801 # check for removed subrepos
1801 for p in wctx.parents():
1802 for p in wctx.parents():
1802 r = [s for s in p.substate if s not in newstate]
1803 r = [s for s in p.substate if s not in newstate]
1803 subs += [s for s in r if match(s)]
1804 subs += [s for s in r if match(s)]
1804 if subs:
1805 if subs:
1805 if (not match('.hgsub') and
1806 if (not match('.hgsub') and
1806 '.hgsub' in (wctx.modified() + wctx.added())):
1807 '.hgsub' in (wctx.modified() + wctx.added())):
1807 raise error.Abort(
1808 raise error.Abort(
1808 _("can't commit subrepos without .hgsub"))
1809 _("can't commit subrepos without .hgsub"))
1809 status.modified.insert(0, '.hgsubstate')
1810 status.modified.insert(0, '.hgsubstate')
1810
1811
1811 elif '.hgsub' in status.removed:
1812 elif '.hgsub' in status.removed:
1812 # clean up .hgsubstate when .hgsub is removed
1813 # clean up .hgsubstate when .hgsub is removed
1813 if ('.hgsubstate' in wctx and
1814 if ('.hgsubstate' in wctx and
1814 '.hgsubstate' not in (status.modified + status.added +
1815 '.hgsubstate' not in (status.modified + status.added +
1815 status.removed)):
1816 status.removed)):
1816 status.removed.insert(0, '.hgsubstate')
1817 status.removed.insert(0, '.hgsubstate')
1817
1818
1818 # make sure all explicit patterns are matched
1819 # make sure all explicit patterns are matched
1819 if not force:
1820 if not force:
1820 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1821 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
1821
1822
1822 cctx = context.workingcommitctx(self, status,
1823 cctx = context.workingcommitctx(self, status,
1823 text, user, date, extra)
1824 text, user, date, extra)
1824
1825
1825 # internal config: ui.allowemptycommit
1826 # internal config: ui.allowemptycommit
1826 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1827 allowemptycommit = (wctx.branch() != wctx.p1().branch()
1827 or extra.get('close') or merge or cctx.files()
1828 or extra.get('close') or merge or cctx.files()
1828 or self.ui.configbool('ui', 'allowemptycommit'))
1829 or self.ui.configbool('ui', 'allowemptycommit'))
1829 if not allowemptycommit:
1830 if not allowemptycommit:
1830 return None
1831 return None
1831
1832
1832 if merge and cctx.deleted():
1833 if merge and cctx.deleted():
1833 raise error.Abort(_("cannot commit merge with missing files"))
1834 raise error.Abort(_("cannot commit merge with missing files"))
1834
1835
1835 ms = mergemod.mergestate.read(self)
1836 ms = mergemod.mergestate.read(self)
1836 mergeutil.checkunresolved(ms)
1837 mergeutil.checkunresolved(ms)
1837
1838
1838 if editor:
1839 if editor:
1839 cctx._text = editor(self, cctx, subs)
1840 cctx._text = editor(self, cctx, subs)
1840 edited = (text != cctx._text)
1841 edited = (text != cctx._text)
1841
1842
1842 # Save commit message in case this transaction gets rolled back
1843 # Save commit message in case this transaction gets rolled back
1843 # (e.g. by a pretxncommit hook). Leave the content alone on
1844 # (e.g. by a pretxncommit hook). Leave the content alone on
1844 # the assumption that the user will use the same editor again.
1845 # the assumption that the user will use the same editor again.
1845 msgfn = self.savecommitmessage(cctx._text)
1846 msgfn = self.savecommitmessage(cctx._text)
1846
1847
1847 # commit subs and write new state
1848 # commit subs and write new state
1848 if subs:
1849 if subs:
1849 for s in sorted(commitsubs):
1850 for s in sorted(commitsubs):
1850 sub = wctx.sub(s)
1851 sub = wctx.sub(s)
1851 self.ui.status(_('committing subrepository %s\n') %
1852 self.ui.status(_('committing subrepository %s\n') %
1852 subrepo.subrelpath(sub))
1853 subrepo.subrelpath(sub))
1853 sr = sub.commit(cctx._text, user, date)
1854 sr = sub.commit(cctx._text, user, date)
1854 newstate[s] = (newstate[s][0], sr)
1855 newstate[s] = (newstate[s][0], sr)
1855 subrepo.writestate(self, newstate)
1856 subrepo.writestate(self, newstate)
1856
1857
1857 p1, p2 = self.dirstate.parents()
1858 p1, p2 = self.dirstate.parents()
1858 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1859 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1859 try:
1860 try:
1860 self.hook("precommit", throw=True, parent1=hookp1,
1861 self.hook("precommit", throw=True, parent1=hookp1,
1861 parent2=hookp2)
1862 parent2=hookp2)
1862 tr = self.transaction('commit')
1863 tr = self.transaction('commit')
1863 ret = self.commitctx(cctx, True)
1864 ret = self.commitctx(cctx, True)
1864 except: # re-raises
1865 except: # re-raises
1865 if edited:
1866 if edited:
1866 self.ui.write(
1867 self.ui.write(
1867 _('note: commit message saved in %s\n') % msgfn)
1868 _('note: commit message saved in %s\n') % msgfn)
1868 raise
1869 raise
1869 # update bookmarks, dirstate and mergestate
1870 # update bookmarks, dirstate and mergestate
1870 bookmarks.update(self, [p1, p2], ret)
1871 bookmarks.update(self, [p1, p2], ret)
1871 cctx.markcommitted(ret)
1872 cctx.markcommitted(ret)
1872 ms.reset()
1873 ms.reset()
1873 tr.close()
1874 tr.close()
1874
1875
1875 finally:
1876 finally:
1876 lockmod.release(tr, lock, wlock)
1877 lockmod.release(tr, lock, wlock)
1877
1878
1878 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1879 def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
1879 # hack for command that use a temporary commit (eg: histedit)
1880 # hack for command that use a temporary commit (eg: histedit)
1880 # temporary commit got stripped before hook release
1881 # temporary commit got stripped before hook release
1881 if self.changelog.hasnode(ret):
1882 if self.changelog.hasnode(ret):
1882 self.hook("commit", node=node, parent1=parent1,
1883 self.hook("commit", node=node, parent1=parent1,
1883 parent2=parent2)
1884 parent2=parent2)
1884 self._afterlock(commithook)
1885 self._afterlock(commithook)
1885 return ret
1886 return ret
1886
1887
1887 @unfilteredmethod
1888 @unfilteredmethod
1888 def commitctx(self, ctx, error=False):
1889 def commitctx(self, ctx, error=False):
1889 """Add a new revision to current repository.
1890 """Add a new revision to current repository.
1890 Revision information is passed via the context argument.
1891 Revision information is passed via the context argument.
1891 """
1892 """
1892
1893
1893 tr = None
1894 tr = None
1894 p1, p2 = ctx.p1(), ctx.p2()
1895 p1, p2 = ctx.p1(), ctx.p2()
1895 user = ctx.user()
1896 user = ctx.user()
1896
1897
1897 lock = self.lock()
1898 lock = self.lock()
1898 try:
1899 try:
1899 tr = self.transaction("commit")
1900 tr = self.transaction("commit")
1900 trp = weakref.proxy(tr)
1901 trp = weakref.proxy(tr)
1901
1902
1902 if ctx.manifestnode():
1903 if ctx.manifestnode():
1903 # reuse an existing manifest revision
1904 # reuse an existing manifest revision
1904 mn = ctx.manifestnode()
1905 mn = ctx.manifestnode()
1905 files = ctx.files()
1906 files = ctx.files()
1906 elif ctx.files():
1907 elif ctx.files():
1907 m1ctx = p1.manifestctx()
1908 m1ctx = p1.manifestctx()
1908 m2ctx = p2.manifestctx()
1909 m2ctx = p2.manifestctx()
1909 mctx = m1ctx.copy()
1910 mctx = m1ctx.copy()
1910
1911
1911 m = mctx.read()
1912 m = mctx.read()
1912 m1 = m1ctx.read()
1913 m1 = m1ctx.read()
1913 m2 = m2ctx.read()
1914 m2 = m2ctx.read()
1914
1915
1915 # check in files
1916 # check in files
1916 added = []
1917 added = []
1917 changed = []
1918 changed = []
1918 removed = list(ctx.removed())
1919 removed = list(ctx.removed())
1919 linkrev = len(self)
1920 linkrev = len(self)
1920 self.ui.note(_("committing files:\n"))
1921 self.ui.note(_("committing files:\n"))
1921 for f in sorted(ctx.modified() + ctx.added()):
1922 for f in sorted(ctx.modified() + ctx.added()):
1922 self.ui.note(f + "\n")
1923 self.ui.note(f + "\n")
1923 try:
1924 try:
1924 fctx = ctx[f]
1925 fctx = ctx[f]
1925 if fctx is None:
1926 if fctx is None:
1926 removed.append(f)
1927 removed.append(f)
1927 else:
1928 else:
1928 added.append(f)
1929 added.append(f)
1929 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1930 m[f] = self._filecommit(fctx, m1, m2, linkrev,
1930 trp, changed)
1931 trp, changed)
1931 m.setflag(f, fctx.flags())
1932 m.setflag(f, fctx.flags())
1932 except OSError as inst:
1933 except OSError as inst:
1933 self.ui.warn(_("trouble committing %s!\n") % f)
1934 self.ui.warn(_("trouble committing %s!\n") % f)
1934 raise
1935 raise
1935 except IOError as inst:
1936 except IOError as inst:
1936 errcode = getattr(inst, 'errno', errno.ENOENT)
1937 errcode = getattr(inst, 'errno', errno.ENOENT)
1937 if error or errcode and errcode != errno.ENOENT:
1938 if error or errcode and errcode != errno.ENOENT:
1938 self.ui.warn(_("trouble committing %s!\n") % f)
1939 self.ui.warn(_("trouble committing %s!\n") % f)
1939 raise
1940 raise
1940
1941
1941 # update manifest
1942 # update manifest
1942 self.ui.note(_("committing manifest\n"))
1943 self.ui.note(_("committing manifest\n"))
1943 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1944 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1944 drop = [f for f in removed if f in m]
1945 drop = [f for f in removed if f in m]
1945 for f in drop:
1946 for f in drop:
1946 del m[f]
1947 del m[f]
1947 mn = mctx.write(trp, linkrev,
1948 mn = mctx.write(trp, linkrev,
1948 p1.manifestnode(), p2.manifestnode(),
1949 p1.manifestnode(), p2.manifestnode(),
1949 added, drop)
1950 added, drop)
1950 files = changed + removed
1951 files = changed + removed
1951 else:
1952 else:
1952 mn = p1.manifestnode()
1953 mn = p1.manifestnode()
1953 files = []
1954 files = []
1954
1955
1955 # update changelog
1956 # update changelog
1956 self.ui.note(_("committing changelog\n"))
1957 self.ui.note(_("committing changelog\n"))
1957 self.changelog.delayupdate(tr)
1958 self.changelog.delayupdate(tr)
1958 n = self.changelog.add(mn, files, ctx.description(),
1959 n = self.changelog.add(mn, files, ctx.description(),
1959 trp, p1.node(), p2.node(),
1960 trp, p1.node(), p2.node(),
1960 user, ctx.date(), ctx.extra().copy())
1961 user, ctx.date(), ctx.extra().copy())
1961 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1962 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1962 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1963 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1963 parent2=xp2)
1964 parent2=xp2)
1964 # set the new commit is proper phase
1965 # set the new commit is proper phase
1965 targetphase = subrepo.newcommitphase(self.ui, ctx)
1966 targetphase = subrepo.newcommitphase(self.ui, ctx)
1966 if targetphase:
1967 if targetphase:
1967 # retract boundary do not alter parent changeset.
1968 # retract boundary do not alter parent changeset.
1968 # if a parent have higher the resulting phase will
1969 # if a parent have higher the resulting phase will
1969 # be compliant anyway
1970 # be compliant anyway
1970 #
1971 #
1971 # if minimal phase was 0 we don't need to retract anything
1972 # if minimal phase was 0 we don't need to retract anything
1972 phases.registernew(self, tr, targetphase, [n])
1973 phases.registernew(self, tr, targetphase, [n])
1973 tr.close()
1974 tr.close()
1974 return n
1975 return n
1975 finally:
1976 finally:
1976 if tr:
1977 if tr:
1977 tr.release()
1978 tr.release()
1978 lock.release()
1979 lock.release()
1979
1980
1980 @unfilteredmethod
1981 @unfilteredmethod
1981 def destroying(self):
1982 def destroying(self):
1982 '''Inform the repository that nodes are about to be destroyed.
1983 '''Inform the repository that nodes are about to be destroyed.
1983 Intended for use by strip and rollback, so there's a common
1984 Intended for use by strip and rollback, so there's a common
1984 place for anything that has to be done before destroying history.
1985 place for anything that has to be done before destroying history.
1985
1986
1986 This is mostly useful for saving state that is in memory and waiting
1987 This is mostly useful for saving state that is in memory and waiting
1987 to be flushed when the current lock is released. Because a call to
1988 to be flushed when the current lock is released. Because a call to
1988 destroyed is imminent, the repo will be invalidated causing those
1989 destroyed is imminent, the repo will be invalidated causing those
1989 changes to stay in memory (waiting for the next unlock), or vanish
1990 changes to stay in memory (waiting for the next unlock), or vanish
1990 completely.
1991 completely.
1991 '''
1992 '''
1992 # When using the same lock to commit and strip, the phasecache is left
1993 # When using the same lock to commit and strip, the phasecache is left
1993 # dirty after committing. Then when we strip, the repo is invalidated,
1994 # dirty after committing. Then when we strip, the repo is invalidated,
1994 # causing those changes to disappear.
1995 # causing those changes to disappear.
1995 if '_phasecache' in vars(self):
1996 if '_phasecache' in vars(self):
1996 self._phasecache.write()
1997 self._phasecache.write()
1997
1998
1998 @unfilteredmethod
1999 @unfilteredmethod
1999 def destroyed(self):
2000 def destroyed(self):
2000 '''Inform the repository that nodes have been destroyed.
2001 '''Inform the repository that nodes have been destroyed.
2001 Intended for use by strip and rollback, so there's a common
2002 Intended for use by strip and rollback, so there's a common
2002 place for anything that has to be done after destroying history.
2003 place for anything that has to be done after destroying history.
2003 '''
2004 '''
2004 # When one tries to:
2005 # When one tries to:
2005 # 1) destroy nodes thus calling this method (e.g. strip)
2006 # 1) destroy nodes thus calling this method (e.g. strip)
2006 # 2) use phasecache somewhere (e.g. commit)
2007 # 2) use phasecache somewhere (e.g. commit)
2007 #
2008 #
2008 # then 2) will fail because the phasecache contains nodes that were
2009 # then 2) will fail because the phasecache contains nodes that were
2009 # removed. We can either remove phasecache from the filecache,
2010 # removed. We can either remove phasecache from the filecache,
2010 # causing it to reload next time it is accessed, or simply filter
2011 # causing it to reload next time it is accessed, or simply filter
2011 # the removed nodes now and write the updated cache.
2012 # the removed nodes now and write the updated cache.
2012 self._phasecache.filterunknown(self)
2013 self._phasecache.filterunknown(self)
2013 self._phasecache.write()
2014 self._phasecache.write()
2014
2015
2015 # refresh all repository caches
2016 # refresh all repository caches
2016 self.updatecaches()
2017 self.updatecaches()
2017
2018
2018 # Ensure the persistent tag cache is updated. Doing it now
2019 # Ensure the persistent tag cache is updated. Doing it now
2019 # means that the tag cache only has to worry about destroyed
2020 # means that the tag cache only has to worry about destroyed
2020 # heads immediately after a strip/rollback. That in turn
2021 # heads immediately after a strip/rollback. That in turn
2021 # guarantees that "cachetip == currenttip" (comparing both rev
2022 # guarantees that "cachetip == currenttip" (comparing both rev
2022 # and node) always means no nodes have been added or destroyed.
2023 # and node) always means no nodes have been added or destroyed.
2023
2024
2024 # XXX this is suboptimal when qrefresh'ing: we strip the current
2025 # XXX this is suboptimal when qrefresh'ing: we strip the current
2025 # head, refresh the tag cache, then immediately add a new head.
2026 # head, refresh the tag cache, then immediately add a new head.
2026 # But I think doing it this way is necessary for the "instant
2027 # But I think doing it this way is necessary for the "instant
2027 # tag cache retrieval" case to work.
2028 # tag cache retrieval" case to work.
2028 self.invalidate()
2029 self.invalidate()
2029
2030
2030 def walk(self, match, node=None):
2031 def walk(self, match, node=None):
2031 '''
2032 '''
2032 walk recursively through the directory tree or a given
2033 walk recursively through the directory tree or a given
2033 changeset, finding all files matched by the match
2034 changeset, finding all files matched by the match
2034 function
2035 function
2035 '''
2036 '''
2036 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2037 self.ui.deprecwarn('use repo[node].walk instead of repo.walk', '4.3')
2037 return self[node].walk(match)
2038 return self[node].walk(match)
2038
2039
2039 def status(self, node1='.', node2=None, match=None,
2040 def status(self, node1='.', node2=None, match=None,
2040 ignored=False, clean=False, unknown=False,
2041 ignored=False, clean=False, unknown=False,
2041 listsubrepos=False):
2042 listsubrepos=False):
2042 '''a convenience method that calls node1.status(node2)'''
2043 '''a convenience method that calls node1.status(node2)'''
2043 return self[node1].status(node2, match, ignored, clean, unknown,
2044 return self[node1].status(node2, match, ignored, clean, unknown,
2044 listsubrepos)
2045 listsubrepos)
2045
2046
2046 def addpostdsstatus(self, ps):
2047 def addpostdsstatus(self, ps):
2047 """Add a callback to run within the wlock, at the point at which status
2048 """Add a callback to run within the wlock, at the point at which status
2048 fixups happen.
2049 fixups happen.
2049
2050
2050 On status completion, callback(wctx, status) will be called with the
2051 On status completion, callback(wctx, status) will be called with the
2051 wlock held, unless the dirstate has changed from underneath or the wlock
2052 wlock held, unless the dirstate has changed from underneath or the wlock
2052 couldn't be grabbed.
2053 couldn't be grabbed.
2053
2054
2054 Callbacks should not capture and use a cached copy of the dirstate --
2055 Callbacks should not capture and use a cached copy of the dirstate --
2055 it might change in the meanwhile. Instead, they should access the
2056 it might change in the meanwhile. Instead, they should access the
2056 dirstate via wctx.repo().dirstate.
2057 dirstate via wctx.repo().dirstate.
2057
2058
2058 This list is emptied out after each status run -- extensions should
2059 This list is emptied out after each status run -- extensions should
2059 make sure it adds to this list each time dirstate.status is called.
2060 make sure it adds to this list each time dirstate.status is called.
2060 Extensions should also make sure they don't call this for statuses
2061 Extensions should also make sure they don't call this for statuses
2061 that don't involve the dirstate.
2062 that don't involve the dirstate.
2062 """
2063 """
2063
2064
2064 # The list is located here for uniqueness reasons -- it is actually
2065 # The list is located here for uniqueness reasons -- it is actually
2065 # managed by the workingctx, but that isn't unique per-repo.
2066 # managed by the workingctx, but that isn't unique per-repo.
2066 self._postdsstatus.append(ps)
2067 self._postdsstatus.append(ps)
2067
2068
2068 def postdsstatus(self):
2069 def postdsstatus(self):
2069 """Used by workingctx to get the list of post-dirstate-status hooks."""
2070 """Used by workingctx to get the list of post-dirstate-status hooks."""
2070 return self._postdsstatus
2071 return self._postdsstatus
2071
2072
2072 def clearpostdsstatus(self):
2073 def clearpostdsstatus(self):
2073 """Used by workingctx to clear post-dirstate-status hooks."""
2074 """Used by workingctx to clear post-dirstate-status hooks."""
2074 del self._postdsstatus[:]
2075 del self._postdsstatus[:]
2075
2076
2076 def heads(self, start=None):
2077 def heads(self, start=None):
2077 if start is None:
2078 if start is None:
2078 cl = self.changelog
2079 cl = self.changelog
2079 headrevs = reversed(cl.headrevs())
2080 headrevs = reversed(cl.headrevs())
2080 return [cl.node(rev) for rev in headrevs]
2081 return [cl.node(rev) for rev in headrevs]
2081
2082
2082 heads = self.changelog.heads(start)
2083 heads = self.changelog.heads(start)
2083 # sort the output in rev descending order
2084 # sort the output in rev descending order
2084 return sorted(heads, key=self.changelog.rev, reverse=True)
2085 return sorted(heads, key=self.changelog.rev, reverse=True)
2085
2086
2086 def branchheads(self, branch=None, start=None, closed=False):
2087 def branchheads(self, branch=None, start=None, closed=False):
2087 '''return a (possibly filtered) list of heads for the given branch
2088 '''return a (possibly filtered) list of heads for the given branch
2088
2089
2089 Heads are returned in topological order, from newest to oldest.
2090 Heads are returned in topological order, from newest to oldest.
2090 If branch is None, use the dirstate branch.
2091 If branch is None, use the dirstate branch.
2091 If start is not None, return only heads reachable from start.
2092 If start is not None, return only heads reachable from start.
2092 If closed is True, return heads that are marked as closed as well.
2093 If closed is True, return heads that are marked as closed as well.
2093 '''
2094 '''
2094 if branch is None:
2095 if branch is None:
2095 branch = self[None].branch()
2096 branch = self[None].branch()
2096 branches = self.branchmap()
2097 branches = self.branchmap()
2097 if branch not in branches:
2098 if branch not in branches:
2098 return []
2099 return []
2099 # the cache returns heads ordered lowest to highest
2100 # the cache returns heads ordered lowest to highest
2100 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2101 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
2101 if start is not None:
2102 if start is not None:
2102 # filter out the heads that cannot be reached from startrev
2103 # filter out the heads that cannot be reached from startrev
2103 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2104 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
2104 bheads = [h for h in bheads if h in fbheads]
2105 bheads = [h for h in bheads if h in fbheads]
2105 return bheads
2106 return bheads
2106
2107
2107 def branches(self, nodes):
2108 def branches(self, nodes):
2108 if not nodes:
2109 if not nodes:
2109 nodes = [self.changelog.tip()]
2110 nodes = [self.changelog.tip()]
2110 b = []
2111 b = []
2111 for n in nodes:
2112 for n in nodes:
2112 t = n
2113 t = n
2113 while True:
2114 while True:
2114 p = self.changelog.parents(n)
2115 p = self.changelog.parents(n)
2115 if p[1] != nullid or p[0] == nullid:
2116 if p[1] != nullid or p[0] == nullid:
2116 b.append((t, n, p[0], p[1]))
2117 b.append((t, n, p[0], p[1]))
2117 break
2118 break
2118 n = p[0]
2119 n = p[0]
2119 return b
2120 return b
2120
2121
2121 def between(self, pairs):
2122 def between(self, pairs):
2122 r = []
2123 r = []
2123
2124
2124 for top, bottom in pairs:
2125 for top, bottom in pairs:
2125 n, l, i = top, [], 0
2126 n, l, i = top, [], 0
2126 f = 1
2127 f = 1
2127
2128
2128 while n != bottom and n != nullid:
2129 while n != bottom and n != nullid:
2129 p = self.changelog.parents(n)[0]
2130 p = self.changelog.parents(n)[0]
2130 if i == f:
2131 if i == f:
2131 l.append(n)
2132 l.append(n)
2132 f = f * 2
2133 f = f * 2
2133 n = p
2134 n = p
2134 i += 1
2135 i += 1
2135
2136
2136 r.append(l)
2137 r.append(l)
2137
2138
2138 return r
2139 return r
2139
2140
2140 def checkpush(self, pushop):
2141 def checkpush(self, pushop):
2141 """Extensions can override this function if additional checks have
2142 """Extensions can override this function if additional checks have
2142 to be performed before pushing, or call it if they override push
2143 to be performed before pushing, or call it if they override push
2143 command.
2144 command.
2144 """
2145 """
2145 pass
2146 pass
2146
2147
2147 @unfilteredpropertycache
2148 @unfilteredpropertycache
2148 def prepushoutgoinghooks(self):
2149 def prepushoutgoinghooks(self):
2149 """Return util.hooks consists of a pushop with repo, remote, outgoing
2150 """Return util.hooks consists of a pushop with repo, remote, outgoing
2150 methods, which are called before pushing changesets.
2151 methods, which are called before pushing changesets.
2151 """
2152 """
2152 return util.hooks()
2153 return util.hooks()
2153
2154
2154 def pushkey(self, namespace, key, old, new):
2155 def pushkey(self, namespace, key, old, new):
2155 try:
2156 try:
2156 tr = self.currenttransaction()
2157 tr = self.currenttransaction()
2157 hookargs = {}
2158 hookargs = {}
2158 if tr is not None:
2159 if tr is not None:
2159 hookargs.update(tr.hookargs)
2160 hookargs.update(tr.hookargs)
2160 hookargs['namespace'] = namespace
2161 hookargs['namespace'] = namespace
2161 hookargs['key'] = key
2162 hookargs['key'] = key
2162 hookargs['old'] = old
2163 hookargs['old'] = old
2163 hookargs['new'] = new
2164 hookargs['new'] = new
2164 self.hook('prepushkey', throw=True, **hookargs)
2165 self.hook('prepushkey', throw=True, **hookargs)
2165 except error.HookAbort as exc:
2166 except error.HookAbort as exc:
2166 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2167 self.ui.write_err(_("pushkey-abort: %s\n") % exc)
2167 if exc.hint:
2168 if exc.hint:
2168 self.ui.write_err(_("(%s)\n") % exc.hint)
2169 self.ui.write_err(_("(%s)\n") % exc.hint)
2169 return False
2170 return False
2170 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2171 self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
2171 ret = pushkey.push(self, namespace, key, old, new)
2172 ret = pushkey.push(self, namespace, key, old, new)
2172 def runhook():
2173 def runhook():
2173 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2174 self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
2174 ret=ret)
2175 ret=ret)
2175 self._afterlock(runhook)
2176 self._afterlock(runhook)
2176 return ret
2177 return ret
2177
2178
2178 def listkeys(self, namespace):
2179 def listkeys(self, namespace):
2179 self.hook('prelistkeys', throw=True, namespace=namespace)
2180 self.hook('prelistkeys', throw=True, namespace=namespace)
2180 self.ui.debug('listing keys for "%s"\n' % namespace)
2181 self.ui.debug('listing keys for "%s"\n' % namespace)
2181 values = pushkey.list(self, namespace)
2182 values = pushkey.list(self, namespace)
2182 self.hook('listkeys', namespace=namespace, values=values)
2183 self.hook('listkeys', namespace=namespace, values=values)
2183 return values
2184 return values
2184
2185
2185 def debugwireargs(self, one, two, three=None, four=None, five=None):
2186 def debugwireargs(self, one, two, three=None, four=None, five=None):
2186 '''used to test argument passing over the wire'''
2187 '''used to test argument passing over the wire'''
2187 return "%s %s %s %s %s" % (one, two, three, four, five)
2188 return "%s %s %s %s %s" % (one, two, three, four, five)
2188
2189
2189 def savecommitmessage(self, text):
2190 def savecommitmessage(self, text):
2190 fp = self.vfs('last-message.txt', 'wb')
2191 fp = self.vfs('last-message.txt', 'wb')
2191 try:
2192 try:
2192 fp.write(text)
2193 fp.write(text)
2193 finally:
2194 finally:
2194 fp.close()
2195 fp.close()
2195 return self.pathto(fp.name[len(self.root) + 1:])
2196 return self.pathto(fp.name[len(self.root) + 1:])
2196
2197
2197 # used to avoid circular references so destructors work
2198 # used to avoid circular references so destructors work
2198 def aftertrans(files):
2199 def aftertrans(files):
2199 renamefiles = [tuple(t) for t in files]
2200 renamefiles = [tuple(t) for t in files]
2200 def a():
2201 def a():
2201 for vfs, src, dest in renamefiles:
2202 for vfs, src, dest in renamefiles:
2202 # if src and dest refer to a same file, vfs.rename is a no-op,
2203 # if src and dest refer to a same file, vfs.rename is a no-op,
2203 # leaving both src and dest on disk. delete dest to make sure
2204 # leaving both src and dest on disk. delete dest to make sure
2204 # the rename couldn't be such a no-op.
2205 # the rename couldn't be such a no-op.
2205 vfs.tryunlink(dest)
2206 vfs.tryunlink(dest)
2206 try:
2207 try:
2207 vfs.rename(src, dest)
2208 vfs.rename(src, dest)
2208 except OSError: # journal file does not yet exist
2209 except OSError: # journal file does not yet exist
2209 pass
2210 pass
2210 return a
2211 return a
2211
2212
2212 def undoname(fn):
2213 def undoname(fn):
2213 base, name = os.path.split(fn)
2214 base, name = os.path.split(fn)
2214 assert name.startswith('journal')
2215 assert name.startswith('journal')
2215 return os.path.join(base, name.replace('journal', 'undo', 1))
2216 return os.path.join(base, name.replace('journal', 'undo', 1))
2216
2217
2217 def instance(ui, path, create):
2218 def instance(ui, path, create):
2218 return localrepository(ui, util.urllocalpath(path), create)
2219 return localrepository(ui, util.urllocalpath(path), create)
2219
2220
2220 def islocal(path):
2221 def islocal(path):
2221 return True
2222 return True
2222
2223
2223 def newreporequirements(repo):
2224 def newreporequirements(repo):
2224 """Determine the set of requirements for a new local repository.
2225 """Determine the set of requirements for a new local repository.
2225
2226
2226 Extensions can wrap this function to specify custom requirements for
2227 Extensions can wrap this function to specify custom requirements for
2227 new repositories.
2228 new repositories.
2228 """
2229 """
2229 ui = repo.ui
2230 ui = repo.ui
2230 requirements = {'revlogv1'}
2231 requirements = {'revlogv1'}
2231 if ui.configbool('format', 'usestore'):
2232 if ui.configbool('format', 'usestore'):
2232 requirements.add('store')
2233 requirements.add('store')
2233 if ui.configbool('format', 'usefncache'):
2234 if ui.configbool('format', 'usefncache'):
2234 requirements.add('fncache')
2235 requirements.add('fncache')
2235 if ui.configbool('format', 'dotencode'):
2236 if ui.configbool('format', 'dotencode'):
2236 requirements.add('dotencode')
2237 requirements.add('dotencode')
2237
2238
2238 compengine = ui.config('experimental', 'format.compression')
2239 compengine = ui.config('experimental', 'format.compression')
2239 if compengine not in util.compengines:
2240 if compengine not in util.compengines:
2240 raise error.Abort(_('compression engine %s defined by '
2241 raise error.Abort(_('compression engine %s defined by '
2241 'experimental.format.compression not available') %
2242 'experimental.format.compression not available') %
2242 compengine,
2243 compengine,
2243 hint=_('run "hg debuginstall" to list available '
2244 hint=_('run "hg debuginstall" to list available '
2244 'compression engines'))
2245 'compression engines'))
2245
2246
2246 # zlib is the historical default and doesn't need an explicit requirement.
2247 # zlib is the historical default and doesn't need an explicit requirement.
2247 if compengine != 'zlib':
2248 if compengine != 'zlib':
2248 requirements.add('exp-compression-%s' % compengine)
2249 requirements.add('exp-compression-%s' % compengine)
2249
2250
2250 if scmutil.gdinitconfig(ui):
2251 if scmutil.gdinitconfig(ui):
2251 requirements.add('generaldelta')
2252 requirements.add('generaldelta')
2252 if ui.configbool('experimental', 'treemanifest'):
2253 if ui.configbool('experimental', 'treemanifest'):
2253 requirements.add('treemanifest')
2254 requirements.add('treemanifest')
2254 if ui.configbool('experimental', 'manifestv2'):
2255 if ui.configbool('experimental', 'manifestv2'):
2255 requirements.add('manifestv2')
2256 requirements.add('manifestv2')
2256
2257
2257 revlogv2 = ui.config('experimental', 'revlogv2')
2258 revlogv2 = ui.config('experimental', 'revlogv2')
2258 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2259 if revlogv2 == 'enable-unstable-format-and-corrupt-my-data':
2259 requirements.remove('revlogv1')
2260 requirements.remove('revlogv1')
2260 # generaldelta is implied by revlogv2.
2261 # generaldelta is implied by revlogv2.
2261 requirements.discard('generaldelta')
2262 requirements.discard('generaldelta')
2262 requirements.add(REVLOGV2_REQUIREMENT)
2263 requirements.add(REVLOGV2_REQUIREMENT)
2263
2264
2264 return requirements
2265 return requirements
@@ -1,215 +1,221 b''
1 from __future__ import absolute_import
1 from __future__ import absolute_import
2
2
3 import errno
3 import errno
4 import os
4 import os
5 import posixpath
5 import posixpath
6 import stat
6 import stat
7
7
8 from .i18n import _
8 from .i18n import _
9 from . import (
9 from . import (
10 encoding,
10 encoding,
11 error,
11 error,
12 pycompat,
12 pycompat,
13 util,
13 util,
14 )
14 )
15
15
16 def _lowerclean(s):
16 def _lowerclean(s):
17 return encoding.hfsignoreclean(s.lower())
17 return encoding.hfsignoreclean(s.lower())
18
18
19 class pathauditor(object):
19 class pathauditor(object):
20 '''ensure that a filesystem path contains no banned components.
20 '''ensure that a filesystem path contains no banned components.
21 the following properties of a path are checked:
21 the following properties of a path are checked:
22
22
23 - ends with a directory separator
23 - ends with a directory separator
24 - under top-level .hg
24 - under top-level .hg
25 - starts at the root of a windows drive
25 - starts at the root of a windows drive
26 - contains ".."
26 - contains ".."
27
27
28 More check are also done about the file system states:
28 More check are also done about the file system states:
29 - traverses a symlink (e.g. a/symlink_here/b)
29 - traverses a symlink (e.g. a/symlink_here/b)
30 - inside a nested repository (a callback can be used to approve
30 - inside a nested repository (a callback can be used to approve
31 some nested repositories, e.g., subrepositories)
31 some nested repositories, e.g., subrepositories)
32
32
33 The file system checks are only done when 'realfs' is set to True (the
33 The file system checks are only done when 'realfs' is set to True (the
34 default). They should be disable then we are auditing path for operation on
34 default). They should be disable then we are auditing path for operation on
35 stored history.
35 stored history.
36
37 If 'cached' is set to True, audited paths and sub-directories are cached.
38 Be careful to not keep the cache of unmanaged directories for long because
39 audited paths may be replaced with symlinks.
36 '''
40 '''
37
41
38 def __init__(self, root, callback=None, realfs=True):
42 def __init__(self, root, callback=None, realfs=True, cached=False):
39 self.audited = set()
43 self.audited = set()
40 self.auditeddir = set()
44 self.auditeddir = set()
41 self.root = root
45 self.root = root
42 self._realfs = realfs
46 self._realfs = realfs
47 self._cached = cached
43 self.callback = callback
48 self.callback = callback
44 if os.path.lexists(root) and not util.fscasesensitive(root):
49 if os.path.lexists(root) and not util.fscasesensitive(root):
45 self.normcase = util.normcase
50 self.normcase = util.normcase
46 else:
51 else:
47 self.normcase = lambda x: x
52 self.normcase = lambda x: x
48
53
49 def __call__(self, path, mode=None):
54 def __call__(self, path, mode=None):
50 '''Check the relative path.
55 '''Check the relative path.
51 path may contain a pattern (e.g. foodir/**.txt)'''
56 path may contain a pattern (e.g. foodir/**.txt)'''
52
57
53 path = util.localpath(path)
58 path = util.localpath(path)
54 normpath = self.normcase(path)
59 normpath = self.normcase(path)
55 if normpath in self.audited:
60 if normpath in self.audited:
56 return
61 return
57 # AIX ignores "/" at end of path, others raise EISDIR.
62 # AIX ignores "/" at end of path, others raise EISDIR.
58 if util.endswithsep(path):
63 if util.endswithsep(path):
59 raise error.Abort(_("path ends in directory separator: %s") % path)
64 raise error.Abort(_("path ends in directory separator: %s") % path)
60 parts = util.splitpath(path)
65 parts = util.splitpath(path)
61 if (os.path.splitdrive(path)[0]
66 if (os.path.splitdrive(path)[0]
62 or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
67 or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
63 or os.pardir in parts):
68 or os.pardir in parts):
64 raise error.Abort(_("path contains illegal component: %s") % path)
69 raise error.Abort(_("path contains illegal component: %s") % path)
65 # Windows shortname aliases
70 # Windows shortname aliases
66 for p in parts:
71 for p in parts:
67 if "~" in p:
72 if "~" in p:
68 first, last = p.split("~", 1)
73 first, last = p.split("~", 1)
69 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
74 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
70 raise error.Abort(_("path contains illegal component: %s")
75 raise error.Abort(_("path contains illegal component: %s")
71 % path)
76 % path)
72 if '.hg' in _lowerclean(path):
77 if '.hg' in _lowerclean(path):
73 lparts = [_lowerclean(p.lower()) for p in parts]
78 lparts = [_lowerclean(p.lower()) for p in parts]
74 for p in '.hg', '.hg.':
79 for p in '.hg', '.hg.':
75 if p in lparts[1:]:
80 if p in lparts[1:]:
76 pos = lparts.index(p)
81 pos = lparts.index(p)
77 base = os.path.join(*parts[:pos])
82 base = os.path.join(*parts[:pos])
78 raise error.Abort(_("path '%s' is inside nested repo %r")
83 raise error.Abort(_("path '%s' is inside nested repo %r")
79 % (path, base))
84 % (path, base))
80
85
81 normparts = util.splitpath(normpath)
86 normparts = util.splitpath(normpath)
82 assert len(parts) == len(normparts)
87 assert len(parts) == len(normparts)
83
88
84 parts.pop()
89 parts.pop()
85 normparts.pop()
90 normparts.pop()
86 prefixes = []
91 prefixes = []
87 # It's important that we check the path parts starting from the root.
92 # It's important that we check the path parts starting from the root.
88 # This means we won't accidentally traverse a symlink into some other
93 # This means we won't accidentally traverse a symlink into some other
89 # filesystem (which is potentially expensive to access).
94 # filesystem (which is potentially expensive to access).
90 for i in range(len(parts)):
95 for i in range(len(parts)):
91 prefix = pycompat.ossep.join(parts[:i + 1])
96 prefix = pycompat.ossep.join(parts[:i + 1])
92 normprefix = pycompat.ossep.join(normparts[:i + 1])
97 normprefix = pycompat.ossep.join(normparts[:i + 1])
93 if normprefix in self.auditeddir:
98 if normprefix in self.auditeddir:
94 continue
99 continue
95 if self._realfs:
100 if self._realfs:
96 self._checkfs(prefix, path)
101 self._checkfs(prefix, path)
97 prefixes.append(normprefix)
102 prefixes.append(normprefix)
98
103
99 self.audited.add(normpath)
104 if self._cached:
100 # only add prefixes to the cache after checking everything: we don't
105 self.audited.add(normpath)
101 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
106 # only add prefixes to the cache after checking everything: we don't
102 self.auditeddir.update(prefixes)
107 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
108 self.auditeddir.update(prefixes)
103
109
104 def _checkfs(self, prefix, path):
110 def _checkfs(self, prefix, path):
105 """raise exception if a file system backed check fails"""
111 """raise exception if a file system backed check fails"""
106 curpath = os.path.join(self.root, prefix)
112 curpath = os.path.join(self.root, prefix)
107 try:
113 try:
108 st = os.lstat(curpath)
114 st = os.lstat(curpath)
109 except OSError as err:
115 except OSError as err:
110 # EINVAL can be raised as invalid path syntax under win32.
116 # EINVAL can be raised as invalid path syntax under win32.
111 # They must be ignored for patterns can be checked too.
117 # They must be ignored for patterns can be checked too.
112 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
118 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
113 raise
119 raise
114 else:
120 else:
115 if stat.S_ISLNK(st.st_mode):
121 if stat.S_ISLNK(st.st_mode):
116 msg = _('path %r traverses symbolic link %r') % (path, prefix)
122 msg = _('path %r traverses symbolic link %r') % (path, prefix)
117 raise error.Abort(msg)
123 raise error.Abort(msg)
118 elif (stat.S_ISDIR(st.st_mode) and
124 elif (stat.S_ISDIR(st.st_mode) and
119 os.path.isdir(os.path.join(curpath, '.hg'))):
125 os.path.isdir(os.path.join(curpath, '.hg'))):
120 if not self.callback or not self.callback(curpath):
126 if not self.callback or not self.callback(curpath):
121 msg = _("path '%s' is inside nested repo %r")
127 msg = _("path '%s' is inside nested repo %r")
122 raise error.Abort(msg % (path, prefix))
128 raise error.Abort(msg % (path, prefix))
123
129
124 def check(self, path):
130 def check(self, path):
125 try:
131 try:
126 self(path)
132 self(path)
127 return True
133 return True
128 except (OSError, error.Abort):
134 except (OSError, error.Abort):
129 return False
135 return False
130
136
131 def canonpath(root, cwd, myname, auditor=None):
137 def canonpath(root, cwd, myname, auditor=None):
132 '''return the canonical path of myname, given cwd and root'''
138 '''return the canonical path of myname, given cwd and root'''
133 if util.endswithsep(root):
139 if util.endswithsep(root):
134 rootsep = root
140 rootsep = root
135 else:
141 else:
136 rootsep = root + pycompat.ossep
142 rootsep = root + pycompat.ossep
137 name = myname
143 name = myname
138 if not os.path.isabs(name):
144 if not os.path.isabs(name):
139 name = os.path.join(root, cwd, name)
145 name = os.path.join(root, cwd, name)
140 name = os.path.normpath(name)
146 name = os.path.normpath(name)
141 if auditor is None:
147 if auditor is None:
142 auditor = pathauditor(root)
148 auditor = pathauditor(root)
143 if name != rootsep and name.startswith(rootsep):
149 if name != rootsep and name.startswith(rootsep):
144 name = name[len(rootsep):]
150 name = name[len(rootsep):]
145 auditor(name)
151 auditor(name)
146 return util.pconvert(name)
152 return util.pconvert(name)
147 elif name == root:
153 elif name == root:
148 return ''
154 return ''
149 else:
155 else:
150 # Determine whether `name' is in the hierarchy at or beneath `root',
156 # Determine whether `name' is in the hierarchy at or beneath `root',
151 # by iterating name=dirname(name) until that causes no change (can't
157 # by iterating name=dirname(name) until that causes no change (can't
152 # check name == '/', because that doesn't work on windows). The list
158 # check name == '/', because that doesn't work on windows). The list
153 # `rel' holds the reversed list of components making up the relative
159 # `rel' holds the reversed list of components making up the relative
154 # file name we want.
160 # file name we want.
155 rel = []
161 rel = []
156 while True:
162 while True:
157 try:
163 try:
158 s = util.samefile(name, root)
164 s = util.samefile(name, root)
159 except OSError:
165 except OSError:
160 s = False
166 s = False
161 if s:
167 if s:
162 if not rel:
168 if not rel:
163 # name was actually the same as root (maybe a symlink)
169 # name was actually the same as root (maybe a symlink)
164 return ''
170 return ''
165 rel.reverse()
171 rel.reverse()
166 name = os.path.join(*rel)
172 name = os.path.join(*rel)
167 auditor(name)
173 auditor(name)
168 return util.pconvert(name)
174 return util.pconvert(name)
169 dirname, basename = util.split(name)
175 dirname, basename = util.split(name)
170 rel.append(basename)
176 rel.append(basename)
171 if dirname == name:
177 if dirname == name:
172 break
178 break
173 name = dirname
179 name = dirname
174
180
175 # A common mistake is to use -R, but specify a file relative to the repo
181 # A common mistake is to use -R, but specify a file relative to the repo
176 # instead of cwd. Detect that case, and provide a hint to the user.
182 # instead of cwd. Detect that case, and provide a hint to the user.
177 hint = None
183 hint = None
178 try:
184 try:
179 if cwd != root:
185 if cwd != root:
180 canonpath(root, root, myname, auditor)
186 canonpath(root, root, myname, auditor)
181 hint = (_("consider using '--cwd %s'")
187 hint = (_("consider using '--cwd %s'")
182 % os.path.relpath(root, cwd))
188 % os.path.relpath(root, cwd))
183 except error.Abort:
189 except error.Abort:
184 pass
190 pass
185
191
186 raise error.Abort(_("%s not under root '%s'") % (myname, root),
192 raise error.Abort(_("%s not under root '%s'") % (myname, root),
187 hint=hint)
193 hint=hint)
188
194
189 def normasprefix(path):
195 def normasprefix(path):
190 '''normalize the specified path as path prefix
196 '''normalize the specified path as path prefix
191
197
192 Returned value can be used safely for "p.startswith(prefix)",
198 Returned value can be used safely for "p.startswith(prefix)",
193 "p[len(prefix):]", and so on.
199 "p[len(prefix):]", and so on.
194
200
195 For efficiency, this expects "path" argument to be already
201 For efficiency, this expects "path" argument to be already
196 normalized by "os.path.normpath", "os.path.realpath", and so on.
202 normalized by "os.path.normpath", "os.path.realpath", and so on.
197
203
198 See also issue3033 for detail about need of this function.
204 See also issue3033 for detail about need of this function.
199
205
200 >>> normasprefix('/foo/bar').replace(os.sep, '/')
206 >>> normasprefix('/foo/bar').replace(os.sep, '/')
201 '/foo/bar/'
207 '/foo/bar/'
202 >>> normasprefix('/').replace(os.sep, '/')
208 >>> normasprefix('/').replace(os.sep, '/')
203 '/'
209 '/'
204 '''
210 '''
205 d, p = os.path.splitdrive(path)
211 d, p = os.path.splitdrive(path)
206 if len(p) != len(pycompat.ossep):
212 if len(p) != len(pycompat.ossep):
207 return path + pycompat.ossep
213 return path + pycompat.ossep
208 else:
214 else:
209 return path
215 return path
210
216
211 # forward two methods from posixpath that do what we need, but we'd
217 # forward two methods from posixpath that do what we need, but we'd
212 # rather not let our internals know that we're thinking in posix terms
218 # rather not let our internals know that we're thinking in posix terms
213 # - instead we'll let them be oblivious.
219 # - instead we'll let them be oblivious.
214 join = posixpath.join
220 join = posixpath.join
215 dirname = posixpath.dirname
221 dirname = posixpath.dirname
@@ -1,668 +1,675 b''
1 # posix.py - Posix utility function implementations for Mercurial
1 # posix.py - Posix utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import fcntl
11 import fcntl
12 import getpass
12 import getpass
13 import grp
13 import grp
14 import os
14 import os
15 import pwd
15 import pwd
16 import re
16 import re
17 import select
17 import select
18 import stat
18 import stat
19 import sys
19 import sys
20 import tempfile
20 import tempfile
21 import unicodedata
21 import unicodedata
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 encoding,
25 encoding,
26 error,
26 pycompat,
27 pycompat,
27 )
28 )
28
29
29 posixfile = open
30 posixfile = open
30 normpath = os.path.normpath
31 normpath = os.path.normpath
31 samestat = os.path.samestat
32 samestat = os.path.samestat
32 try:
33 try:
33 oslink = os.link
34 oslink = os.link
34 except AttributeError:
35 except AttributeError:
35 # Some platforms build Python without os.link on systems that are
36 # Some platforms build Python without os.link on systems that are
36 # vaguely unix-like but don't have hardlink support. For those
37 # vaguely unix-like but don't have hardlink support. For those
37 # poor souls, just say we tried and that it failed so we fall back
38 # poor souls, just say we tried and that it failed so we fall back
38 # to copies.
39 # to copies.
39 def oslink(src, dst):
40 def oslink(src, dst):
40 raise OSError(errno.EINVAL,
41 raise OSError(errno.EINVAL,
41 'hardlinks not supported: %s to %s' % (src, dst))
42 'hardlinks not supported: %s to %s' % (src, dst))
42 unlink = os.unlink
43 unlink = os.unlink
43 rename = os.rename
44 rename = os.rename
44 removedirs = os.removedirs
45 removedirs = os.removedirs
45 expandglobs = False
46 expandglobs = False
46
47
47 umask = os.umask(0)
48 umask = os.umask(0)
48 os.umask(umask)
49 os.umask(umask)
49
50
50 def split(p):
51 def split(p):
51 '''Same as posixpath.split, but faster
52 '''Same as posixpath.split, but faster
52
53
53 >>> import posixpath
54 >>> import posixpath
54 >>> for f in ['/absolute/path/to/file',
55 >>> for f in ['/absolute/path/to/file',
55 ... 'relative/path/to/file',
56 ... 'relative/path/to/file',
56 ... 'file_alone',
57 ... 'file_alone',
57 ... 'path/to/directory/',
58 ... 'path/to/directory/',
58 ... '/multiple/path//separators',
59 ... '/multiple/path//separators',
59 ... '/file_at_root',
60 ... '/file_at_root',
60 ... '///multiple_leading_separators_at_root',
61 ... '///multiple_leading_separators_at_root',
61 ... '']:
62 ... '']:
62 ... assert split(f) == posixpath.split(f), f
63 ... assert split(f) == posixpath.split(f), f
63 '''
64 '''
64 ht = p.rsplit('/', 1)
65 ht = p.rsplit('/', 1)
65 if len(ht) == 1:
66 if len(ht) == 1:
66 return '', p
67 return '', p
67 nh = ht[0].rstrip('/')
68 nh = ht[0].rstrip('/')
68 if nh:
69 if nh:
69 return nh, ht[1]
70 return nh, ht[1]
70 return ht[0] + '/', ht[1]
71 return ht[0] + '/', ht[1]
71
72
72 def openhardlinks():
73 def openhardlinks():
73 '''return true if it is safe to hold open file handles to hardlinks'''
74 '''return true if it is safe to hold open file handles to hardlinks'''
74 return True
75 return True
75
76
76 def nlinks(name):
77 def nlinks(name):
77 '''return number of hardlinks for the given file'''
78 '''return number of hardlinks for the given file'''
78 return os.lstat(name).st_nlink
79 return os.lstat(name).st_nlink
79
80
80 def parsepatchoutput(output_line):
81 def parsepatchoutput(output_line):
81 """parses the output produced by patch and returns the filename"""
82 """parses the output produced by patch and returns the filename"""
82 pf = output_line[14:]
83 pf = output_line[14:]
83 if pycompat.sysplatform == 'OpenVMS':
84 if pycompat.sysplatform == 'OpenVMS':
84 if pf[0] == '`':
85 if pf[0] == '`':
85 pf = pf[1:-1] # Remove the quotes
86 pf = pf[1:-1] # Remove the quotes
86 else:
87 else:
87 if pf.startswith("'") and pf.endswith("'") and " " in pf:
88 if pf.startswith("'") and pf.endswith("'") and " " in pf:
88 pf = pf[1:-1] # Remove the quotes
89 pf = pf[1:-1] # Remove the quotes
89 return pf
90 return pf
90
91
91 def sshargs(sshcmd, host, user, port):
92 def sshargs(sshcmd, host, user, port):
92 '''Build argument list for ssh'''
93 '''Build argument list for ssh'''
93 args = user and ("%s@%s" % (user, host)) or host
94 args = user and ("%s@%s" % (user, host)) or host
94 return port and ("%s -p %s" % (args, port)) or args
95 if '-' in args[:1]:
96 raise error.Abort(
97 _('illegal ssh hostname or username starting with -: %s') % args)
98 args = shellquote(args)
99 if port:
100 args = '-p %s %s' % (shellquote(port), args)
101 return args
95
102
96 def isexec(f):
103 def isexec(f):
97 """check whether a file is executable"""
104 """check whether a file is executable"""
98 return (os.lstat(f).st_mode & 0o100 != 0)
105 return (os.lstat(f).st_mode & 0o100 != 0)
99
106
100 def setflags(f, l, x):
107 def setflags(f, l, x):
101 st = os.lstat(f)
108 st = os.lstat(f)
102 s = st.st_mode
109 s = st.st_mode
103 if l:
110 if l:
104 if not stat.S_ISLNK(s):
111 if not stat.S_ISLNK(s):
105 # switch file to link
112 # switch file to link
106 fp = open(f)
113 fp = open(f)
107 data = fp.read()
114 data = fp.read()
108 fp.close()
115 fp.close()
109 unlink(f)
116 unlink(f)
110 try:
117 try:
111 os.symlink(data, f)
118 os.symlink(data, f)
112 except OSError:
119 except OSError:
113 # failed to make a link, rewrite file
120 # failed to make a link, rewrite file
114 fp = open(f, "w")
121 fp = open(f, "w")
115 fp.write(data)
122 fp.write(data)
116 fp.close()
123 fp.close()
117 # no chmod needed at this point
124 # no chmod needed at this point
118 return
125 return
119 if stat.S_ISLNK(s):
126 if stat.S_ISLNK(s):
120 # switch link to file
127 # switch link to file
121 data = os.readlink(f)
128 data = os.readlink(f)
122 unlink(f)
129 unlink(f)
123 fp = open(f, "w")
130 fp = open(f, "w")
124 fp.write(data)
131 fp.write(data)
125 fp.close()
132 fp.close()
126 s = 0o666 & ~umask # avoid restatting for chmod
133 s = 0o666 & ~umask # avoid restatting for chmod
127
134
128 sx = s & 0o100
135 sx = s & 0o100
129 if st.st_nlink > 1 and bool(x) != bool(sx):
136 if st.st_nlink > 1 and bool(x) != bool(sx):
130 # the file is a hardlink, break it
137 # the file is a hardlink, break it
131 with open(f, "rb") as fp:
138 with open(f, "rb") as fp:
132 data = fp.read()
139 data = fp.read()
133 unlink(f)
140 unlink(f)
134 with open(f, "wb") as fp:
141 with open(f, "wb") as fp:
135 fp.write(data)
142 fp.write(data)
136
143
137 if x and not sx:
144 if x and not sx:
138 # Turn on +x for every +r bit when making a file executable
145 # Turn on +x for every +r bit when making a file executable
139 # and obey umask.
146 # and obey umask.
140 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
147 os.chmod(f, s | (s & 0o444) >> 2 & ~umask)
141 elif not x and sx:
148 elif not x and sx:
142 # Turn off all +x bits
149 # Turn off all +x bits
143 os.chmod(f, s & 0o666)
150 os.chmod(f, s & 0o666)
144
151
145 def copymode(src, dst, mode=None):
152 def copymode(src, dst, mode=None):
146 '''Copy the file mode from the file at path src to dst.
153 '''Copy the file mode from the file at path src to dst.
147 If src doesn't exist, we're using mode instead. If mode is None, we're
154 If src doesn't exist, we're using mode instead. If mode is None, we're
148 using umask.'''
155 using umask.'''
149 try:
156 try:
150 st_mode = os.lstat(src).st_mode & 0o777
157 st_mode = os.lstat(src).st_mode & 0o777
151 except OSError as inst:
158 except OSError as inst:
152 if inst.errno != errno.ENOENT:
159 if inst.errno != errno.ENOENT:
153 raise
160 raise
154 st_mode = mode
161 st_mode = mode
155 if st_mode is None:
162 if st_mode is None:
156 st_mode = ~umask
163 st_mode = ~umask
157 st_mode &= 0o666
164 st_mode &= 0o666
158 os.chmod(dst, st_mode)
165 os.chmod(dst, st_mode)
159
166
160 def checkexec(path):
167 def checkexec(path):
161 """
168 """
162 Check whether the given path is on a filesystem with UNIX-like exec flags
169 Check whether the given path is on a filesystem with UNIX-like exec flags
163
170
164 Requires a directory (like /foo/.hg)
171 Requires a directory (like /foo/.hg)
165 """
172 """
166
173
167 # VFAT on some Linux versions can flip mode but it doesn't persist
174 # VFAT on some Linux versions can flip mode but it doesn't persist
168 # a FS remount. Frequently we can detect it if files are created
175 # a FS remount. Frequently we can detect it if files are created
169 # with exec bit on.
176 # with exec bit on.
170
177
171 try:
178 try:
172 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
179 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
173 cachedir = os.path.join(path, '.hg', 'cache')
180 cachedir = os.path.join(path, '.hg', 'cache')
174 if os.path.isdir(cachedir):
181 if os.path.isdir(cachedir):
175 checkisexec = os.path.join(cachedir, 'checkisexec')
182 checkisexec = os.path.join(cachedir, 'checkisexec')
176 checknoexec = os.path.join(cachedir, 'checknoexec')
183 checknoexec = os.path.join(cachedir, 'checknoexec')
177
184
178 try:
185 try:
179 m = os.stat(checkisexec).st_mode
186 m = os.stat(checkisexec).st_mode
180 except OSError as e:
187 except OSError as e:
181 if e.errno != errno.ENOENT:
188 if e.errno != errno.ENOENT:
182 raise
189 raise
183 # checkisexec does not exist - fall through ...
190 # checkisexec does not exist - fall through ...
184 else:
191 else:
185 # checkisexec exists, check if it actually is exec
192 # checkisexec exists, check if it actually is exec
186 if m & EXECFLAGS != 0:
193 if m & EXECFLAGS != 0:
187 # ensure checkisexec exists, check it isn't exec
194 # ensure checkisexec exists, check it isn't exec
188 try:
195 try:
189 m = os.stat(checknoexec).st_mode
196 m = os.stat(checknoexec).st_mode
190 except OSError as e:
197 except OSError as e:
191 if e.errno != errno.ENOENT:
198 if e.errno != errno.ENOENT:
192 raise
199 raise
193 open(checknoexec, 'w').close() # might fail
200 open(checknoexec, 'w').close() # might fail
194 m = os.stat(checknoexec).st_mode
201 m = os.stat(checknoexec).st_mode
195 if m & EXECFLAGS == 0:
202 if m & EXECFLAGS == 0:
196 # check-exec is exec and check-no-exec is not exec
203 # check-exec is exec and check-no-exec is not exec
197 return True
204 return True
198 # checknoexec exists but is exec - delete it
205 # checknoexec exists but is exec - delete it
199 unlink(checknoexec)
206 unlink(checknoexec)
200 # checkisexec exists but is not exec - delete it
207 # checkisexec exists but is not exec - delete it
201 unlink(checkisexec)
208 unlink(checkisexec)
202
209
203 # check using one file, leave it as checkisexec
210 # check using one file, leave it as checkisexec
204 checkdir = cachedir
211 checkdir = cachedir
205 else:
212 else:
206 # check directly in path and don't leave checkisexec behind
213 # check directly in path and don't leave checkisexec behind
207 checkdir = path
214 checkdir = path
208 checkisexec = None
215 checkisexec = None
209 fh, fn = tempfile.mkstemp(dir=checkdir, prefix='hg-checkexec-')
216 fh, fn = tempfile.mkstemp(dir=checkdir, prefix='hg-checkexec-')
210 try:
217 try:
211 os.close(fh)
218 os.close(fh)
212 m = os.stat(fn).st_mode
219 m = os.stat(fn).st_mode
213 if m & EXECFLAGS == 0:
220 if m & EXECFLAGS == 0:
214 os.chmod(fn, m & 0o777 | EXECFLAGS)
221 os.chmod(fn, m & 0o777 | EXECFLAGS)
215 if os.stat(fn).st_mode & EXECFLAGS != 0:
222 if os.stat(fn).st_mode & EXECFLAGS != 0:
216 if checkisexec is not None:
223 if checkisexec is not None:
217 os.rename(fn, checkisexec)
224 os.rename(fn, checkisexec)
218 fn = None
225 fn = None
219 return True
226 return True
220 finally:
227 finally:
221 if fn is not None:
228 if fn is not None:
222 unlink(fn)
229 unlink(fn)
223 except (IOError, OSError):
230 except (IOError, OSError):
224 # we don't care, the user probably won't be able to commit anyway
231 # we don't care, the user probably won't be able to commit anyway
225 return False
232 return False
226
233
227 def checklink(path):
234 def checklink(path):
228 """check whether the given path is on a symlink-capable filesystem"""
235 """check whether the given path is on a symlink-capable filesystem"""
229 # mktemp is not racy because symlink creation will fail if the
236 # mktemp is not racy because symlink creation will fail if the
230 # file already exists
237 # file already exists
231 while True:
238 while True:
232 cachedir = os.path.join(path, '.hg', 'cache')
239 cachedir = os.path.join(path, '.hg', 'cache')
233 checklink = os.path.join(cachedir, 'checklink')
240 checklink = os.path.join(cachedir, 'checklink')
234 # try fast path, read only
241 # try fast path, read only
235 if os.path.islink(checklink):
242 if os.path.islink(checklink):
236 return True
243 return True
237 if os.path.isdir(cachedir):
244 if os.path.isdir(cachedir):
238 checkdir = cachedir
245 checkdir = cachedir
239 else:
246 else:
240 checkdir = path
247 checkdir = path
241 cachedir = None
248 cachedir = None
242 fscheckdir = pycompat.fsdecode(checkdir)
249 fscheckdir = pycompat.fsdecode(checkdir)
243 name = tempfile.mktemp(dir=fscheckdir,
250 name = tempfile.mktemp(dir=fscheckdir,
244 prefix=r'checklink-')
251 prefix=r'checklink-')
245 name = pycompat.fsencode(name)
252 name = pycompat.fsencode(name)
246 try:
253 try:
247 fd = None
254 fd = None
248 if cachedir is None:
255 if cachedir is None:
249 fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
256 fd = tempfile.NamedTemporaryFile(dir=fscheckdir,
250 prefix=r'hg-checklink-')
257 prefix=r'hg-checklink-')
251 target = pycompat.fsencode(os.path.basename(fd.name))
258 target = pycompat.fsencode(os.path.basename(fd.name))
252 else:
259 else:
253 # create a fixed file to link to; doesn't matter if it
260 # create a fixed file to link to; doesn't matter if it
254 # already exists.
261 # already exists.
255 target = 'checklink-target'
262 target = 'checklink-target'
256 try:
263 try:
257 open(os.path.join(cachedir, target), 'w').close()
264 open(os.path.join(cachedir, target), 'w').close()
258 except IOError as inst:
265 except IOError as inst:
259 if inst[0] == errno.EACCES:
266 if inst[0] == errno.EACCES:
260 # If we can't write to cachedir, just pretend
267 # If we can't write to cachedir, just pretend
261 # that the fs is readonly and by association
268 # that the fs is readonly and by association
262 # that the fs won't support symlinks. This
269 # that the fs won't support symlinks. This
263 # seems like the least dangerous way to avoid
270 # seems like the least dangerous way to avoid
264 # data loss.
271 # data loss.
265 return False
272 return False
266 raise
273 raise
267 try:
274 try:
268 os.symlink(target, name)
275 os.symlink(target, name)
269 if cachedir is None:
276 if cachedir is None:
270 unlink(name)
277 unlink(name)
271 else:
278 else:
272 try:
279 try:
273 os.rename(name, checklink)
280 os.rename(name, checklink)
274 except OSError:
281 except OSError:
275 unlink(name)
282 unlink(name)
276 return True
283 return True
277 except OSError as inst:
284 except OSError as inst:
278 # link creation might race, try again
285 # link creation might race, try again
279 if inst[0] == errno.EEXIST:
286 if inst[0] == errno.EEXIST:
280 continue
287 continue
281 raise
288 raise
282 finally:
289 finally:
283 if fd is not None:
290 if fd is not None:
284 fd.close()
291 fd.close()
285 except AttributeError:
292 except AttributeError:
286 return False
293 return False
287 except OSError as inst:
294 except OSError as inst:
288 # sshfs might report failure while successfully creating the link
295 # sshfs might report failure while successfully creating the link
289 if inst[0] == errno.EIO and os.path.exists(name):
296 if inst[0] == errno.EIO and os.path.exists(name):
290 unlink(name)
297 unlink(name)
291 return False
298 return False
292
299
293 def checkosfilename(path):
300 def checkosfilename(path):
294 '''Check that the base-relative path is a valid filename on this platform.
301 '''Check that the base-relative path is a valid filename on this platform.
295 Returns None if the path is ok, or a UI string describing the problem.'''
302 Returns None if the path is ok, or a UI string describing the problem.'''
296 pass # on posix platforms, every path is ok
303 pass # on posix platforms, every path is ok
297
304
298 def setbinary(fd):
305 def setbinary(fd):
299 pass
306 pass
300
307
301 def pconvert(path):
308 def pconvert(path):
302 return path
309 return path
303
310
304 def localpath(path):
311 def localpath(path):
305 return path
312 return path
306
313
307 def samefile(fpath1, fpath2):
314 def samefile(fpath1, fpath2):
308 """Returns whether path1 and path2 refer to the same file. This is only
315 """Returns whether path1 and path2 refer to the same file. This is only
309 guaranteed to work for files, not directories."""
316 guaranteed to work for files, not directories."""
310 return os.path.samefile(fpath1, fpath2)
317 return os.path.samefile(fpath1, fpath2)
311
318
312 def samedevice(fpath1, fpath2):
319 def samedevice(fpath1, fpath2):
313 """Returns whether fpath1 and fpath2 are on the same device. This is only
320 """Returns whether fpath1 and fpath2 are on the same device. This is only
314 guaranteed to work for files, not directories."""
321 guaranteed to work for files, not directories."""
315 st1 = os.lstat(fpath1)
322 st1 = os.lstat(fpath1)
316 st2 = os.lstat(fpath2)
323 st2 = os.lstat(fpath2)
317 return st1.st_dev == st2.st_dev
324 return st1.st_dev == st2.st_dev
318
325
319 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
326 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
320 def normcase(path):
327 def normcase(path):
321 return path.lower()
328 return path.lower()
322
329
323 # what normcase does to ASCII strings
330 # what normcase does to ASCII strings
324 normcasespec = encoding.normcasespecs.lower
331 normcasespec = encoding.normcasespecs.lower
325 # fallback normcase function for non-ASCII strings
332 # fallback normcase function for non-ASCII strings
326 normcasefallback = normcase
333 normcasefallback = normcase
327
334
328 if pycompat.sysplatform == 'darwin':
335 if pycompat.sysplatform == 'darwin':
329
336
330 def normcase(path):
337 def normcase(path):
331 '''
338 '''
332 Normalize a filename for OS X-compatible comparison:
339 Normalize a filename for OS X-compatible comparison:
333 - escape-encode invalid characters
340 - escape-encode invalid characters
334 - decompose to NFD
341 - decompose to NFD
335 - lowercase
342 - lowercase
336 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
343 - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff]
337
344
338 >>> normcase('UPPER')
345 >>> normcase('UPPER')
339 'upper'
346 'upper'
340 >>> normcase('Caf\xc3\xa9')
347 >>> normcase('Caf\xc3\xa9')
341 'cafe\\xcc\\x81'
348 'cafe\\xcc\\x81'
342 >>> normcase('\xc3\x89')
349 >>> normcase('\xc3\x89')
343 'e\\xcc\\x81'
350 'e\\xcc\\x81'
344 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
351 >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918
345 '%b8%ca%c3\\xca\\xbe%c8.jpg'
352 '%b8%ca%c3\\xca\\xbe%c8.jpg'
346 '''
353 '''
347
354
348 try:
355 try:
349 return encoding.asciilower(path) # exception for non-ASCII
356 return encoding.asciilower(path) # exception for non-ASCII
350 except UnicodeDecodeError:
357 except UnicodeDecodeError:
351 return normcasefallback(path)
358 return normcasefallback(path)
352
359
353 normcasespec = encoding.normcasespecs.lower
360 normcasespec = encoding.normcasespecs.lower
354
361
355 def normcasefallback(path):
362 def normcasefallback(path):
356 try:
363 try:
357 u = path.decode('utf-8')
364 u = path.decode('utf-8')
358 except UnicodeDecodeError:
365 except UnicodeDecodeError:
359 # OS X percent-encodes any bytes that aren't valid utf-8
366 # OS X percent-encodes any bytes that aren't valid utf-8
360 s = ''
367 s = ''
361 pos = 0
368 pos = 0
362 l = len(path)
369 l = len(path)
363 while pos < l:
370 while pos < l:
364 try:
371 try:
365 c = encoding.getutf8char(path, pos)
372 c = encoding.getutf8char(path, pos)
366 pos += len(c)
373 pos += len(c)
367 except ValueError:
374 except ValueError:
368 c = '%%%02X' % ord(path[pos])
375 c = '%%%02X' % ord(path[pos])
369 pos += 1
376 pos += 1
370 s += c
377 s += c
371
378
372 u = s.decode('utf-8')
379 u = s.decode('utf-8')
373
380
374 # Decompose then lowercase (HFS+ technote specifies lower)
381 # Decompose then lowercase (HFS+ technote specifies lower)
375 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
382 enc = unicodedata.normalize('NFD', u).lower().encode('utf-8')
376 # drop HFS+ ignored characters
383 # drop HFS+ ignored characters
377 return encoding.hfsignoreclean(enc)
384 return encoding.hfsignoreclean(enc)
378
385
379 if pycompat.sysplatform == 'cygwin':
386 if pycompat.sysplatform == 'cygwin':
380 # workaround for cygwin, in which mount point part of path is
387 # workaround for cygwin, in which mount point part of path is
381 # treated as case sensitive, even though underlying NTFS is case
388 # treated as case sensitive, even though underlying NTFS is case
382 # insensitive.
389 # insensitive.
383
390
384 # default mount points
391 # default mount points
385 cygwinmountpoints = sorted([
392 cygwinmountpoints = sorted([
386 "/usr/bin",
393 "/usr/bin",
387 "/usr/lib",
394 "/usr/lib",
388 "/cygdrive",
395 "/cygdrive",
389 ], reverse=True)
396 ], reverse=True)
390
397
391 # use upper-ing as normcase as same as NTFS workaround
398 # use upper-ing as normcase as same as NTFS workaround
392 def normcase(path):
399 def normcase(path):
393 pathlen = len(path)
400 pathlen = len(path)
394 if (pathlen == 0) or (path[0] != pycompat.ossep):
401 if (pathlen == 0) or (path[0] != pycompat.ossep):
395 # treat as relative
402 # treat as relative
396 return encoding.upper(path)
403 return encoding.upper(path)
397
404
398 # to preserve case of mountpoint part
405 # to preserve case of mountpoint part
399 for mp in cygwinmountpoints:
406 for mp in cygwinmountpoints:
400 if not path.startswith(mp):
407 if not path.startswith(mp):
401 continue
408 continue
402
409
403 mplen = len(mp)
410 mplen = len(mp)
404 if mplen == pathlen: # mount point itself
411 if mplen == pathlen: # mount point itself
405 return mp
412 return mp
406 if path[mplen] == pycompat.ossep:
413 if path[mplen] == pycompat.ossep:
407 return mp + encoding.upper(path[mplen:])
414 return mp + encoding.upper(path[mplen:])
408
415
409 return encoding.upper(path)
416 return encoding.upper(path)
410
417
411 normcasespec = encoding.normcasespecs.other
418 normcasespec = encoding.normcasespecs.other
412 normcasefallback = normcase
419 normcasefallback = normcase
413
420
414 # Cygwin translates native ACLs to POSIX permissions,
421 # Cygwin translates native ACLs to POSIX permissions,
415 # but these translations are not supported by native
422 # but these translations are not supported by native
416 # tools, so the exec bit tends to be set erroneously.
423 # tools, so the exec bit tends to be set erroneously.
417 # Therefore, disable executable bit access on Cygwin.
424 # Therefore, disable executable bit access on Cygwin.
418 def checkexec(path):
425 def checkexec(path):
419 return False
426 return False
420
427
421 # Similarly, Cygwin's symlink emulation is likely to create
428 # Similarly, Cygwin's symlink emulation is likely to create
422 # problems when Mercurial is used from both Cygwin and native
429 # problems when Mercurial is used from both Cygwin and native
423 # Windows, with other native tools, or on shared volumes
430 # Windows, with other native tools, or on shared volumes
424 def checklink(path):
431 def checklink(path):
425 return False
432 return False
426
433
427 _needsshellquote = None
434 _needsshellquote = None
428 def shellquote(s):
435 def shellquote(s):
429 if pycompat.sysplatform == 'OpenVMS':
436 if pycompat.sysplatform == 'OpenVMS':
430 return '"%s"' % s
437 return '"%s"' % s
431 global _needsshellquote
438 global _needsshellquote
432 if _needsshellquote is None:
439 if _needsshellquote is None:
433 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
440 _needsshellquote = re.compile(br'[^a-zA-Z0-9._/+-]').search
434 if s and not _needsshellquote(s):
441 if s and not _needsshellquote(s):
435 # "s" shouldn't have to be quoted
442 # "s" shouldn't have to be quoted
436 return s
443 return s
437 else:
444 else:
438 return "'%s'" % s.replace("'", "'\\''")
445 return "'%s'" % s.replace("'", "'\\''")
439
446
440 def quotecommand(cmd):
447 def quotecommand(cmd):
441 return cmd
448 return cmd
442
449
443 def popen(command, mode='r'):
450 def popen(command, mode='r'):
444 return os.popen(command, mode)
451 return os.popen(command, mode)
445
452
446 def testpid(pid):
453 def testpid(pid):
447 '''return False if pid dead, True if running or not sure'''
454 '''return False if pid dead, True if running or not sure'''
448 if pycompat.sysplatform == 'OpenVMS':
455 if pycompat.sysplatform == 'OpenVMS':
449 return True
456 return True
450 try:
457 try:
451 os.kill(pid, 0)
458 os.kill(pid, 0)
452 return True
459 return True
453 except OSError as inst:
460 except OSError as inst:
454 return inst.errno != errno.ESRCH
461 return inst.errno != errno.ESRCH
455
462
456 def explainexit(code):
463 def explainexit(code):
457 """return a 2-tuple (desc, code) describing a subprocess status
464 """return a 2-tuple (desc, code) describing a subprocess status
458 (codes from kill are negative - not os.system/wait encoding)"""
465 (codes from kill are negative - not os.system/wait encoding)"""
459 if code >= 0:
466 if code >= 0:
460 return _("exited with status %d") % code, code
467 return _("exited with status %d") % code, code
461 return _("killed by signal %d") % -code, -code
468 return _("killed by signal %d") % -code, -code
462
469
463 def isowner(st):
470 def isowner(st):
464 """Return True if the stat object st is from the current user."""
471 """Return True if the stat object st is from the current user."""
465 return st.st_uid == os.getuid()
472 return st.st_uid == os.getuid()
466
473
467 def findexe(command):
474 def findexe(command):
468 '''Find executable for command searching like which does.
475 '''Find executable for command searching like which does.
469 If command is a basename then PATH is searched for command.
476 If command is a basename then PATH is searched for command.
470 PATH isn't searched if command is an absolute or relative path.
477 PATH isn't searched if command is an absolute or relative path.
471 If command isn't found None is returned.'''
478 If command isn't found None is returned.'''
472 if pycompat.sysplatform == 'OpenVMS':
479 if pycompat.sysplatform == 'OpenVMS':
473 return command
480 return command
474
481
475 def findexisting(executable):
482 def findexisting(executable):
476 'Will return executable if existing file'
483 'Will return executable if existing file'
477 if os.path.isfile(executable) and os.access(executable, os.X_OK):
484 if os.path.isfile(executable) and os.access(executable, os.X_OK):
478 return executable
485 return executable
479 return None
486 return None
480
487
481 if pycompat.ossep in command:
488 if pycompat.ossep in command:
482 return findexisting(command)
489 return findexisting(command)
483
490
484 if pycompat.sysplatform == 'plan9':
491 if pycompat.sysplatform == 'plan9':
485 return findexisting(os.path.join('/bin', command))
492 return findexisting(os.path.join('/bin', command))
486
493
487 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
494 for path in encoding.environ.get('PATH', '').split(pycompat.ospathsep):
488 executable = findexisting(os.path.join(path, command))
495 executable = findexisting(os.path.join(path, command))
489 if executable is not None:
496 if executable is not None:
490 return executable
497 return executable
491 return None
498 return None
492
499
493 def setsignalhandler():
500 def setsignalhandler():
494 pass
501 pass
495
502
496 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
503 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
497
504
498 def statfiles(files):
505 def statfiles(files):
499 '''Stat each file in files. Yield each stat, or None if a file does not
506 '''Stat each file in files. Yield each stat, or None if a file does not
500 exist or has a type we don't care about.'''
507 exist or has a type we don't care about.'''
501 lstat = os.lstat
508 lstat = os.lstat
502 getkind = stat.S_IFMT
509 getkind = stat.S_IFMT
503 for nf in files:
510 for nf in files:
504 try:
511 try:
505 st = lstat(nf)
512 st = lstat(nf)
506 if getkind(st.st_mode) not in _wantedkinds:
513 if getkind(st.st_mode) not in _wantedkinds:
507 st = None
514 st = None
508 except OSError as err:
515 except OSError as err:
509 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
516 if err.errno not in (errno.ENOENT, errno.ENOTDIR):
510 raise
517 raise
511 st = None
518 st = None
512 yield st
519 yield st
513
520
514 def getuser():
521 def getuser():
515 '''return name of current user'''
522 '''return name of current user'''
516 return pycompat.fsencode(getpass.getuser())
523 return pycompat.fsencode(getpass.getuser())
517
524
518 def username(uid=None):
525 def username(uid=None):
519 """Return the name of the user with the given uid.
526 """Return the name of the user with the given uid.
520
527
521 If uid is None, return the name of the current user."""
528 If uid is None, return the name of the current user."""
522
529
523 if uid is None:
530 if uid is None:
524 uid = os.getuid()
531 uid = os.getuid()
525 try:
532 try:
526 return pwd.getpwuid(uid)[0]
533 return pwd.getpwuid(uid)[0]
527 except KeyError:
534 except KeyError:
528 return str(uid)
535 return str(uid)
529
536
530 def groupname(gid=None):
537 def groupname(gid=None):
531 """Return the name of the group with the given gid.
538 """Return the name of the group with the given gid.
532
539
533 If gid is None, return the name of the current group."""
540 If gid is None, return the name of the current group."""
534
541
535 if gid is None:
542 if gid is None:
536 gid = os.getgid()
543 gid = os.getgid()
537 try:
544 try:
538 return grp.getgrgid(gid)[0]
545 return grp.getgrgid(gid)[0]
539 except KeyError:
546 except KeyError:
540 return str(gid)
547 return str(gid)
541
548
542 def groupmembers(name):
549 def groupmembers(name):
543 """Return the list of members of the group with the given
550 """Return the list of members of the group with the given
544 name, KeyError if the group does not exist.
551 name, KeyError if the group does not exist.
545 """
552 """
546 return list(grp.getgrnam(name).gr_mem)
553 return list(grp.getgrnam(name).gr_mem)
547
554
548 def spawndetached(args):
555 def spawndetached(args):
549 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
556 return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
550 args[0], args)
557 args[0], args)
551
558
552 def gethgcmd():
559 def gethgcmd():
553 return sys.argv[:1]
560 return sys.argv[:1]
554
561
555 def makedir(path, notindexed):
562 def makedir(path, notindexed):
556 os.mkdir(path)
563 os.mkdir(path)
557
564
558 def lookupreg(key, name=None, scope=None):
565 def lookupreg(key, name=None, scope=None):
559 return None
566 return None
560
567
561 def hidewindow():
568 def hidewindow():
562 """Hide current shell window.
569 """Hide current shell window.
563
570
564 Used to hide the window opened when starting asynchronous
571 Used to hide the window opened when starting asynchronous
565 child process under Windows, unneeded on other systems.
572 child process under Windows, unneeded on other systems.
566 """
573 """
567 pass
574 pass
568
575
569 class cachestat(object):
576 class cachestat(object):
570 def __init__(self, path):
577 def __init__(self, path):
571 self.stat = os.stat(path)
578 self.stat = os.stat(path)
572
579
573 def cacheable(self):
580 def cacheable(self):
574 return bool(self.stat.st_ino)
581 return bool(self.stat.st_ino)
575
582
576 __hash__ = object.__hash__
583 __hash__ = object.__hash__
577
584
578 def __eq__(self, other):
585 def __eq__(self, other):
579 try:
586 try:
580 # Only dev, ino, size, mtime and atime are likely to change. Out
587 # Only dev, ino, size, mtime and atime are likely to change. Out
581 # of these, we shouldn't compare atime but should compare the
588 # of these, we shouldn't compare atime but should compare the
582 # rest. However, one of the other fields changing indicates
589 # rest. However, one of the other fields changing indicates
583 # something fishy going on, so return False if anything but atime
590 # something fishy going on, so return False if anything but atime
584 # changes.
591 # changes.
585 return (self.stat.st_mode == other.stat.st_mode and
592 return (self.stat.st_mode == other.stat.st_mode and
586 self.stat.st_ino == other.stat.st_ino and
593 self.stat.st_ino == other.stat.st_ino and
587 self.stat.st_dev == other.stat.st_dev and
594 self.stat.st_dev == other.stat.st_dev and
588 self.stat.st_nlink == other.stat.st_nlink and
595 self.stat.st_nlink == other.stat.st_nlink and
589 self.stat.st_uid == other.stat.st_uid and
596 self.stat.st_uid == other.stat.st_uid and
590 self.stat.st_gid == other.stat.st_gid and
597 self.stat.st_gid == other.stat.st_gid and
591 self.stat.st_size == other.stat.st_size and
598 self.stat.st_size == other.stat.st_size and
592 self.stat.st_mtime == other.stat.st_mtime and
599 self.stat.st_mtime == other.stat.st_mtime and
593 self.stat.st_ctime == other.stat.st_ctime)
600 self.stat.st_ctime == other.stat.st_ctime)
594 except AttributeError:
601 except AttributeError:
595 return False
602 return False
596
603
597 def __ne__(self, other):
604 def __ne__(self, other):
598 return not self == other
605 return not self == other
599
606
600 def executablepath():
607 def executablepath():
601 return None # available on Windows only
608 return None # available on Windows only
602
609
603 def statislink(st):
610 def statislink(st):
604 '''check whether a stat result is a symlink'''
611 '''check whether a stat result is a symlink'''
605 return st and stat.S_ISLNK(st.st_mode)
612 return st and stat.S_ISLNK(st.st_mode)
606
613
607 def statisexec(st):
614 def statisexec(st):
608 '''check whether a stat result is an executable file'''
615 '''check whether a stat result is an executable file'''
609 return st and (st.st_mode & 0o100 != 0)
616 return st and (st.st_mode & 0o100 != 0)
610
617
611 def poll(fds):
618 def poll(fds):
612 """block until something happens on any file descriptor
619 """block until something happens on any file descriptor
613
620
614 This is a generic helper that will check for any activity
621 This is a generic helper that will check for any activity
615 (read, write. exception) and return the list of touched files.
622 (read, write. exception) and return the list of touched files.
616
623
617 In unsupported cases, it will raise a NotImplementedError"""
624 In unsupported cases, it will raise a NotImplementedError"""
618 try:
625 try:
619 while True:
626 while True:
620 try:
627 try:
621 res = select.select(fds, fds, fds)
628 res = select.select(fds, fds, fds)
622 break
629 break
623 except select.error as inst:
630 except select.error as inst:
624 if inst.args[0] == errno.EINTR:
631 if inst.args[0] == errno.EINTR:
625 continue
632 continue
626 raise
633 raise
627 except ValueError: # out of range file descriptor
634 except ValueError: # out of range file descriptor
628 raise NotImplementedError()
635 raise NotImplementedError()
629 return sorted(list(set(sum(res, []))))
636 return sorted(list(set(sum(res, []))))
630
637
631 def readpipe(pipe):
638 def readpipe(pipe):
632 """Read all available data from a pipe."""
639 """Read all available data from a pipe."""
633 # We can't fstat() a pipe because Linux will always report 0.
640 # We can't fstat() a pipe because Linux will always report 0.
634 # So, we set the pipe to non-blocking mode and read everything
641 # So, we set the pipe to non-blocking mode and read everything
635 # that's available.
642 # that's available.
636 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
643 flags = fcntl.fcntl(pipe, fcntl.F_GETFL)
637 flags |= os.O_NONBLOCK
644 flags |= os.O_NONBLOCK
638 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
645 oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags)
639
646
640 try:
647 try:
641 chunks = []
648 chunks = []
642 while True:
649 while True:
643 try:
650 try:
644 s = pipe.read()
651 s = pipe.read()
645 if not s:
652 if not s:
646 break
653 break
647 chunks.append(s)
654 chunks.append(s)
648 except IOError:
655 except IOError:
649 break
656 break
650
657
651 return ''.join(chunks)
658 return ''.join(chunks)
652 finally:
659 finally:
653 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
660 fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
654
661
655 def bindunixsocket(sock, path):
662 def bindunixsocket(sock, path):
656 """Bind the UNIX domain socket to the specified path"""
663 """Bind the UNIX domain socket to the specified path"""
657 # use relative path instead of full path at bind() if possible, since
664 # use relative path instead of full path at bind() if possible, since
658 # AF_UNIX path has very small length limit (107 chars) on common
665 # AF_UNIX path has very small length limit (107 chars) on common
659 # platforms (see sys/un.h)
666 # platforms (see sys/un.h)
660 dirname, basename = os.path.split(path)
667 dirname, basename = os.path.split(path)
661 bakwdfd = None
668 bakwdfd = None
662 if dirname:
669 if dirname:
663 bakwdfd = os.open('.', os.O_DIRECTORY)
670 bakwdfd = os.open('.', os.O_DIRECTORY)
664 os.chdir(dirname)
671 os.chdir(dirname)
665 sock.bind(basename)
672 sock.bind(basename)
666 if bakwdfd:
673 if bakwdfd:
667 os.fchdir(bakwdfd)
674 os.fchdir(bakwdfd)
668 os.close(bakwdfd)
675 os.close(bakwdfd)
@@ -1,1105 +1,1105 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 hex,
20 hex,
21 nullid,
21 nullid,
22 wdirid,
22 wdirid,
23 wdirrev,
23 wdirrev,
24 )
24 )
25
25
26 from . import (
26 from . import (
27 encoding,
27 encoding,
28 error,
28 error,
29 match as matchmod,
29 match as matchmod,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 pathutil,
32 pathutil,
33 phases,
33 phases,
34 pycompat,
34 pycompat,
35 revsetlang,
35 revsetlang,
36 similar,
36 similar,
37 util,
37 util,
38 )
38 )
39
39
40 if pycompat.osname == 'nt':
40 if pycompat.osname == 'nt':
41 from . import scmwindows as scmplatform
41 from . import scmwindows as scmplatform
42 else:
42 else:
43 from . import scmposix as scmplatform
43 from . import scmposix as scmplatform
44
44
45 termsize = scmplatform.termsize
45 termsize = scmplatform.termsize
46
46
47 class status(tuple):
47 class status(tuple):
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
48 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
49 and 'ignored' properties are only relevant to the working copy.
49 and 'ignored' properties are only relevant to the working copy.
50 '''
50 '''
51
51
52 __slots__ = ()
52 __slots__ = ()
53
53
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
54 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
55 clean):
55 clean):
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
56 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
57 ignored, clean))
57 ignored, clean))
58
58
59 @property
59 @property
60 def modified(self):
60 def modified(self):
61 '''files that have been modified'''
61 '''files that have been modified'''
62 return self[0]
62 return self[0]
63
63
64 @property
64 @property
65 def added(self):
65 def added(self):
66 '''files that have been added'''
66 '''files that have been added'''
67 return self[1]
67 return self[1]
68
68
69 @property
69 @property
70 def removed(self):
70 def removed(self):
71 '''files that have been removed'''
71 '''files that have been removed'''
72 return self[2]
72 return self[2]
73
73
74 @property
74 @property
75 def deleted(self):
75 def deleted(self):
76 '''files that are in the dirstate, but have been deleted from the
76 '''files that are in the dirstate, but have been deleted from the
77 working copy (aka "missing")
77 working copy (aka "missing")
78 '''
78 '''
79 return self[3]
79 return self[3]
80
80
81 @property
81 @property
82 def unknown(self):
82 def unknown(self):
83 '''files not in the dirstate that are not ignored'''
83 '''files not in the dirstate that are not ignored'''
84 return self[4]
84 return self[4]
85
85
86 @property
86 @property
87 def ignored(self):
87 def ignored(self):
88 '''files not in the dirstate that are ignored (by _dirignore())'''
88 '''files not in the dirstate that are ignored (by _dirignore())'''
89 return self[5]
89 return self[5]
90
90
91 @property
91 @property
92 def clean(self):
92 def clean(self):
93 '''files that have not been modified'''
93 '''files that have not been modified'''
94 return self[6]
94 return self[6]
95
95
96 def __repr__(self, *args, **kwargs):
96 def __repr__(self, *args, **kwargs):
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
97 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
98 'unknown=%r, ignored=%r, clean=%r>') % self)
98 'unknown=%r, ignored=%r, clean=%r>') % self)
99
99
100 def itersubrepos(ctx1, ctx2):
100 def itersubrepos(ctx1, ctx2):
101 """find subrepos in ctx1 or ctx2"""
101 """find subrepos in ctx1 or ctx2"""
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
102 # Create a (subpath, ctx) mapping where we prefer subpaths from
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
103 # ctx1. The subpaths from ctx2 are important when the .hgsub file
104 # has been modified (in ctx2) but not yet committed (in ctx1).
104 # has been modified (in ctx2) but not yet committed (in ctx1).
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
105 subpaths = dict.fromkeys(ctx2.substate, ctx2)
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
106 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
107
107
108 missing = set()
108 missing = set()
109
109
110 for subpath in ctx2.substate:
110 for subpath in ctx2.substate:
111 if subpath not in ctx1.substate:
111 if subpath not in ctx1.substate:
112 del subpaths[subpath]
112 del subpaths[subpath]
113 missing.add(subpath)
113 missing.add(subpath)
114
114
115 for subpath, ctx in sorted(subpaths.iteritems()):
115 for subpath, ctx in sorted(subpaths.iteritems()):
116 yield subpath, ctx.sub(subpath)
116 yield subpath, ctx.sub(subpath)
117
117
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
118 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
119 # status and diff will have an accurate result when it does
119 # status and diff will have an accurate result when it does
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
120 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
121 # against itself.
121 # against itself.
122 for subpath in missing:
122 for subpath in missing:
123 yield subpath, ctx2.nullsub(subpath, ctx1)
123 yield subpath, ctx2.nullsub(subpath, ctx1)
124
124
125 def nochangesfound(ui, repo, excluded=None):
125 def nochangesfound(ui, repo, excluded=None):
126 '''Report no changes for push/pull, excluded is None or a list of
126 '''Report no changes for push/pull, excluded is None or a list of
127 nodes excluded from the push/pull.
127 nodes excluded from the push/pull.
128 '''
128 '''
129 secretlist = []
129 secretlist = []
130 if excluded:
130 if excluded:
131 for n in excluded:
131 for n in excluded:
132 ctx = repo[n]
132 ctx = repo[n]
133 if ctx.phase() >= phases.secret and not ctx.extinct():
133 if ctx.phase() >= phases.secret and not ctx.extinct():
134 secretlist.append(n)
134 secretlist.append(n)
135
135
136 if secretlist:
136 if secretlist:
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
137 ui.status(_("no changes found (ignored %d secret changesets)\n")
138 % len(secretlist))
138 % len(secretlist))
139 else:
139 else:
140 ui.status(_("no changes found\n"))
140 ui.status(_("no changes found\n"))
141
141
142 def callcatch(ui, func):
142 def callcatch(ui, func):
143 """call func() with global exception handling
143 """call func() with global exception handling
144
144
145 return func() if no exception happens. otherwise do some error handling
145 return func() if no exception happens. otherwise do some error handling
146 and return an exit code accordingly. does not handle all exceptions.
146 and return an exit code accordingly. does not handle all exceptions.
147 """
147 """
148 try:
148 try:
149 try:
149 try:
150 return func()
150 return func()
151 except: # re-raises
151 except: # re-raises
152 ui.traceback()
152 ui.traceback()
153 raise
153 raise
154 # Global exception handling, alphabetically
154 # Global exception handling, alphabetically
155 # Mercurial-specific first, followed by built-in and library exceptions
155 # Mercurial-specific first, followed by built-in and library exceptions
156 except error.LockHeld as inst:
156 except error.LockHeld as inst:
157 if inst.errno == errno.ETIMEDOUT:
157 if inst.errno == errno.ETIMEDOUT:
158 reason = _('timed out waiting for lock held by %r') % inst.locker
158 reason = _('timed out waiting for lock held by %r') % inst.locker
159 else:
159 else:
160 reason = _('lock held by %r') % inst.locker
160 reason = _('lock held by %r') % inst.locker
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
161 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
162 if not inst.locker:
162 if not inst.locker:
163 ui.warn(_("(lock might be very busy)\n"))
163 ui.warn(_("(lock might be very busy)\n"))
164 except error.LockUnavailable as inst:
164 except error.LockUnavailable as inst:
165 ui.warn(_("abort: could not lock %s: %s\n") %
165 ui.warn(_("abort: could not lock %s: %s\n") %
166 (inst.desc or inst.filename, inst.strerror))
166 (inst.desc or inst.filename, inst.strerror))
167 except error.OutOfBandError as inst:
167 except error.OutOfBandError as inst:
168 if inst.args:
168 if inst.args:
169 msg = _("abort: remote error:\n")
169 msg = _("abort: remote error:\n")
170 else:
170 else:
171 msg = _("abort: remote error\n")
171 msg = _("abort: remote error\n")
172 ui.warn(msg)
172 ui.warn(msg)
173 if inst.args:
173 if inst.args:
174 ui.warn(''.join(inst.args))
174 ui.warn(''.join(inst.args))
175 if inst.hint:
175 if inst.hint:
176 ui.warn('(%s)\n' % inst.hint)
176 ui.warn('(%s)\n' % inst.hint)
177 except error.RepoError as inst:
177 except error.RepoError as inst:
178 ui.warn(_("abort: %s!\n") % inst)
178 ui.warn(_("abort: %s!\n") % inst)
179 if inst.hint:
179 if inst.hint:
180 ui.warn(_("(%s)\n") % inst.hint)
180 ui.warn(_("(%s)\n") % inst.hint)
181 except error.ResponseError as inst:
181 except error.ResponseError as inst:
182 ui.warn(_("abort: %s") % inst.args[0])
182 ui.warn(_("abort: %s") % inst.args[0])
183 if not isinstance(inst.args[1], basestring):
183 if not isinstance(inst.args[1], basestring):
184 ui.warn(" %r\n" % (inst.args[1],))
184 ui.warn(" %r\n" % (inst.args[1],))
185 elif not inst.args[1]:
185 elif not inst.args[1]:
186 ui.warn(_(" empty string\n"))
186 ui.warn(_(" empty string\n"))
187 else:
187 else:
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
188 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
189 except error.CensoredNodeError as inst:
189 except error.CensoredNodeError as inst:
190 ui.warn(_("abort: file censored %s!\n") % inst)
190 ui.warn(_("abort: file censored %s!\n") % inst)
191 except error.RevlogError as inst:
191 except error.RevlogError as inst:
192 ui.warn(_("abort: %s!\n") % inst)
192 ui.warn(_("abort: %s!\n") % inst)
193 except error.InterventionRequired as inst:
193 except error.InterventionRequired as inst:
194 ui.warn("%s\n" % inst)
194 ui.warn("%s\n" % inst)
195 if inst.hint:
195 if inst.hint:
196 ui.warn(_("(%s)\n") % inst.hint)
196 ui.warn(_("(%s)\n") % inst.hint)
197 return 1
197 return 1
198 except error.WdirUnsupported:
198 except error.WdirUnsupported:
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
199 ui.warn(_("abort: working directory revision cannot be specified\n"))
200 except error.Abort as inst:
200 except error.Abort as inst:
201 ui.warn(_("abort: %s\n") % inst)
201 ui.warn(_("abort: %s\n") % inst)
202 if inst.hint:
202 if inst.hint:
203 ui.warn(_("(%s)\n") % inst.hint)
203 ui.warn(_("(%s)\n") % inst.hint)
204 except ImportError as inst:
204 except ImportError as inst:
205 ui.warn(_("abort: %s!\n") % inst)
205 ui.warn(_("abort: %s!\n") % inst)
206 m = str(inst).split()[-1]
206 m = str(inst).split()[-1]
207 if m in "mpatch bdiff".split():
207 if m in "mpatch bdiff".split():
208 ui.warn(_("(did you forget to compile extensions?)\n"))
208 ui.warn(_("(did you forget to compile extensions?)\n"))
209 elif m in "zlib".split():
209 elif m in "zlib".split():
210 ui.warn(_("(is your Python install correct?)\n"))
210 ui.warn(_("(is your Python install correct?)\n"))
211 except IOError as inst:
211 except IOError as inst:
212 if util.safehasattr(inst, "code"):
212 if util.safehasattr(inst, "code"):
213 ui.warn(_("abort: %s\n") % inst)
213 ui.warn(_("abort: %s\n") % inst)
214 elif util.safehasattr(inst, "reason"):
214 elif util.safehasattr(inst, "reason"):
215 try: # usually it is in the form (errno, strerror)
215 try: # usually it is in the form (errno, strerror)
216 reason = inst.reason.args[1]
216 reason = inst.reason.args[1]
217 except (AttributeError, IndexError):
217 except (AttributeError, IndexError):
218 # it might be anything, for example a string
218 # it might be anything, for example a string
219 reason = inst.reason
219 reason = inst.reason
220 if isinstance(reason, unicode):
220 if isinstance(reason, unicode):
221 # SSLError of Python 2.7.9 contains a unicode
221 # SSLError of Python 2.7.9 contains a unicode
222 reason = encoding.unitolocal(reason)
222 reason = encoding.unitolocal(reason)
223 ui.warn(_("abort: error: %s\n") % reason)
223 ui.warn(_("abort: error: %s\n") % reason)
224 elif (util.safehasattr(inst, "args")
224 elif (util.safehasattr(inst, "args")
225 and inst.args and inst.args[0] == errno.EPIPE):
225 and inst.args and inst.args[0] == errno.EPIPE):
226 pass
226 pass
227 elif getattr(inst, "strerror", None):
227 elif getattr(inst, "strerror", None):
228 if getattr(inst, "filename", None):
228 if getattr(inst, "filename", None):
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
229 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
230 else:
230 else:
231 ui.warn(_("abort: %s\n") % inst.strerror)
231 ui.warn(_("abort: %s\n") % inst.strerror)
232 else:
232 else:
233 raise
233 raise
234 except OSError as inst:
234 except OSError as inst:
235 if getattr(inst, "filename", None) is not None:
235 if getattr(inst, "filename", None) is not None:
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
236 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
237 else:
237 else:
238 ui.warn(_("abort: %s\n") % inst.strerror)
238 ui.warn(_("abort: %s\n") % inst.strerror)
239 except MemoryError:
239 except MemoryError:
240 ui.warn(_("abort: out of memory\n"))
240 ui.warn(_("abort: out of memory\n"))
241 except SystemExit as inst:
241 except SystemExit as inst:
242 # Commands shouldn't sys.exit directly, but give a return code.
242 # Commands shouldn't sys.exit directly, but give a return code.
243 # Just in case catch this and and pass exit code to caller.
243 # Just in case catch this and and pass exit code to caller.
244 return inst.code
244 return inst.code
245 except socket.error as inst:
245 except socket.error as inst:
246 ui.warn(_("abort: %s\n") % inst.args[-1])
246 ui.warn(_("abort: %s\n") % inst.args[-1])
247
247
248 return -1
248 return -1
249
249
250 def checknewlabel(repo, lbl, kind):
250 def checknewlabel(repo, lbl, kind):
251 # Do not use the "kind" parameter in ui output.
251 # Do not use the "kind" parameter in ui output.
252 # It makes strings difficult to translate.
252 # It makes strings difficult to translate.
253 if lbl in ['tip', '.', 'null']:
253 if lbl in ['tip', '.', 'null']:
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
254 raise error.Abort(_("the name '%s' is reserved") % lbl)
255 for c in (':', '\0', '\n', '\r'):
255 for c in (':', '\0', '\n', '\r'):
256 if c in lbl:
256 if c in lbl:
257 raise error.Abort(_("%r cannot be used in a name") % c)
257 raise error.Abort(_("%r cannot be used in a name") % c)
258 try:
258 try:
259 int(lbl)
259 int(lbl)
260 raise error.Abort(_("cannot use an integer as a name"))
260 raise error.Abort(_("cannot use an integer as a name"))
261 except ValueError:
261 except ValueError:
262 pass
262 pass
263
263
264 def checkfilename(f):
264 def checkfilename(f):
265 '''Check that the filename f is an acceptable filename for a tracked file'''
265 '''Check that the filename f is an acceptable filename for a tracked file'''
266 if '\r' in f or '\n' in f:
266 if '\r' in f or '\n' in f:
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
267 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
268
268
269 def checkportable(ui, f):
269 def checkportable(ui, f):
270 '''Check if filename f is portable and warn or abort depending on config'''
270 '''Check if filename f is portable and warn or abort depending on config'''
271 checkfilename(f)
271 checkfilename(f)
272 abort, warn = checkportabilityalert(ui)
272 abort, warn = checkportabilityalert(ui)
273 if abort or warn:
273 if abort or warn:
274 msg = util.checkwinfilename(f)
274 msg = util.checkwinfilename(f)
275 if msg:
275 if msg:
276 msg = "%s: %r" % (msg, f)
276 msg = "%s: %r" % (msg, f)
277 if abort:
277 if abort:
278 raise error.Abort(msg)
278 raise error.Abort(msg)
279 ui.warn(_("warning: %s\n") % msg)
279 ui.warn(_("warning: %s\n") % msg)
280
280
281 def checkportabilityalert(ui):
281 def checkportabilityalert(ui):
282 '''check if the user's config requests nothing, a warning, or abort for
282 '''check if the user's config requests nothing, a warning, or abort for
283 non-portable filenames'''
283 non-portable filenames'''
284 val = ui.config('ui', 'portablefilenames')
284 val = ui.config('ui', 'portablefilenames')
285 lval = val.lower()
285 lval = val.lower()
286 bval = util.parsebool(val)
286 bval = util.parsebool(val)
287 abort = pycompat.osname == 'nt' or lval == 'abort'
287 abort = pycompat.osname == 'nt' or lval == 'abort'
288 warn = bval or lval == 'warn'
288 warn = bval or lval == 'warn'
289 if bval is None and not (warn or abort or lval == 'ignore'):
289 if bval is None and not (warn or abort or lval == 'ignore'):
290 raise error.ConfigError(
290 raise error.ConfigError(
291 _("ui.portablefilenames value is invalid ('%s')") % val)
291 _("ui.portablefilenames value is invalid ('%s')") % val)
292 return abort, warn
292 return abort, warn
293
293
294 class casecollisionauditor(object):
294 class casecollisionauditor(object):
295 def __init__(self, ui, abort, dirstate):
295 def __init__(self, ui, abort, dirstate):
296 self._ui = ui
296 self._ui = ui
297 self._abort = abort
297 self._abort = abort
298 allfiles = '\0'.join(dirstate._map)
298 allfiles = '\0'.join(dirstate._map)
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
299 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
300 self._dirstate = dirstate
300 self._dirstate = dirstate
301 # The purpose of _newfiles is so that we don't complain about
301 # The purpose of _newfiles is so that we don't complain about
302 # case collisions if someone were to call this object with the
302 # case collisions if someone were to call this object with the
303 # same filename twice.
303 # same filename twice.
304 self._newfiles = set()
304 self._newfiles = set()
305
305
306 def __call__(self, f):
306 def __call__(self, f):
307 if f in self._newfiles:
307 if f in self._newfiles:
308 return
308 return
309 fl = encoding.lower(f)
309 fl = encoding.lower(f)
310 if fl in self._loweredfiles and f not in self._dirstate:
310 if fl in self._loweredfiles and f not in self._dirstate:
311 msg = _('possible case-folding collision for %s') % f
311 msg = _('possible case-folding collision for %s') % f
312 if self._abort:
312 if self._abort:
313 raise error.Abort(msg)
313 raise error.Abort(msg)
314 self._ui.warn(_("warning: %s\n") % msg)
314 self._ui.warn(_("warning: %s\n") % msg)
315 self._loweredfiles.add(fl)
315 self._loweredfiles.add(fl)
316 self._newfiles.add(f)
316 self._newfiles.add(f)
317
317
318 def filteredhash(repo, maxrev):
318 def filteredhash(repo, maxrev):
319 """build hash of filtered revisions in the current repoview.
319 """build hash of filtered revisions in the current repoview.
320
320
321 Multiple caches perform up-to-date validation by checking that the
321 Multiple caches perform up-to-date validation by checking that the
322 tiprev and tipnode stored in the cache file match the current repository.
322 tiprev and tipnode stored in the cache file match the current repository.
323 However, this is not sufficient for validating repoviews because the set
323 However, this is not sufficient for validating repoviews because the set
324 of revisions in the view may change without the repository tiprev and
324 of revisions in the view may change without the repository tiprev and
325 tipnode changing.
325 tipnode changing.
326
326
327 This function hashes all the revs filtered from the view and returns
327 This function hashes all the revs filtered from the view and returns
328 that SHA-1 digest.
328 that SHA-1 digest.
329 """
329 """
330 cl = repo.changelog
330 cl = repo.changelog
331 if not cl.filteredrevs:
331 if not cl.filteredrevs:
332 return None
332 return None
333 key = None
333 key = None
334 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
334 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
335 if revs:
335 if revs:
336 s = hashlib.sha1()
336 s = hashlib.sha1()
337 for rev in revs:
337 for rev in revs:
338 s.update('%d;' % rev)
338 s.update('%d;' % rev)
339 key = s.digest()
339 key = s.digest()
340 return key
340 return key
341
341
342 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
342 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
343 '''yield every hg repository under path, always recursively.
343 '''yield every hg repository under path, always recursively.
344 The recurse flag will only control recursion into repo working dirs'''
344 The recurse flag will only control recursion into repo working dirs'''
345 def errhandler(err):
345 def errhandler(err):
346 if err.filename == path:
346 if err.filename == path:
347 raise err
347 raise err
348 samestat = getattr(os.path, 'samestat', None)
348 samestat = getattr(os.path, 'samestat', None)
349 if followsym and samestat is not None:
349 if followsym and samestat is not None:
350 def adddir(dirlst, dirname):
350 def adddir(dirlst, dirname):
351 match = False
351 match = False
352 dirstat = os.stat(dirname)
352 dirstat = os.stat(dirname)
353 for lstdirstat in dirlst:
353 for lstdirstat in dirlst:
354 if samestat(dirstat, lstdirstat):
354 if samestat(dirstat, lstdirstat):
355 match = True
355 match = True
356 break
356 break
357 if not match:
357 if not match:
358 dirlst.append(dirstat)
358 dirlst.append(dirstat)
359 return not match
359 return not match
360 else:
360 else:
361 followsym = False
361 followsym = False
362
362
363 if (seen_dirs is None) and followsym:
363 if (seen_dirs is None) and followsym:
364 seen_dirs = []
364 seen_dirs = []
365 adddir(seen_dirs, path)
365 adddir(seen_dirs, path)
366 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
366 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
367 dirs.sort()
367 dirs.sort()
368 if '.hg' in dirs:
368 if '.hg' in dirs:
369 yield root # found a repository
369 yield root # found a repository
370 qroot = os.path.join(root, '.hg', 'patches')
370 qroot = os.path.join(root, '.hg', 'patches')
371 if os.path.isdir(os.path.join(qroot, '.hg')):
371 if os.path.isdir(os.path.join(qroot, '.hg')):
372 yield qroot # we have a patch queue repo here
372 yield qroot # we have a patch queue repo here
373 if recurse:
373 if recurse:
374 # avoid recursing inside the .hg directory
374 # avoid recursing inside the .hg directory
375 dirs.remove('.hg')
375 dirs.remove('.hg')
376 else:
376 else:
377 dirs[:] = [] # don't descend further
377 dirs[:] = [] # don't descend further
378 elif followsym:
378 elif followsym:
379 newdirs = []
379 newdirs = []
380 for d in dirs:
380 for d in dirs:
381 fname = os.path.join(root, d)
381 fname = os.path.join(root, d)
382 if adddir(seen_dirs, fname):
382 if adddir(seen_dirs, fname):
383 if os.path.islink(fname):
383 if os.path.islink(fname):
384 for hgname in walkrepos(fname, True, seen_dirs):
384 for hgname in walkrepos(fname, True, seen_dirs):
385 yield hgname
385 yield hgname
386 else:
386 else:
387 newdirs.append(d)
387 newdirs.append(d)
388 dirs[:] = newdirs
388 dirs[:] = newdirs
389
389
390 def binnode(ctx):
390 def binnode(ctx):
391 """Return binary node id for a given basectx"""
391 """Return binary node id for a given basectx"""
392 node = ctx.node()
392 node = ctx.node()
393 if node is None:
393 if node is None:
394 return wdirid
394 return wdirid
395 return node
395 return node
396
396
397 def intrev(ctx):
397 def intrev(ctx):
398 """Return integer for a given basectx that can be used in comparison or
398 """Return integer for a given basectx that can be used in comparison or
399 arithmetic operation"""
399 arithmetic operation"""
400 rev = ctx.rev()
400 rev = ctx.rev()
401 if rev is None:
401 if rev is None:
402 return wdirrev
402 return wdirrev
403 return rev
403 return rev
404
404
405 def revsingle(repo, revspec, default='.'):
405 def revsingle(repo, revspec, default='.'):
406 if not revspec and revspec != 0:
406 if not revspec and revspec != 0:
407 return repo[default]
407 return repo[default]
408
408
409 l = revrange(repo, [revspec])
409 l = revrange(repo, [revspec])
410 if not l:
410 if not l:
411 raise error.Abort(_('empty revision set'))
411 raise error.Abort(_('empty revision set'))
412 return repo[l.last()]
412 return repo[l.last()]
413
413
414 def _pairspec(revspec):
414 def _pairspec(revspec):
415 tree = revsetlang.parse(revspec)
415 tree = revsetlang.parse(revspec)
416 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
416 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
417
417
418 def revpair(repo, revs):
418 def revpair(repo, revs):
419 if not revs:
419 if not revs:
420 return repo.dirstate.p1(), None
420 return repo.dirstate.p1(), None
421
421
422 l = revrange(repo, revs)
422 l = revrange(repo, revs)
423
423
424 if not l:
424 if not l:
425 first = second = None
425 first = second = None
426 elif l.isascending():
426 elif l.isascending():
427 first = l.min()
427 first = l.min()
428 second = l.max()
428 second = l.max()
429 elif l.isdescending():
429 elif l.isdescending():
430 first = l.max()
430 first = l.max()
431 second = l.min()
431 second = l.min()
432 else:
432 else:
433 first = l.first()
433 first = l.first()
434 second = l.last()
434 second = l.last()
435
435
436 if first is None:
436 if first is None:
437 raise error.Abort(_('empty revision range'))
437 raise error.Abort(_('empty revision range'))
438 if (first == second and len(revs) >= 2
438 if (first == second and len(revs) >= 2
439 and not all(revrange(repo, [r]) for r in revs)):
439 and not all(revrange(repo, [r]) for r in revs)):
440 raise error.Abort(_('empty revision on one side of range'))
440 raise error.Abort(_('empty revision on one side of range'))
441
441
442 # if top-level is range expression, the result must always be a pair
442 # if top-level is range expression, the result must always be a pair
443 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
443 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
444 return repo.lookup(first), None
444 return repo.lookup(first), None
445
445
446 return repo.lookup(first), repo.lookup(second)
446 return repo.lookup(first), repo.lookup(second)
447
447
448 def revrange(repo, specs):
448 def revrange(repo, specs):
449 """Execute 1 to many revsets and return the union.
449 """Execute 1 to many revsets and return the union.
450
450
451 This is the preferred mechanism for executing revsets using user-specified
451 This is the preferred mechanism for executing revsets using user-specified
452 config options, such as revset aliases.
452 config options, such as revset aliases.
453
453
454 The revsets specified by ``specs`` will be executed via a chained ``OR``
454 The revsets specified by ``specs`` will be executed via a chained ``OR``
455 expression. If ``specs`` is empty, an empty result is returned.
455 expression. If ``specs`` is empty, an empty result is returned.
456
456
457 ``specs`` can contain integers, in which case they are assumed to be
457 ``specs`` can contain integers, in which case they are assumed to be
458 revision numbers.
458 revision numbers.
459
459
460 It is assumed the revsets are already formatted. If you have arguments
460 It is assumed the revsets are already formatted. If you have arguments
461 that need to be expanded in the revset, call ``revsetlang.formatspec()``
461 that need to be expanded in the revset, call ``revsetlang.formatspec()``
462 and pass the result as an element of ``specs``.
462 and pass the result as an element of ``specs``.
463
463
464 Specifying a single revset is allowed.
464 Specifying a single revset is allowed.
465
465
466 Returns a ``revset.abstractsmartset`` which is a list-like interface over
466 Returns a ``revset.abstractsmartset`` which is a list-like interface over
467 integer revisions.
467 integer revisions.
468 """
468 """
469 allspecs = []
469 allspecs = []
470 for spec in specs:
470 for spec in specs:
471 if isinstance(spec, int):
471 if isinstance(spec, int):
472 spec = revsetlang.formatspec('rev(%d)', spec)
472 spec = revsetlang.formatspec('rev(%d)', spec)
473 allspecs.append(spec)
473 allspecs.append(spec)
474 return repo.anyrevs(allspecs, user=True)
474 return repo.anyrevs(allspecs, user=True)
475
475
476 def meaningfulparents(repo, ctx):
476 def meaningfulparents(repo, ctx):
477 """Return list of meaningful (or all if debug) parentrevs for rev.
477 """Return list of meaningful (or all if debug) parentrevs for rev.
478
478
479 For merges (two non-nullrev revisions) both parents are meaningful.
479 For merges (two non-nullrev revisions) both parents are meaningful.
480 Otherwise the first parent revision is considered meaningful if it
480 Otherwise the first parent revision is considered meaningful if it
481 is not the preceding revision.
481 is not the preceding revision.
482 """
482 """
483 parents = ctx.parents()
483 parents = ctx.parents()
484 if len(parents) > 1:
484 if len(parents) > 1:
485 return parents
485 return parents
486 if repo.ui.debugflag:
486 if repo.ui.debugflag:
487 return [parents[0], repo['null']]
487 return [parents[0], repo['null']]
488 if parents[0].rev() >= intrev(ctx) - 1:
488 if parents[0].rev() >= intrev(ctx) - 1:
489 return []
489 return []
490 return parents
490 return parents
491
491
492 def expandpats(pats):
492 def expandpats(pats):
493 '''Expand bare globs when running on windows.
493 '''Expand bare globs when running on windows.
494 On posix we assume it already has already been done by sh.'''
494 On posix we assume it already has already been done by sh.'''
495 if not util.expandglobs:
495 if not util.expandglobs:
496 return list(pats)
496 return list(pats)
497 ret = []
497 ret = []
498 for kindpat in pats:
498 for kindpat in pats:
499 kind, pat = matchmod._patsplit(kindpat, None)
499 kind, pat = matchmod._patsplit(kindpat, None)
500 if kind is None:
500 if kind is None:
501 try:
501 try:
502 globbed = glob.glob(pat)
502 globbed = glob.glob(pat)
503 except re.error:
503 except re.error:
504 globbed = [pat]
504 globbed = [pat]
505 if globbed:
505 if globbed:
506 ret.extend(globbed)
506 ret.extend(globbed)
507 continue
507 continue
508 ret.append(kindpat)
508 ret.append(kindpat)
509 return ret
509 return ret
510
510
511 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
511 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
512 badfn=None):
512 badfn=None):
513 '''Return a matcher and the patterns that were used.
513 '''Return a matcher and the patterns that were used.
514 The matcher will warn about bad matches, unless an alternate badfn callback
514 The matcher will warn about bad matches, unless an alternate badfn callback
515 is provided.'''
515 is provided.'''
516 if pats == ("",):
516 if pats == ("",):
517 pats = []
517 pats = []
518 if opts is None:
518 if opts is None:
519 opts = {}
519 opts = {}
520 if not globbed and default == 'relpath':
520 if not globbed and default == 'relpath':
521 pats = expandpats(pats or [])
521 pats = expandpats(pats or [])
522
522
523 def bad(f, msg):
523 def bad(f, msg):
524 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
524 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
525
525
526 if badfn is None:
526 if badfn is None:
527 badfn = bad
527 badfn = bad
528
528
529 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
529 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
530 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
530 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
531
531
532 if m.always():
532 if m.always():
533 pats = []
533 pats = []
534 return m, pats
534 return m, pats
535
535
536 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
536 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
537 badfn=None):
537 badfn=None):
538 '''Return a matcher that will warn about bad matches.'''
538 '''Return a matcher that will warn about bad matches.'''
539 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
539 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
540
540
541 def matchall(repo):
541 def matchall(repo):
542 '''Return a matcher that will efficiently match everything.'''
542 '''Return a matcher that will efficiently match everything.'''
543 return matchmod.always(repo.root, repo.getcwd())
543 return matchmod.always(repo.root, repo.getcwd())
544
544
545 def matchfiles(repo, files, badfn=None):
545 def matchfiles(repo, files, badfn=None):
546 '''Return a matcher that will efficiently match exactly these files.'''
546 '''Return a matcher that will efficiently match exactly these files.'''
547 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
547 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
548
548
549 def origpath(ui, repo, filepath):
549 def origpath(ui, repo, filepath):
550 '''customize where .orig files are created
550 '''customize where .orig files are created
551
551
552 Fetch user defined path from config file: [ui] origbackuppath = <path>
552 Fetch user defined path from config file: [ui] origbackuppath = <path>
553 Fall back to default (filepath) if not specified
553 Fall back to default (filepath) if not specified
554 '''
554 '''
555 origbackuppath = ui.config('ui', 'origbackuppath')
555 origbackuppath = ui.config('ui', 'origbackuppath')
556 if origbackuppath is None:
556 if origbackuppath is None:
557 return filepath + ".orig"
557 return filepath + ".orig"
558
558
559 filepathfromroot = os.path.relpath(filepath, start=repo.root)
559 filepathfromroot = os.path.relpath(filepath, start=repo.root)
560 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
560 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
561
561
562 origbackupdir = repo.vfs.dirname(fullorigpath)
562 origbackupdir = repo.vfs.dirname(fullorigpath)
563 if not repo.vfs.exists(origbackupdir):
563 if not repo.vfs.exists(origbackupdir):
564 ui.note(_('creating directory: %s\n') % origbackupdir)
564 ui.note(_('creating directory: %s\n') % origbackupdir)
565 util.makedirs(origbackupdir)
565 util.makedirs(origbackupdir)
566
566
567 return fullorigpath + ".orig"
567 return fullorigpath + ".orig"
568
568
569 class _containsnode(object):
569 class _containsnode(object):
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
570 """proxy __contains__(node) to container.__contains__ which accepts revs"""
571
571
572 def __init__(self, repo, revcontainer):
572 def __init__(self, repo, revcontainer):
573 self._torev = repo.changelog.rev
573 self._torev = repo.changelog.rev
574 self._revcontains = revcontainer.__contains__
574 self._revcontains = revcontainer.__contains__
575
575
576 def __contains__(self, node):
576 def __contains__(self, node):
577 return self._revcontains(self._torev(node))
577 return self._revcontains(self._torev(node))
578
578
579 def cleanupnodes(repo, mapping, operation):
579 def cleanupnodes(repo, mapping, operation):
580 """do common cleanups when old nodes are replaced by new nodes
580 """do common cleanups when old nodes are replaced by new nodes
581
581
582 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
582 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
583 (we might also want to move working directory parent in the future)
583 (we might also want to move working directory parent in the future)
584
584
585 mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
585 mapping is {oldnode: [newnode]} or a iterable of nodes if they do not have
586 replacements. operation is a string, like "rebase".
586 replacements. operation is a string, like "rebase".
587 """
587 """
588 if not util.safehasattr(mapping, 'items'):
588 if not util.safehasattr(mapping, 'items'):
589 mapping = {n: () for n in mapping}
589 mapping = {n: () for n in mapping}
590
590
591 with repo.transaction('cleanup') as tr:
591 with repo.transaction('cleanup') as tr:
592 # Move bookmarks
592 # Move bookmarks
593 bmarks = repo._bookmarks
593 bmarks = repo._bookmarks
594 bmarkchanges = []
594 bmarkchanges = []
595 allnewnodes = [n for ns in mapping.values() for n in ns]
595 allnewnodes = [n for ns in mapping.values() for n in ns]
596 for oldnode, newnodes in mapping.items():
596 for oldnode, newnodes in mapping.items():
597 oldbmarks = repo.nodebookmarks(oldnode)
597 oldbmarks = repo.nodebookmarks(oldnode)
598 if not oldbmarks:
598 if not oldbmarks:
599 continue
599 continue
600 from . import bookmarks # avoid import cycle
600 from . import bookmarks # avoid import cycle
601 if len(newnodes) > 1:
601 if len(newnodes) > 1:
602 # usually a split, take the one with biggest rev number
602 # usually a split, take the one with biggest rev number
603 newnode = next(repo.set('max(%ln)', newnodes)).node()
603 newnode = next(repo.set('max(%ln)', newnodes)).node()
604 elif len(newnodes) == 0:
604 elif len(newnodes) == 0:
605 # move bookmark backwards
605 # move bookmark backwards
606 roots = list(repo.set('max((::%n) - %ln)', oldnode,
606 roots = list(repo.set('max((::%n) - %ln)', oldnode,
607 list(mapping)))
607 list(mapping)))
608 if roots:
608 if roots:
609 newnode = roots[0].node()
609 newnode = roots[0].node()
610 else:
610 else:
611 newnode = nullid
611 newnode = nullid
612 else:
612 else:
613 newnode = newnodes[0]
613 newnode = newnodes[0]
614 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
614 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
615 (oldbmarks, hex(oldnode), hex(newnode)))
615 (oldbmarks, hex(oldnode), hex(newnode)))
616 # Delete divergent bookmarks being parents of related newnodes
616 # Delete divergent bookmarks being parents of related newnodes
617 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
617 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
618 allnewnodes, newnode, oldnode)
618 allnewnodes, newnode, oldnode)
619 deletenodes = _containsnode(repo, deleterevs)
619 deletenodes = _containsnode(repo, deleterevs)
620 for name in oldbmarks:
620 for name in oldbmarks:
621 bmarkchanges.append((name, newnode))
621 bmarkchanges.append((name, newnode))
622 for b in bookmarks.divergent2delete(repo, deletenodes, name):
622 for b in bookmarks.divergent2delete(repo, deletenodes, name):
623 bmarkchanges.append((b, None))
623 bmarkchanges.append((b, None))
624
624
625 if bmarkchanges:
625 if bmarkchanges:
626 bmarks.applychanges(repo, tr, bmarkchanges)
626 bmarks.applychanges(repo, tr, bmarkchanges)
627
627
628 # Obsolete or strip nodes
628 # Obsolete or strip nodes
629 if obsolete.isenabled(repo, obsolete.createmarkersopt):
629 if obsolete.isenabled(repo, obsolete.createmarkersopt):
630 # If a node is already obsoleted, and we want to obsolete it
630 # If a node is already obsoleted, and we want to obsolete it
631 # without a successor, skip that obssolete request since it's
631 # without a successor, skip that obssolete request since it's
632 # unnecessary. That's the "if s or not isobs(n)" check below.
632 # unnecessary. That's the "if s or not isobs(n)" check below.
633 # Also sort the node in topology order, that might be useful for
633 # Also sort the node in topology order, that might be useful for
634 # some obsstore logic.
634 # some obsstore logic.
635 # NOTE: the filtering and sorting might belong to createmarkers.
635 # NOTE: the filtering and sorting might belong to createmarkers.
636 # Unfiltered repo is needed since nodes in mapping might be hidden.
636 # Unfiltered repo is needed since nodes in mapping might be hidden.
637 unfi = repo.unfiltered()
637 unfi = repo.unfiltered()
638 isobs = unfi.obsstore.successors.__contains__
638 isobs = unfi.obsstore.successors.__contains__
639 torev = unfi.changelog.rev
639 torev = unfi.changelog.rev
640 sortfunc = lambda ns: torev(ns[0])
640 sortfunc = lambda ns: torev(ns[0])
641 rels = [(unfi[n], tuple(unfi[m] for m in s))
641 rels = [(unfi[n], tuple(unfi[m] for m in s))
642 for n, s in sorted(mapping.items(), key=sortfunc)
642 for n, s in sorted(mapping.items(), key=sortfunc)
643 if s or not isobs(n)]
643 if s or not isobs(n)]
644 obsolete.createmarkers(repo, rels, operation=operation)
644 obsolete.createmarkers(repo, rels, operation=operation)
645 else:
645 else:
646 from . import repair # avoid import cycle
646 from . import repair # avoid import cycle
647 repair.delayedstrip(repo.ui, repo, list(mapping), operation)
647 repair.delayedstrip(repo.ui, repo, list(mapping), operation)
648
648
649 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
649 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
650 if opts is None:
650 if opts is None:
651 opts = {}
651 opts = {}
652 m = matcher
652 m = matcher
653 if dry_run is None:
653 if dry_run is None:
654 dry_run = opts.get('dry_run')
654 dry_run = opts.get('dry_run')
655 if similarity is None:
655 if similarity is None:
656 similarity = float(opts.get('similarity') or 0)
656 similarity = float(opts.get('similarity') or 0)
657
657
658 ret = 0
658 ret = 0
659 join = lambda f: os.path.join(prefix, f)
659 join = lambda f: os.path.join(prefix, f)
660
660
661 wctx = repo[None]
661 wctx = repo[None]
662 for subpath in sorted(wctx.substate):
662 for subpath in sorted(wctx.substate):
663 submatch = matchmod.subdirmatcher(subpath, m)
663 submatch = matchmod.subdirmatcher(subpath, m)
664 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
664 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
665 sub = wctx.sub(subpath)
665 sub = wctx.sub(subpath)
666 try:
666 try:
667 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
667 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
668 ret = 1
668 ret = 1
669 except error.LookupError:
669 except error.LookupError:
670 repo.ui.status(_("skipping missing subrepository: %s\n")
670 repo.ui.status(_("skipping missing subrepository: %s\n")
671 % join(subpath))
671 % join(subpath))
672
672
673 rejected = []
673 rejected = []
674 def badfn(f, msg):
674 def badfn(f, msg):
675 if f in m.files():
675 if f in m.files():
676 m.bad(f, msg)
676 m.bad(f, msg)
677 rejected.append(f)
677 rejected.append(f)
678
678
679 badmatch = matchmod.badmatch(m, badfn)
679 badmatch = matchmod.badmatch(m, badfn)
680 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
680 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
681 badmatch)
681 badmatch)
682
682
683 unknownset = set(unknown + forgotten)
683 unknownset = set(unknown + forgotten)
684 toprint = unknownset.copy()
684 toprint = unknownset.copy()
685 toprint.update(deleted)
685 toprint.update(deleted)
686 for abs in sorted(toprint):
686 for abs in sorted(toprint):
687 if repo.ui.verbose or not m.exact(abs):
687 if repo.ui.verbose or not m.exact(abs):
688 if abs in unknownset:
688 if abs in unknownset:
689 status = _('adding %s\n') % m.uipath(abs)
689 status = _('adding %s\n') % m.uipath(abs)
690 else:
690 else:
691 status = _('removing %s\n') % m.uipath(abs)
691 status = _('removing %s\n') % m.uipath(abs)
692 repo.ui.status(status)
692 repo.ui.status(status)
693
693
694 renames = _findrenames(repo, m, added + unknown, removed + deleted,
694 renames = _findrenames(repo, m, added + unknown, removed + deleted,
695 similarity)
695 similarity)
696
696
697 if not dry_run:
697 if not dry_run:
698 _markchanges(repo, unknown + forgotten, deleted, renames)
698 _markchanges(repo, unknown + forgotten, deleted, renames)
699
699
700 for f in rejected:
700 for f in rejected:
701 if f in m.files():
701 if f in m.files():
702 return 1
702 return 1
703 return ret
703 return ret
704
704
705 def marktouched(repo, files, similarity=0.0):
705 def marktouched(repo, files, similarity=0.0):
706 '''Assert that files have somehow been operated upon. files are relative to
706 '''Assert that files have somehow been operated upon. files are relative to
707 the repo root.'''
707 the repo root.'''
708 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
708 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
709 rejected = []
709 rejected = []
710
710
711 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
711 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
712
712
713 if repo.ui.verbose:
713 if repo.ui.verbose:
714 unknownset = set(unknown + forgotten)
714 unknownset = set(unknown + forgotten)
715 toprint = unknownset.copy()
715 toprint = unknownset.copy()
716 toprint.update(deleted)
716 toprint.update(deleted)
717 for abs in sorted(toprint):
717 for abs in sorted(toprint):
718 if abs in unknownset:
718 if abs in unknownset:
719 status = _('adding %s\n') % abs
719 status = _('adding %s\n') % abs
720 else:
720 else:
721 status = _('removing %s\n') % abs
721 status = _('removing %s\n') % abs
722 repo.ui.status(status)
722 repo.ui.status(status)
723
723
724 renames = _findrenames(repo, m, added + unknown, removed + deleted,
724 renames = _findrenames(repo, m, added + unknown, removed + deleted,
725 similarity)
725 similarity)
726
726
727 _markchanges(repo, unknown + forgotten, deleted, renames)
727 _markchanges(repo, unknown + forgotten, deleted, renames)
728
728
729 for f in rejected:
729 for f in rejected:
730 if f in m.files():
730 if f in m.files():
731 return 1
731 return 1
732 return 0
732 return 0
733
733
734 def _interestingfiles(repo, matcher):
734 def _interestingfiles(repo, matcher):
735 '''Walk dirstate with matcher, looking for files that addremove would care
735 '''Walk dirstate with matcher, looking for files that addremove would care
736 about.
736 about.
737
737
738 This is different from dirstate.status because it doesn't care about
738 This is different from dirstate.status because it doesn't care about
739 whether files are modified or clean.'''
739 whether files are modified or clean.'''
740 added, unknown, deleted, removed, forgotten = [], [], [], [], []
740 added, unknown, deleted, removed, forgotten = [], [], [], [], []
741 audit_path = pathutil.pathauditor(repo.root)
741 audit_path = pathutil.pathauditor(repo.root, cached=True)
742
742
743 ctx = repo[None]
743 ctx = repo[None]
744 dirstate = repo.dirstate
744 dirstate = repo.dirstate
745 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
745 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
746 full=False)
746 full=False)
747 for abs, st in walkresults.iteritems():
747 for abs, st in walkresults.iteritems():
748 dstate = dirstate[abs]
748 dstate = dirstate[abs]
749 if dstate == '?' and audit_path.check(abs):
749 if dstate == '?' and audit_path.check(abs):
750 unknown.append(abs)
750 unknown.append(abs)
751 elif dstate != 'r' and not st:
751 elif dstate != 'r' and not st:
752 deleted.append(abs)
752 deleted.append(abs)
753 elif dstate == 'r' and st:
753 elif dstate == 'r' and st:
754 forgotten.append(abs)
754 forgotten.append(abs)
755 # for finding renames
755 # for finding renames
756 elif dstate == 'r' and not st:
756 elif dstate == 'r' and not st:
757 removed.append(abs)
757 removed.append(abs)
758 elif dstate == 'a':
758 elif dstate == 'a':
759 added.append(abs)
759 added.append(abs)
760
760
761 return added, unknown, deleted, removed, forgotten
761 return added, unknown, deleted, removed, forgotten
762
762
763 def _findrenames(repo, matcher, added, removed, similarity):
763 def _findrenames(repo, matcher, added, removed, similarity):
764 '''Find renames from removed files to added ones.'''
764 '''Find renames from removed files to added ones.'''
765 renames = {}
765 renames = {}
766 if similarity > 0:
766 if similarity > 0:
767 for old, new, score in similar.findrenames(repo, added, removed,
767 for old, new, score in similar.findrenames(repo, added, removed,
768 similarity):
768 similarity):
769 if (repo.ui.verbose or not matcher.exact(old)
769 if (repo.ui.verbose or not matcher.exact(old)
770 or not matcher.exact(new)):
770 or not matcher.exact(new)):
771 repo.ui.status(_('recording removal of %s as rename to %s '
771 repo.ui.status(_('recording removal of %s as rename to %s '
772 '(%d%% similar)\n') %
772 '(%d%% similar)\n') %
773 (matcher.rel(old), matcher.rel(new),
773 (matcher.rel(old), matcher.rel(new),
774 score * 100))
774 score * 100))
775 renames[new] = old
775 renames[new] = old
776 return renames
776 return renames
777
777
778 def _markchanges(repo, unknown, deleted, renames):
778 def _markchanges(repo, unknown, deleted, renames):
779 '''Marks the files in unknown as added, the files in deleted as removed,
779 '''Marks the files in unknown as added, the files in deleted as removed,
780 and the files in renames as copied.'''
780 and the files in renames as copied.'''
781 wctx = repo[None]
781 wctx = repo[None]
782 with repo.wlock():
782 with repo.wlock():
783 wctx.forget(deleted)
783 wctx.forget(deleted)
784 wctx.add(unknown)
784 wctx.add(unknown)
785 for new, old in renames.iteritems():
785 for new, old in renames.iteritems():
786 wctx.copy(old, new)
786 wctx.copy(old, new)
787
787
788 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
788 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
789 """Update the dirstate to reflect the intent of copying src to dst. For
789 """Update the dirstate to reflect the intent of copying src to dst. For
790 different reasons it might not end with dst being marked as copied from src.
790 different reasons it might not end with dst being marked as copied from src.
791 """
791 """
792 origsrc = repo.dirstate.copied(src) or src
792 origsrc = repo.dirstate.copied(src) or src
793 if dst == origsrc: # copying back a copy?
793 if dst == origsrc: # copying back a copy?
794 if repo.dirstate[dst] not in 'mn' and not dryrun:
794 if repo.dirstate[dst] not in 'mn' and not dryrun:
795 repo.dirstate.normallookup(dst)
795 repo.dirstate.normallookup(dst)
796 else:
796 else:
797 if repo.dirstate[origsrc] == 'a' and origsrc == src:
797 if repo.dirstate[origsrc] == 'a' and origsrc == src:
798 if not ui.quiet:
798 if not ui.quiet:
799 ui.warn(_("%s has not been committed yet, so no copy "
799 ui.warn(_("%s has not been committed yet, so no copy "
800 "data will be stored for %s.\n")
800 "data will be stored for %s.\n")
801 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
801 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
802 if repo.dirstate[dst] in '?r' and not dryrun:
802 if repo.dirstate[dst] in '?r' and not dryrun:
803 wctx.add([dst])
803 wctx.add([dst])
804 elif not dryrun:
804 elif not dryrun:
805 wctx.copy(origsrc, dst)
805 wctx.copy(origsrc, dst)
806
806
807 def readrequires(opener, supported):
807 def readrequires(opener, supported):
808 '''Reads and parses .hg/requires and checks if all entries found
808 '''Reads and parses .hg/requires and checks if all entries found
809 are in the list of supported features.'''
809 are in the list of supported features.'''
810 requirements = set(opener.read("requires").splitlines())
810 requirements = set(opener.read("requires").splitlines())
811 missings = []
811 missings = []
812 for r in requirements:
812 for r in requirements:
813 if r not in supported:
813 if r not in supported:
814 if not r or not r[0].isalnum():
814 if not r or not r[0].isalnum():
815 raise error.RequirementError(_(".hg/requires file is corrupt"))
815 raise error.RequirementError(_(".hg/requires file is corrupt"))
816 missings.append(r)
816 missings.append(r)
817 missings.sort()
817 missings.sort()
818 if missings:
818 if missings:
819 raise error.RequirementError(
819 raise error.RequirementError(
820 _("repository requires features unknown to this Mercurial: %s")
820 _("repository requires features unknown to this Mercurial: %s")
821 % " ".join(missings),
821 % " ".join(missings),
822 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
822 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
823 " for more information"))
823 " for more information"))
824 return requirements
824 return requirements
825
825
826 def writerequires(opener, requirements):
826 def writerequires(opener, requirements):
827 with opener('requires', 'w') as fp:
827 with opener('requires', 'w') as fp:
828 for r in sorted(requirements):
828 for r in sorted(requirements):
829 fp.write("%s\n" % r)
829 fp.write("%s\n" % r)
830
830
831 class filecachesubentry(object):
831 class filecachesubentry(object):
832 def __init__(self, path, stat):
832 def __init__(self, path, stat):
833 self.path = path
833 self.path = path
834 self.cachestat = None
834 self.cachestat = None
835 self._cacheable = None
835 self._cacheable = None
836
836
837 if stat:
837 if stat:
838 self.cachestat = filecachesubentry.stat(self.path)
838 self.cachestat = filecachesubentry.stat(self.path)
839
839
840 if self.cachestat:
840 if self.cachestat:
841 self._cacheable = self.cachestat.cacheable()
841 self._cacheable = self.cachestat.cacheable()
842 else:
842 else:
843 # None means we don't know yet
843 # None means we don't know yet
844 self._cacheable = None
844 self._cacheable = None
845
845
846 def refresh(self):
846 def refresh(self):
847 if self.cacheable():
847 if self.cacheable():
848 self.cachestat = filecachesubentry.stat(self.path)
848 self.cachestat = filecachesubentry.stat(self.path)
849
849
850 def cacheable(self):
850 def cacheable(self):
851 if self._cacheable is not None:
851 if self._cacheable is not None:
852 return self._cacheable
852 return self._cacheable
853
853
854 # we don't know yet, assume it is for now
854 # we don't know yet, assume it is for now
855 return True
855 return True
856
856
857 def changed(self):
857 def changed(self):
858 # no point in going further if we can't cache it
858 # no point in going further if we can't cache it
859 if not self.cacheable():
859 if not self.cacheable():
860 return True
860 return True
861
861
862 newstat = filecachesubentry.stat(self.path)
862 newstat = filecachesubentry.stat(self.path)
863
863
864 # we may not know if it's cacheable yet, check again now
864 # we may not know if it's cacheable yet, check again now
865 if newstat and self._cacheable is None:
865 if newstat and self._cacheable is None:
866 self._cacheable = newstat.cacheable()
866 self._cacheable = newstat.cacheable()
867
867
868 # check again
868 # check again
869 if not self._cacheable:
869 if not self._cacheable:
870 return True
870 return True
871
871
872 if self.cachestat != newstat:
872 if self.cachestat != newstat:
873 self.cachestat = newstat
873 self.cachestat = newstat
874 return True
874 return True
875 else:
875 else:
876 return False
876 return False
877
877
878 @staticmethod
878 @staticmethod
879 def stat(path):
879 def stat(path):
880 try:
880 try:
881 return util.cachestat(path)
881 return util.cachestat(path)
882 except OSError as e:
882 except OSError as e:
883 if e.errno != errno.ENOENT:
883 if e.errno != errno.ENOENT:
884 raise
884 raise
885
885
886 class filecacheentry(object):
886 class filecacheentry(object):
887 def __init__(self, paths, stat=True):
887 def __init__(self, paths, stat=True):
888 self._entries = []
888 self._entries = []
889 for path in paths:
889 for path in paths:
890 self._entries.append(filecachesubentry(path, stat))
890 self._entries.append(filecachesubentry(path, stat))
891
891
892 def changed(self):
892 def changed(self):
893 '''true if any entry has changed'''
893 '''true if any entry has changed'''
894 for entry in self._entries:
894 for entry in self._entries:
895 if entry.changed():
895 if entry.changed():
896 return True
896 return True
897 return False
897 return False
898
898
899 def refresh(self):
899 def refresh(self):
900 for entry in self._entries:
900 for entry in self._entries:
901 entry.refresh()
901 entry.refresh()
902
902
903 class filecache(object):
903 class filecache(object):
904 '''A property like decorator that tracks files under .hg/ for updates.
904 '''A property like decorator that tracks files under .hg/ for updates.
905
905
906 Records stat info when called in _filecache.
906 Records stat info when called in _filecache.
907
907
908 On subsequent calls, compares old stat info with new info, and recreates the
908 On subsequent calls, compares old stat info with new info, and recreates the
909 object when any of the files changes, updating the new stat info in
909 object when any of the files changes, updating the new stat info in
910 _filecache.
910 _filecache.
911
911
912 Mercurial either atomic renames or appends for files under .hg,
912 Mercurial either atomic renames or appends for files under .hg,
913 so to ensure the cache is reliable we need the filesystem to be able
913 so to ensure the cache is reliable we need the filesystem to be able
914 to tell us if a file has been replaced. If it can't, we fallback to
914 to tell us if a file has been replaced. If it can't, we fallback to
915 recreating the object on every call (essentially the same behavior as
915 recreating the object on every call (essentially the same behavior as
916 propertycache).
916 propertycache).
917
917
918 '''
918 '''
919 def __init__(self, *paths):
919 def __init__(self, *paths):
920 self.paths = paths
920 self.paths = paths
921
921
922 def join(self, obj, fname):
922 def join(self, obj, fname):
923 """Used to compute the runtime path of a cached file.
923 """Used to compute the runtime path of a cached file.
924
924
925 Users should subclass filecache and provide their own version of this
925 Users should subclass filecache and provide their own version of this
926 function to call the appropriate join function on 'obj' (an instance
926 function to call the appropriate join function on 'obj' (an instance
927 of the class that its member function was decorated).
927 of the class that its member function was decorated).
928 """
928 """
929 raise NotImplementedError
929 raise NotImplementedError
930
930
931 def __call__(self, func):
931 def __call__(self, func):
932 self.func = func
932 self.func = func
933 self.name = func.__name__.encode('ascii')
933 self.name = func.__name__.encode('ascii')
934 return self
934 return self
935
935
936 def __get__(self, obj, type=None):
936 def __get__(self, obj, type=None):
937 # if accessed on the class, return the descriptor itself.
937 # if accessed on the class, return the descriptor itself.
938 if obj is None:
938 if obj is None:
939 return self
939 return self
940 # do we need to check if the file changed?
940 # do we need to check if the file changed?
941 if self.name in obj.__dict__:
941 if self.name in obj.__dict__:
942 assert self.name in obj._filecache, self.name
942 assert self.name in obj._filecache, self.name
943 return obj.__dict__[self.name]
943 return obj.__dict__[self.name]
944
944
945 entry = obj._filecache.get(self.name)
945 entry = obj._filecache.get(self.name)
946
946
947 if entry:
947 if entry:
948 if entry.changed():
948 if entry.changed():
949 entry.obj = self.func(obj)
949 entry.obj = self.func(obj)
950 else:
950 else:
951 paths = [self.join(obj, path) for path in self.paths]
951 paths = [self.join(obj, path) for path in self.paths]
952
952
953 # We stat -before- creating the object so our cache doesn't lie if
953 # We stat -before- creating the object so our cache doesn't lie if
954 # a writer modified between the time we read and stat
954 # a writer modified between the time we read and stat
955 entry = filecacheentry(paths, True)
955 entry = filecacheentry(paths, True)
956 entry.obj = self.func(obj)
956 entry.obj = self.func(obj)
957
957
958 obj._filecache[self.name] = entry
958 obj._filecache[self.name] = entry
959
959
960 obj.__dict__[self.name] = entry.obj
960 obj.__dict__[self.name] = entry.obj
961 return entry.obj
961 return entry.obj
962
962
963 def __set__(self, obj, value):
963 def __set__(self, obj, value):
964 if self.name not in obj._filecache:
964 if self.name not in obj._filecache:
965 # we add an entry for the missing value because X in __dict__
965 # we add an entry for the missing value because X in __dict__
966 # implies X in _filecache
966 # implies X in _filecache
967 paths = [self.join(obj, path) for path in self.paths]
967 paths = [self.join(obj, path) for path in self.paths]
968 ce = filecacheentry(paths, False)
968 ce = filecacheentry(paths, False)
969 obj._filecache[self.name] = ce
969 obj._filecache[self.name] = ce
970 else:
970 else:
971 ce = obj._filecache[self.name]
971 ce = obj._filecache[self.name]
972
972
973 ce.obj = value # update cached copy
973 ce.obj = value # update cached copy
974 obj.__dict__[self.name] = value # update copy returned by obj.x
974 obj.__dict__[self.name] = value # update copy returned by obj.x
975
975
976 def __delete__(self, obj):
976 def __delete__(self, obj):
977 try:
977 try:
978 del obj.__dict__[self.name]
978 del obj.__dict__[self.name]
979 except KeyError:
979 except KeyError:
980 raise AttributeError(self.name)
980 raise AttributeError(self.name)
981
981
982 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
982 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
983 if lock is None:
983 if lock is None:
984 raise error.LockInheritanceContractViolation(
984 raise error.LockInheritanceContractViolation(
985 'lock can only be inherited while held')
985 'lock can only be inherited while held')
986 if environ is None:
986 if environ is None:
987 environ = {}
987 environ = {}
988 with lock.inherit() as locker:
988 with lock.inherit() as locker:
989 environ[envvar] = locker
989 environ[envvar] = locker
990 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
990 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
991
991
992 def wlocksub(repo, cmd, *args, **kwargs):
992 def wlocksub(repo, cmd, *args, **kwargs):
993 """run cmd as a subprocess that allows inheriting repo's wlock
993 """run cmd as a subprocess that allows inheriting repo's wlock
994
994
995 This can only be called while the wlock is held. This takes all the
995 This can only be called while the wlock is held. This takes all the
996 arguments that ui.system does, and returns the exit code of the
996 arguments that ui.system does, and returns the exit code of the
997 subprocess."""
997 subprocess."""
998 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
998 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
999 **kwargs)
999 **kwargs)
1000
1000
1001 def gdinitconfig(ui):
1001 def gdinitconfig(ui):
1002 """helper function to know if a repo should be created as general delta
1002 """helper function to know if a repo should be created as general delta
1003 """
1003 """
1004 # experimental config: format.generaldelta
1004 # experimental config: format.generaldelta
1005 return (ui.configbool('format', 'generaldelta')
1005 return (ui.configbool('format', 'generaldelta')
1006 or ui.configbool('format', 'usegeneraldelta'))
1006 or ui.configbool('format', 'usegeneraldelta'))
1007
1007
1008 def gddeltaconfig(ui):
1008 def gddeltaconfig(ui):
1009 """helper function to know if incoming delta should be optimised
1009 """helper function to know if incoming delta should be optimised
1010 """
1010 """
1011 # experimental config: format.generaldelta
1011 # experimental config: format.generaldelta
1012 return ui.configbool('format', 'generaldelta')
1012 return ui.configbool('format', 'generaldelta')
1013
1013
1014 class simplekeyvaluefile(object):
1014 class simplekeyvaluefile(object):
1015 """A simple file with key=value lines
1015 """A simple file with key=value lines
1016
1016
1017 Keys must be alphanumerics and start with a letter, values must not
1017 Keys must be alphanumerics and start with a letter, values must not
1018 contain '\n' characters"""
1018 contain '\n' characters"""
1019 firstlinekey = '__firstline'
1019 firstlinekey = '__firstline'
1020
1020
1021 def __init__(self, vfs, path, keys=None):
1021 def __init__(self, vfs, path, keys=None):
1022 self.vfs = vfs
1022 self.vfs = vfs
1023 self.path = path
1023 self.path = path
1024
1024
1025 def read(self, firstlinenonkeyval=False):
1025 def read(self, firstlinenonkeyval=False):
1026 """Read the contents of a simple key-value file
1026 """Read the contents of a simple key-value file
1027
1027
1028 'firstlinenonkeyval' indicates whether the first line of file should
1028 'firstlinenonkeyval' indicates whether the first line of file should
1029 be treated as a key-value pair or reuturned fully under the
1029 be treated as a key-value pair or reuturned fully under the
1030 __firstline key."""
1030 __firstline key."""
1031 lines = self.vfs.readlines(self.path)
1031 lines = self.vfs.readlines(self.path)
1032 d = {}
1032 d = {}
1033 if firstlinenonkeyval:
1033 if firstlinenonkeyval:
1034 if not lines:
1034 if not lines:
1035 e = _("empty simplekeyvalue file")
1035 e = _("empty simplekeyvalue file")
1036 raise error.CorruptedState(e)
1036 raise error.CorruptedState(e)
1037 # we don't want to include '\n' in the __firstline
1037 # we don't want to include '\n' in the __firstline
1038 d[self.firstlinekey] = lines[0][:-1]
1038 d[self.firstlinekey] = lines[0][:-1]
1039 del lines[0]
1039 del lines[0]
1040
1040
1041 try:
1041 try:
1042 # the 'if line.strip()' part prevents us from failing on empty
1042 # the 'if line.strip()' part prevents us from failing on empty
1043 # lines which only contain '\n' therefore are not skipped
1043 # lines which only contain '\n' therefore are not skipped
1044 # by 'if line'
1044 # by 'if line'
1045 updatedict = dict(line[:-1].split('=', 1) for line in lines
1045 updatedict = dict(line[:-1].split('=', 1) for line in lines
1046 if line.strip())
1046 if line.strip())
1047 if self.firstlinekey in updatedict:
1047 if self.firstlinekey in updatedict:
1048 e = _("%r can't be used as a key")
1048 e = _("%r can't be used as a key")
1049 raise error.CorruptedState(e % self.firstlinekey)
1049 raise error.CorruptedState(e % self.firstlinekey)
1050 d.update(updatedict)
1050 d.update(updatedict)
1051 except ValueError as e:
1051 except ValueError as e:
1052 raise error.CorruptedState(str(e))
1052 raise error.CorruptedState(str(e))
1053 return d
1053 return d
1054
1054
1055 def write(self, data, firstline=None):
1055 def write(self, data, firstline=None):
1056 """Write key=>value mapping to a file
1056 """Write key=>value mapping to a file
1057 data is a dict. Keys must be alphanumerical and start with a letter.
1057 data is a dict. Keys must be alphanumerical and start with a letter.
1058 Values must not contain newline characters.
1058 Values must not contain newline characters.
1059
1059
1060 If 'firstline' is not None, it is written to file before
1060 If 'firstline' is not None, it is written to file before
1061 everything else, as it is, not in a key=value form"""
1061 everything else, as it is, not in a key=value form"""
1062 lines = []
1062 lines = []
1063 if firstline is not None:
1063 if firstline is not None:
1064 lines.append('%s\n' % firstline)
1064 lines.append('%s\n' % firstline)
1065
1065
1066 for k, v in data.items():
1066 for k, v in data.items():
1067 if k == self.firstlinekey:
1067 if k == self.firstlinekey:
1068 e = "key name '%s' is reserved" % self.firstlinekey
1068 e = "key name '%s' is reserved" % self.firstlinekey
1069 raise error.ProgrammingError(e)
1069 raise error.ProgrammingError(e)
1070 if not k[0].isalpha():
1070 if not k[0].isalpha():
1071 e = "keys must start with a letter in a key-value file"
1071 e = "keys must start with a letter in a key-value file"
1072 raise error.ProgrammingError(e)
1072 raise error.ProgrammingError(e)
1073 if not k.isalnum():
1073 if not k.isalnum():
1074 e = "invalid key name in a simple key-value file"
1074 e = "invalid key name in a simple key-value file"
1075 raise error.ProgrammingError(e)
1075 raise error.ProgrammingError(e)
1076 if '\n' in v:
1076 if '\n' in v:
1077 e = "invalid value in a simple key-value file"
1077 e = "invalid value in a simple key-value file"
1078 raise error.ProgrammingError(e)
1078 raise error.ProgrammingError(e)
1079 lines.append("%s=%s\n" % (k, v))
1079 lines.append("%s=%s\n" % (k, v))
1080 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1080 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
1081 fp.write(''.join(lines))
1081 fp.write(''.join(lines))
1082
1082
1083 _reportobsoletedsource = [
1083 _reportobsoletedsource = [
1084 'debugobsolete',
1084 'debugobsolete',
1085 'pull',
1085 'pull',
1086 'push',
1086 'push',
1087 'serve',
1087 'serve',
1088 'unbundle',
1088 'unbundle',
1089 ]
1089 ]
1090
1090
1091 def registersummarycallback(repo, otr, txnname=''):
1091 def registersummarycallback(repo, otr, txnname=''):
1092 """register a callback to issue a summary after the transaction is closed
1092 """register a callback to issue a summary after the transaction is closed
1093 """
1093 """
1094 for source in _reportobsoletedsource:
1094 for source in _reportobsoletedsource:
1095 if txnname.startswith(source):
1095 if txnname.startswith(source):
1096 reporef = weakref.ref(repo)
1096 reporef = weakref.ref(repo)
1097 def reportsummary(tr):
1097 def reportsummary(tr):
1098 """the actual callback reporting the summary"""
1098 """the actual callback reporting the summary"""
1099 repo = reporef()
1099 repo = reporef()
1100 obsoleted = obsutil.getobsoleted(repo, tr)
1100 obsoleted = obsutil.getobsoleted(repo, tr)
1101 if obsoleted:
1101 if obsoleted:
1102 repo.ui.status(_('obsoleted %i changesets\n')
1102 repo.ui.status(_('obsoleted %i changesets\n')
1103 % len(obsoleted))
1103 % len(obsoleted))
1104 otr.addpostclose('00-txnreport', reportsummary)
1104 otr.addpostclose('00-txnreport', reportsummary)
1105 break
1105 break
@@ -1,325 +1,324 b''
1 # sshpeer.py - ssh repository proxy class for mercurial
1 # sshpeer.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11
11
12 from .i18n import _
12 from .i18n import _
13 from . import (
13 from . import (
14 error,
14 error,
15 pycompat,
15 pycompat,
16 util,
16 util,
17 wireproto,
17 wireproto,
18 )
18 )
19
19
20 def _serverquote(s):
20 def _serverquote(s):
21 if not s:
21 if not s:
22 return s
22 return s
23 '''quote a string for the remote shell ... which we assume is sh'''
23 '''quote a string for the remote shell ... which we assume is sh'''
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
24 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
25 return s
25 return s
26 return "'%s'" % s.replace("'", "'\\''")
26 return "'%s'" % s.replace("'", "'\\''")
27
27
28 def _forwardoutput(ui, pipe):
28 def _forwardoutput(ui, pipe):
29 """display all data currently available on pipe as remote output.
29 """display all data currently available on pipe as remote output.
30
30
31 This is non blocking."""
31 This is non blocking."""
32 s = util.readpipe(pipe)
32 s = util.readpipe(pipe)
33 if s:
33 if s:
34 for l in s.splitlines():
34 for l in s.splitlines():
35 ui.status(_("remote: "), l, '\n')
35 ui.status(_("remote: "), l, '\n')
36
36
37 class doublepipe(object):
37 class doublepipe(object):
38 """Operate a side-channel pipe in addition of a main one
38 """Operate a side-channel pipe in addition of a main one
39
39
40 The side-channel pipe contains server output to be forwarded to the user
40 The side-channel pipe contains server output to be forwarded to the user
41 input. The double pipe will behave as the "main" pipe, but will ensure the
41 input. The double pipe will behave as the "main" pipe, but will ensure the
42 content of the "side" pipe is properly processed while we wait for blocking
42 content of the "side" pipe is properly processed while we wait for blocking
43 call on the "main" pipe.
43 call on the "main" pipe.
44
44
45 If large amounts of data are read from "main", the forward will cease after
45 If large amounts of data are read from "main", the forward will cease after
46 the first bytes start to appear. This simplifies the implementation
46 the first bytes start to appear. This simplifies the implementation
47 without affecting actual output of sshpeer too much as we rarely issue
47 without affecting actual output of sshpeer too much as we rarely issue
48 large read for data not yet emitted by the server.
48 large read for data not yet emitted by the server.
49
49
50 The main pipe is expected to be a 'bufferedinputpipe' from the util module
50 The main pipe is expected to be a 'bufferedinputpipe' from the util module
51 that handle all the os specific bits. This class lives in this module
51 that handle all the os specific bits. This class lives in this module
52 because it focus on behavior specific to the ssh protocol."""
52 because it focus on behavior specific to the ssh protocol."""
53
53
54 def __init__(self, ui, main, side):
54 def __init__(self, ui, main, side):
55 self._ui = ui
55 self._ui = ui
56 self._main = main
56 self._main = main
57 self._side = side
57 self._side = side
58
58
59 def _wait(self):
59 def _wait(self):
60 """wait until some data are available on main or side
60 """wait until some data are available on main or side
61
61
62 return a pair of boolean (ismainready, issideready)
62 return a pair of boolean (ismainready, issideready)
63
63
64 (This will only wait for data if the setup is supported by `util.poll`)
64 (This will only wait for data if the setup is supported by `util.poll`)
65 """
65 """
66 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
66 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
67 return (True, True) # main has data, assume side is worth poking at.
67 return (True, True) # main has data, assume side is worth poking at.
68 fds = [self._main.fileno(), self._side.fileno()]
68 fds = [self._main.fileno(), self._side.fileno()]
69 try:
69 try:
70 act = util.poll(fds)
70 act = util.poll(fds)
71 except NotImplementedError:
71 except NotImplementedError:
72 # non supported yet case, assume all have data.
72 # non supported yet case, assume all have data.
73 act = fds
73 act = fds
74 return (self._main.fileno() in act, self._side.fileno() in act)
74 return (self._main.fileno() in act, self._side.fileno() in act)
75
75
76 def write(self, data):
76 def write(self, data):
77 return self._call('write', data)
77 return self._call('write', data)
78
78
79 def read(self, size):
79 def read(self, size):
80 r = self._call('read', size)
80 r = self._call('read', size)
81 if size != 0 and not r:
81 if size != 0 and not r:
82 # We've observed a condition that indicates the
82 # We've observed a condition that indicates the
83 # stdout closed unexpectedly. Check stderr one
83 # stdout closed unexpectedly. Check stderr one
84 # more time and snag anything that's there before
84 # more time and snag anything that's there before
85 # letting anyone know the main part of the pipe
85 # letting anyone know the main part of the pipe
86 # closed prematurely.
86 # closed prematurely.
87 _forwardoutput(self._ui, self._side)
87 _forwardoutput(self._ui, self._side)
88 return r
88 return r
89
89
90 def readline(self):
90 def readline(self):
91 return self._call('readline')
91 return self._call('readline')
92
92
93 def _call(self, methname, data=None):
93 def _call(self, methname, data=None):
94 """call <methname> on "main", forward output of "side" while blocking
94 """call <methname> on "main", forward output of "side" while blocking
95 """
95 """
96 # data can be '' or 0
96 # data can be '' or 0
97 if (data is not None and not data) or self._main.closed:
97 if (data is not None and not data) or self._main.closed:
98 _forwardoutput(self._ui, self._side)
98 _forwardoutput(self._ui, self._side)
99 return ''
99 return ''
100 while True:
100 while True:
101 mainready, sideready = self._wait()
101 mainready, sideready = self._wait()
102 if sideready:
102 if sideready:
103 _forwardoutput(self._ui, self._side)
103 _forwardoutput(self._ui, self._side)
104 if mainready:
104 if mainready:
105 meth = getattr(self._main, methname)
105 meth = getattr(self._main, methname)
106 if data is None:
106 if data is None:
107 return meth()
107 return meth()
108 else:
108 else:
109 return meth(data)
109 return meth(data)
110
110
111 def close(self):
111 def close(self):
112 return self._main.close()
112 return self._main.close()
113
113
114 def flush(self):
114 def flush(self):
115 return self._main.flush()
115 return self._main.flush()
116
116
117 class sshpeer(wireproto.wirepeer):
117 class sshpeer(wireproto.wirepeer):
118 def __init__(self, ui, path, create=False):
118 def __init__(self, ui, path, create=False):
119 self._url = path
119 self._url = path
120 self.ui = ui
120 self.ui = ui
121 self.pipeo = self.pipei = self.pipee = None
121 self.pipeo = self.pipei = self.pipee = None
122
122
123 u = util.url(path, parsequery=False, parsefragment=False)
123 u = util.url(path, parsequery=False, parsefragment=False)
124 if u.scheme != 'ssh' or not u.host or u.path is None:
124 if u.scheme != 'ssh' or not u.host or u.path is None:
125 self._abort(error.RepoError(_("couldn't parse location %s") % path))
125 self._abort(error.RepoError(_("couldn't parse location %s") % path))
126
126
127 util.checksafessh(path)
128
127 self.user = u.user
129 self.user = u.user
128 if u.passwd is not None:
130 if u.passwd is not None:
129 self._abort(error.RepoError(_("password in URL not supported")))
131 self._abort(error.RepoError(_("password in URL not supported")))
130 self.host = u.host
132 self.host = u.host
131 self.port = u.port
133 self.port = u.port
132 self.path = u.path or "."
134 self.path = u.path or "."
133
135
134 sshcmd = self.ui.config("ui", "ssh")
136 sshcmd = self.ui.config("ui", "ssh")
135 remotecmd = self.ui.config("ui", "remotecmd")
137 remotecmd = self.ui.config("ui", "remotecmd")
136
138
137 args = util.sshargs(sshcmd,
139 args = util.sshargs(sshcmd, self.host, self.user, self.port)
138 _serverquote(self.host),
139 _serverquote(self.user),
140 _serverquote(self.port))
141
140
142 if create:
141 if create:
143 cmd = '%s %s %s' % (sshcmd, args,
142 cmd = '%s %s %s' % (sshcmd, args,
144 util.shellquote("%s init %s" %
143 util.shellquote("%s init %s" %
145 (_serverquote(remotecmd), _serverquote(self.path))))
144 (_serverquote(remotecmd), _serverquote(self.path))))
146 ui.debug('running %s\n' % cmd)
145 ui.debug('running %s\n' % cmd)
147 res = ui.system(cmd, blockedtag='sshpeer')
146 res = ui.system(cmd, blockedtag='sshpeer')
148 if res != 0:
147 if res != 0:
149 self._abort(error.RepoError(_("could not create remote repo")))
148 self._abort(error.RepoError(_("could not create remote repo")))
150
149
151 self._validaterepo(sshcmd, args, remotecmd)
150 self._validaterepo(sshcmd, args, remotecmd)
152
151
153 def url(self):
152 def url(self):
154 return self._url
153 return self._url
155
154
156 def _validaterepo(self, sshcmd, args, remotecmd):
155 def _validaterepo(self, sshcmd, args, remotecmd):
157 # cleanup up previous run
156 # cleanup up previous run
158 self.cleanup()
157 self.cleanup()
159
158
160 cmd = '%s %s %s' % (sshcmd, args,
159 cmd = '%s %s %s' % (sshcmd, args,
161 util.shellquote("%s -R %s serve --stdio" %
160 util.shellquote("%s -R %s serve --stdio" %
162 (_serverquote(remotecmd), _serverquote(self.path))))
161 (_serverquote(remotecmd), _serverquote(self.path))))
163 self.ui.debug('running %s\n' % cmd)
162 self.ui.debug('running %s\n' % cmd)
164 cmd = util.quotecommand(cmd)
163 cmd = util.quotecommand(cmd)
165
164
166 # while self.subprocess isn't used, having it allows the subprocess to
165 # while self.subprocess isn't used, having it allows the subprocess to
167 # to clean up correctly later
166 # to clean up correctly later
168 #
167 #
169 # no buffer allow the use of 'select'
168 # no buffer allow the use of 'select'
170 # feel free to remove buffering and select usage when we ultimately
169 # feel free to remove buffering and select usage when we ultimately
171 # move to threading.
170 # move to threading.
172 sub = util.popen4(cmd, bufsize=0)
171 sub = util.popen4(cmd, bufsize=0)
173 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
172 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
174
173
175 self.pipei = util.bufferedinputpipe(self.pipei)
174 self.pipei = util.bufferedinputpipe(self.pipei)
176 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
175 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
177 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
176 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
178
177
179 # skip any noise generated by remote shell
178 # skip any noise generated by remote shell
180 self._callstream("hello")
179 self._callstream("hello")
181 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
180 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
182 lines = ["", "dummy"]
181 lines = ["", "dummy"]
183 max_noise = 500
182 max_noise = 500
184 while lines[-1] and max_noise:
183 while lines[-1] and max_noise:
185 l = r.readline()
184 l = r.readline()
186 self.readerr()
185 self.readerr()
187 if lines[-1] == "1\n" and l == "\n":
186 if lines[-1] == "1\n" and l == "\n":
188 break
187 break
189 if l:
188 if l:
190 self.ui.debug("remote: ", l)
189 self.ui.debug("remote: ", l)
191 lines.append(l)
190 lines.append(l)
192 max_noise -= 1
191 max_noise -= 1
193 else:
192 else:
194 self._abort(error.RepoError(_('no suitable response from '
193 self._abort(error.RepoError(_('no suitable response from '
195 'remote hg')))
194 'remote hg')))
196
195
197 self._caps = set()
196 self._caps = set()
198 for l in reversed(lines):
197 for l in reversed(lines):
199 if l.startswith("capabilities:"):
198 if l.startswith("capabilities:"):
200 self._caps.update(l[:-1].split(":")[1].split())
199 self._caps.update(l[:-1].split(":")[1].split())
201 break
200 break
202
201
203 def _capabilities(self):
202 def _capabilities(self):
204 return self._caps
203 return self._caps
205
204
206 def readerr(self):
205 def readerr(self):
207 _forwardoutput(self.ui, self.pipee)
206 _forwardoutput(self.ui, self.pipee)
208
207
209 def _abort(self, exception):
208 def _abort(self, exception):
210 self.cleanup()
209 self.cleanup()
211 raise exception
210 raise exception
212
211
213 def cleanup(self):
212 def cleanup(self):
214 if self.pipeo is None:
213 if self.pipeo is None:
215 return
214 return
216 self.pipeo.close()
215 self.pipeo.close()
217 self.pipei.close()
216 self.pipei.close()
218 try:
217 try:
219 # read the error descriptor until EOF
218 # read the error descriptor until EOF
220 for l in self.pipee:
219 for l in self.pipee:
221 self.ui.status(_("remote: "), l)
220 self.ui.status(_("remote: "), l)
222 except (IOError, ValueError):
221 except (IOError, ValueError):
223 pass
222 pass
224 self.pipee.close()
223 self.pipee.close()
225
224
226 __del__ = cleanup
225 __del__ = cleanup
227
226
228 def _submitbatch(self, req):
227 def _submitbatch(self, req):
229 rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
228 rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
230 available = self._getamount()
229 available = self._getamount()
231 # TODO this response parsing is probably suboptimal for large
230 # TODO this response parsing is probably suboptimal for large
232 # batches with large responses.
231 # batches with large responses.
233 toread = min(available, 1024)
232 toread = min(available, 1024)
234 work = rsp.read(toread)
233 work = rsp.read(toread)
235 available -= toread
234 available -= toread
236 chunk = work
235 chunk = work
237 while chunk:
236 while chunk:
238 while ';' in work:
237 while ';' in work:
239 one, work = work.split(';', 1)
238 one, work = work.split(';', 1)
240 yield wireproto.unescapearg(one)
239 yield wireproto.unescapearg(one)
241 toread = min(available, 1024)
240 toread = min(available, 1024)
242 chunk = rsp.read(toread)
241 chunk = rsp.read(toread)
243 available -= toread
242 available -= toread
244 work += chunk
243 work += chunk
245 yield wireproto.unescapearg(work)
244 yield wireproto.unescapearg(work)
246
245
247 def _callstream(self, cmd, **args):
246 def _callstream(self, cmd, **args):
248 args = pycompat.byteskwargs(args)
247 args = pycompat.byteskwargs(args)
249 self.ui.debug("sending %s command\n" % cmd)
248 self.ui.debug("sending %s command\n" % cmd)
250 self.pipeo.write("%s\n" % cmd)
249 self.pipeo.write("%s\n" % cmd)
251 _func, names = wireproto.commands[cmd]
250 _func, names = wireproto.commands[cmd]
252 keys = names.split()
251 keys = names.split()
253 wireargs = {}
252 wireargs = {}
254 for k in keys:
253 for k in keys:
255 if k == '*':
254 if k == '*':
256 wireargs['*'] = args
255 wireargs['*'] = args
257 break
256 break
258 else:
257 else:
259 wireargs[k] = args[k]
258 wireargs[k] = args[k]
260 del args[k]
259 del args[k]
261 for k, v in sorted(wireargs.iteritems()):
260 for k, v in sorted(wireargs.iteritems()):
262 self.pipeo.write("%s %d\n" % (k, len(v)))
261 self.pipeo.write("%s %d\n" % (k, len(v)))
263 if isinstance(v, dict):
262 if isinstance(v, dict):
264 for dk, dv in v.iteritems():
263 for dk, dv in v.iteritems():
265 self.pipeo.write("%s %d\n" % (dk, len(dv)))
264 self.pipeo.write("%s %d\n" % (dk, len(dv)))
266 self.pipeo.write(dv)
265 self.pipeo.write(dv)
267 else:
266 else:
268 self.pipeo.write(v)
267 self.pipeo.write(v)
269 self.pipeo.flush()
268 self.pipeo.flush()
270
269
271 return self.pipei
270 return self.pipei
272
271
273 def _callcompressable(self, cmd, **args):
272 def _callcompressable(self, cmd, **args):
274 return self._callstream(cmd, **args)
273 return self._callstream(cmd, **args)
275
274
276 def _call(self, cmd, **args):
275 def _call(self, cmd, **args):
277 self._callstream(cmd, **args)
276 self._callstream(cmd, **args)
278 return self._recv()
277 return self._recv()
279
278
280 def _callpush(self, cmd, fp, **args):
279 def _callpush(self, cmd, fp, **args):
281 r = self._call(cmd, **args)
280 r = self._call(cmd, **args)
282 if r:
281 if r:
283 return '', r
282 return '', r
284 for d in iter(lambda: fp.read(4096), ''):
283 for d in iter(lambda: fp.read(4096), ''):
285 self._send(d)
284 self._send(d)
286 self._send("", flush=True)
285 self._send("", flush=True)
287 r = self._recv()
286 r = self._recv()
288 if r:
287 if r:
289 return '', r
288 return '', r
290 return self._recv(), ''
289 return self._recv(), ''
291
290
292 def _calltwowaystream(self, cmd, fp, **args):
291 def _calltwowaystream(self, cmd, fp, **args):
293 r = self._call(cmd, **args)
292 r = self._call(cmd, **args)
294 if r:
293 if r:
295 # XXX needs to be made better
294 # XXX needs to be made better
296 raise error.Abort(_('unexpected remote reply: %s') % r)
295 raise error.Abort(_('unexpected remote reply: %s') % r)
297 for d in iter(lambda: fp.read(4096), ''):
296 for d in iter(lambda: fp.read(4096), ''):
298 self._send(d)
297 self._send(d)
299 self._send("", flush=True)
298 self._send("", flush=True)
300 return self.pipei
299 return self.pipei
301
300
302 def _getamount(self):
301 def _getamount(self):
303 l = self.pipei.readline()
302 l = self.pipei.readline()
304 if l == '\n':
303 if l == '\n':
305 self.readerr()
304 self.readerr()
306 msg = _('check previous remote output')
305 msg = _('check previous remote output')
307 self._abort(error.OutOfBandError(hint=msg))
306 self._abort(error.OutOfBandError(hint=msg))
308 self.readerr()
307 self.readerr()
309 try:
308 try:
310 return int(l)
309 return int(l)
311 except ValueError:
310 except ValueError:
312 self._abort(error.ResponseError(_("unexpected response:"), l))
311 self._abort(error.ResponseError(_("unexpected response:"), l))
313
312
314 def _recv(self):
313 def _recv(self):
315 return self.pipei.read(self._getamount())
314 return self.pipei.read(self._getamount())
316
315
317 def _send(self, data, flush=False):
316 def _send(self, data, flush=False):
318 self.pipeo.write("%d\n" % len(data))
317 self.pipeo.write("%d\n" % len(data))
319 if data:
318 if data:
320 self.pipeo.write(data)
319 self.pipeo.write(data)
321 if flush:
320 if flush:
322 self.pipeo.flush()
321 self.pipeo.flush()
323 self.readerr()
322 self.readerr()
324
323
325 instance = sshpeer
324 instance = sshpeer
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now