@@ -1,149 +1,151 @@
1 | 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0 iD8DBQBEYmO2ywK+sNU5EO8RAnaYAKCO7x15xUn5mnhqWNXqk/ehlhRt2QCfRDfY0LrUq2q4oK/KypuJYPHgq1A= | |
2 | 2be3001847cb18a23c403439d9e7d0ace30804e9 0 iD8DBQBExUbjywK+sNU5EO8RAhzxAKCtyHAQUzcTSZTqlfJ0by6vhREwWQCghaQFHfkfN0l9/40EowNhuMOKnJk= | |
3 | 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0 iD8DBQBFfL2QywK+sNU5EO8RAjYFAKCoGlaWRTeMsjdmxAjUYx6diZxOBwCfY6IpBYsKvPTwB3oktnPt5Rmrlys= | |
4 | 27230c29bfec36d5540fbe1c976810aefecfd1d2 0 iD8DBQBFheweywK+sNU5EO8RAt7VAKCrqJQWT2/uo2RWf0ZI4bLp6v82jACgjrMdsaTbxRsypcmEsdPhlG6/8F4= | |
5 | fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0 iD8DBQBGgHicywK+sNU5EO8RAgNxAJ0VG8ixAaeudx4sZbhngI1syu49HQCeNUJQfWBgA8bkJ2pvsFpNxwYaX3I= | |
6 | 23889160905a1b09fffe1c07378e9fc1827606eb 0 iD8DBQBHGTzoywK+sNU5EO8RAr/UAJ0Y8s4jQtzgS+G9vM8z6CWBThZ8fwCcCT5XDj2XwxKkz/0s6UELwjsO3LU= | |
7 | bae2e9c838e90a393bae3973a7850280413e091a 0 iD8DBQBH6DO5ywK+sNU5EO8RAsfrAJ0e4r9c9GF/MJsM7Xjd3NesLRC3+ACffj6+6HXdZf8cswAoFPO+DY00oD0= | |
8 | d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 0 iD8DBQBINdwsywK+sNU5EO8RAjIUAKCPmlFJSpsPAAUKF+iNHAwVnwmzeQCdEXrL27CWclXuUKdbQC8De7LICtE= | |
9 | d2375bbee6d47e62ba8e415c86e83a465dc4dce9 0 iD8DBQBIo1wpywK+sNU5EO8RAmRNAJ94x3OFt6blbqu/yBoypm/AJ44fuACfUaldXcV5z9tht97hSp22DVTEPGc= | |
10 | 2a67430f92f15ea5159c26b09ec4839a0c549a26 0 iEYEABECAAYFAkk1hykACgkQywK+sNU5EO85QACeNJNUanjc2tl4wUoPHNuv+lSj0ZMAoIm93wSTc/feyYnO2YCaQ1iyd9Nu | |
11 | 3773e510d433969e277b1863c317b674cbee2065 0 iEYEABECAAYFAklNbbAACgkQywK+sNU5EO8o+gCfeb2/lfIJZMvyDA1m+G1CsBAxfFsAoIa6iAMG8SBY7hW1Q85Yf/LXEvaE | |
12 | 11a4eb81fb4f4742451591489e2797dc47903277 0 iEYEABECAAYFAklcAnsACgkQywK+sNU5EO+uXwCbBVHNNsLy1g7BlAyQJwadYVyHOXoAoKvtAVO71+bv7EbVoukwTzT+P4Sx | |
13 | 11efa41037e280d08cfb07c09ad485df30fb0ea8 0 iEYEABECAAYFAkmvJRQACgkQywK+sNU5EO9XZwCeLMgDgPSMWMm6vgjL4lDs2pEc5+0AnRxfiFbpbBfuEFTqKz9nbzeyoBlx | |
14 | 02981000012e3adf40c4849bd7b3d5618f9ce82d 0 iEYEABECAAYFAknEH3wACgkQywK+sNU5EO+uXwCeI+LbLMmhjU1lKSfU3UWJHjjUC7oAoIZLvYDGOL/tNZFUuatc3RnZ2eje | |
15 | 196d40e7c885fa6e95f89134809b3ec7bdbca34b 0 iEYEABECAAYFAkpL2X4ACgkQywK+sNU5EO9FOwCfXJycjyKJXsvQqKkHrglwOQhEKS4An36GfKzptfN8b1qNc3+ya/5c2WOM | |
16 | 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 0 iEYEABECAAYFAkpopLIACgkQywK+sNU5EO8QSgCfZ0ztsd071rOa2lhmp9Fyue/WoI0AoLTei80/xrhRlB8L/rZEf2KBl8dA | |
17 | 31ec469f9b556f11819937cf68ee53f2be927ebf 0 iEYEABECAAYFAksBuxAACgkQywK+sNU5EO+mBwCfagB+A0txzWZ6dRpug3LEoK7Z1QsAoKpbk8vsLjv6/oRDicSk/qBu33+m | |
18 | 439d7ea6fe3aa4ab9ec274a68846779153789de9 0 iEYEABECAAYFAksVw0kACgkQywK+sNU5EO/oZwCfdfBEkgp38xq6wN2F4nj+SzofrJIAnjmxt04vaJSeOOeHylHvk6lzuQsw | |
19 | 296a0b14a68621f6990c54fdba0083f6f20935bf 0 iEYEABECAAYFAks+jCoACgkQywK+sNU5EO9J8wCeMUGF9E/gS2UBsqIz56WS4HMPRPUAoI5J95mwEIK8Clrl7qFRidNI6APq | |
20 | 4aa619c4c2c09907034d9824ebb1dd0e878206eb 0 iEYEABECAAYFAktm9IsACgkQywK+sNU5EO9XGgCgk4HclRQhexEtooPE5GcUCdB6M8EAn2ptOhMVbIoO+JncA+tNACPFXh0O | |
21 | ff2704a8ded37fbebd8b6eb5ec733731d725da8a 0 iEYEABECAAYFAkuRoSQACgkQywK+sNU5EO//3QCeJDc5r2uFyFCtAlpSA27DEE5rrxAAn2FSwTy9fhrB3QAdDQlwkEZcQzDh | |
22 | 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 0 iEYEABECAAYFAku1IwIACgkQywK+sNU5EO9MjgCdHLVwkTZlNHxhcznZKBL1rjN+J7cAoLLWi9LTL6f/TgBaPSKOy1ublbaW | |
23 | 39f725929f0c48c5fb3b90c071fc3066012456ca 0 iEYEABECAAYFAkvclvsACgkQywK+sNU5EO9FSwCeL9i5x8ALW/LE5+lCX6MFEAe4MhwAn1ev5o6SX6GrNdDfKweiemfO2VBk | |
24 | fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 0 iEYEABECAAYFAkvsKTkACgkQywK+sNU5EO9qEACgiSiRGvTG2vXGJ65tUSOIYihTuFAAnRzRIqEVSw8M8/RGeUXRps0IzaCO | |
25 | 24fe2629c6fd0c74c90bd066e77387c2b02e8437 0 iEYEABECAAYFAkwFLRsACgkQywK+sNU5EO+pJACgp13tPI+pbwKZV+LeMjcQ4H6tCZYAoJebzhd6a8yYx6qiwpJxA9BXZNXy | |
26 | f786fc4b8764cd2a5526d259cf2f94d8a66924d9 0 iEYEABECAAYFAkwsyxcACgkQywK+sNU5EO+crACfUpNAF57PmClkSri9nJcBjb2goN4AniPCNaKvnki7TnUsi1u2oxltpKKL | |
27 | bf1774d95bde614af3956d92b20e2a0c68c5fec7 0 iEYEABECAAYFAkxVwccACgkQywK+sNU5EO+oFQCeJzwZ+we1fIIyBGCddHceOUAN++cAnjvT6A8ZWW0zV21NXIFF1qQmjxJd | |
28 | c00f03a4982e467fb6b6bd45908767db6df4771d 0 iEYEABECAAYFAkxXDqsACgkQywK+sNU5EO/GJACfT9Rz4hZOxPQEs91JwtmfjevO84gAmwSmtfo5mmWSm8gtTUebCcdTv0Kf | |
29 | ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 0 iD8DBQBMdo+qywK+sNU5EO8RAqQpAJ975BL2CCAiWMz9SXthNQ9xG181IwCgp4O+KViHPkufZVFn2aTKMNvcr1A= | |
30 | 93d8bff78c96fe7e33237b257558ee97290048a4 0 iD8DBQBMpfvdywK+sNU5EO8RAsxVAJ0UaL1XB51C76JUBhafc9GBefuMxwCdEWkTOzwvE0SarJBe9i008jhbqW4= | |
31 | 333421b9e0f96c7bc788e5667c146a58a9440a55 0 iD8DBQBMz0HOywK+sNU5EO8RAlsEAJ0USh6yOG7OrWkADGunVt9QimBQnwCbBqeMnKgSbwEw8jZwE3Iz1mdrYlo= | |
32 | 4438875ec01bd0fc32be92b0872eb6daeed4d44f 0 iD8DBQBM4WYUywK+sNU5EO8RAhCVAJ0dJswachwFAHALmk1x0RJehxzqPQCbBNskP9n/X689jB+btNTZTyKU/fw= | |
33 | 6aff4f144ad356311318b0011df0bb21f2c97429 0 iD8DBQBM9uxXywK+sNU5EO8RAv+4AKCDj4qKP16GdPaq1tP6BUwpM/M1OACfRyzLPp/qiiN8xJTWoWYSe/XjJug= | |
34 | e3bf16703e2601de99e563cdb3a5d50b64e6d320 0 iD8DBQBNH8WqywK+sNU5EO8RAiQTAJ9sBO+TeiGro4si77VVaQaA6jcRUgCfSA28dBbjj0oFoQwvPoZjANiZBH8= | |
35 | a6c855c32ea081da3c3b8ff628f1847ff271482f 0 iD8DBQBNSJJ+ywK+sNU5EO8RAoJaAKCweDEF70fu+r1Zn7pYDXdlk5RuSgCeO9gK/eit8Lin/1n3pO7aYguFLok= | |
36 | 2b2155623ee2559caf288fd333f30475966c4525 0 iD8DBQBNSJeBywK+sNU5EO8RAm1KAJ4hW9Cm9nHaaGJguchBaPLlAr+O3wCgqgmMok8bdAS06N6PL60PSTM//Gg= | |
37 | 2616325766e3504c8ae7c84bd15ee610901fe91d 0 iD8DBQBNbWy9ywK+sNU5EO8RAlWCAJ4mW8HbzjJj9GpK98muX7k+7EvEHwCfaTLbC/DH3QEsZBhEP+M8tzL6RU4= | |
38 | aa1f3be38ab127280761889d2dca906ca465b5f4 0 iD8DBQBNeQq7ywK+sNU5EO8RAlEOAJ4tlEDdetE9lKfjGgjbkcR8PrC3egCfXCfF3qNVvU/2YYjpgvRwevjvDy0= | |
39 | b032bec2c0a651ca0ddecb65714bfe6770f67d70 0 iD8DBQBNlg5kywK+sNU5EO8RAnGEAJ9gmEx6MfaR4XcG2m/93vwtfyzs3gCgltzx8/YdHPwqDwRX/WbpYgi33is= | |
40 | 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 0 iD8DBQBNvTy4ywK+sNU5EO8RAmp8AJ9QnxK4jTJ7G722MyeBxf0UXEdGwACgtlM7BKtNQfbEH/fOW5y+45W88VI= | |
41 | 733af5d9f6b22387913e1d11350fb8cb7c1487dd 0 iD8DBQBN5q/8ywK+sNU5EO8RArRGAKCNGT94GKIYtSuwZ57z1sQbcw6uLACfffpbMV4NAPMl8womAwg+7ZPKnIU= | |
42 | de9eb6b1da4fc522b1cab16d86ca166204c24f25 0 iD8DBQBODhfhywK+sNU5EO8RAr2+AJ4ugbAj8ae8/K0bYZzx3sascIAg1QCeK3b+zbbVVqd3b7CDpwFnaX8kTd4= | |
43 | 4a43e23b8c55b4566b8200bf69fe2158485a2634 0 iD8DBQBONzIMywK+sNU5EO8RAj5SAJ0aPS3+JHnyI6bHB2Fl0LImbDmagwCdGbDLp1S7TFobxXudOH49bX45Iik= | |
44 | d629f1e89021103f1753addcef6b310e4435b184 0 iD8DBQBOWAsBywK+sNU5EO8RAht4AJwJl9oNFopuGkj5m8aKuf7bqPkoAQCeNrEm7UhFsZKYT5iUOjnMV7s2LaM= | |
45 | 351a9292e430e35766c552066ed3e87c557b803b 0 iD8DBQBOh3zUywK+sNU5EO8RApFMAKCD3Y/u3avDFndznwqfG5UeTHMlvACfUivPIVQZyDZnhZMq0UhC6zhCEQg= | |
46 | 384082750f2c51dc917d85a7145748330fa6ef4d 0 iD8DBQBOmd+OywK+sNU5EO8RAgDgAJ9V/X+G7VLwhTpHrZNiOHabzSyzYQCdE2kKfIevJUYB9QLAWCWP6DPwrwI= | |
47 | 41453d55b481ddfcc1dacb445179649e24ca861d 0 iD8DBQBOsFhpywK+sNU5EO8RAqM6AKCyfxUae3/zLuiLdQz+JR78690eMACfQ6JTBQib4AbE+rUDdkeFYg9K/+4= | |
48 | 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 0 iD8DBQBO1/fWywK+sNU5EO8RAmoPAKCR5lpv1D6JLURHD8KVLSV4GRVEBgCgnd0Sy78ligNfqAMafmACRDvj7vo= | |
49 | 6344043924497cd06d781d9014c66802285072e4 0 iD8DBQBPALgmywK+sNU5EO8RAlfhAJ9nYOdWnhfVDHYtDTJAyJtXBAQS9wCgnefoSQt7QABkbGxM+Q85UYEBuD0= | |
50 | db33555eafeaf9df1e18950e29439eaa706d399b 0 iD8DBQBPGdzxywK+sNU5EO8RAppkAJ9jOXhUVE/97CPgiMA0pMGiIYnesQCfengAszcBiSiKGugiI8Okc9ghU+Y= | |
51 | 2aa5b51f310fb3befd26bed99c02267f5c12c734 0 iD8DBQBPKZ9bywK+sNU5EO8RAt1TAJ45r1eJ0YqSkInzrrayg4TVCh0SnQCgm0GA/Ua74jnnDwVQ60lAwROuz1Q= | |
52 | 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 0 iD8DBQBPT/fvywK+sNU5EO8RAnfYAKCn7d0vwqIb100YfWm1F7nFD5B+FACeM02YHpQLSNsztrBCObtqcnfod7Q= | |
53 | b9bd95e61b49c221c4cca24e6da7c946fc02f992 0 iD8DBQBPeLsIywK+sNU5EO8RAvpNAKCtKe2gitz8dYn52IRF0hFOPCR7AQCfRJL/RWCFweu2T1vH/mUOCf8SXXc= | |
54 | d9e2f09d5488c395ae9ddbb320ceacd24757e055 0 iD8DBQBPju/dywK+sNU5EO8RArBYAJ9xtifdbk+hCOJO8OZa4JfHX8OYZQCeKPMBaBWiT8N/WHoOm1XU0q+iono= | |
55 | 00182b3d087909e3c3ae44761efecdde8f319ef3 0 iD8DBQBPoFhIywK+sNU5EO8RAhzhAKCBj1n2jxPTkZNJJ5pSp3soa+XHIgCgsZZpAQxOpXwCp0eCdNGe0+pmxmg= | |
56 | 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 0 iD8DBQBPovNWywK+sNU5EO8RAhgiAJ980T91FdPTRMmVONDhpkMsZwVIMACgg3bKvoWSeuCW28llUhAJtUjrMv0= | |
57 | 85a358df5bbbe404ca25730c9c459b34263441dc 0 iD8DBQBPyZsWywK+sNU5EO8RAnpLAJ48qrGDJRT+pteS0mSQ11haqHstPwCdG4ccGbk+0JHb7aNy8/NRGAOqn9w= | |
58 | b013baa3898e117959984fc64c29d8c784d2f28b 0 iD8DBQBP8QOPywK+sNU5EO8RAqimAKCFRSx0lvG6y8vne2IhNG062Hn0dACeMLI5/zhpWpHBIVeAAquYfx2XFeA= | |
59 | 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 0 iD8DBQBQGiL8ywK+sNU5EO8RAq5oAJ4rMMCPx6O+OuzNXVOexogedWz/QgCeIiIxLd76I4pXO48tdXhr0hQcBuM= | |
60 | 072209ae4ddb654eb2d5fd35bff358c738414432 0 iD8DBQBQQkq0ywK+sNU5EO8RArDTAJ9nk5CySnNAjAXYvqvx4uWCw9ThZwCgqmFRehH/l+oTwj3f8nw8u8qTCdc= | |
61 | b3f0f9a39c4e1d0250048cd803ab03542d6f140a 0 iD8DBQBQamltywK+sNU5EO8RAlsqAJ4qF/m6aFu4mJCOKTiAP5RvZFK02ACfawYShUZO6OXEFfveU0aAxDR0M1k= | |
62 | d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 0 iD8DBQBQgPV5ywK+sNU5EO8RArylAJ0abcx5NlDjyv3ZDWpAfRIHyRsJtQCgn4TMuEayqgxzrvadQZHdTEU2g38= | |
63 | 195ad823b5d58c68903a6153a25e3fb4ed25239d 0 iD8DBQBQkuT9ywK+sNU5EO8RAhB4AKCeerItoK2Jipm2cVf4euGofAa/WACeJj3TVd4pFILpb+ogj7ebweFLJi0= | |
64 | 0c10cf8191469e7c3c8844922e17e71a176cb7cb 0 iD8DBQBQvQWoywK+sNU5EO8RAnq3AJoCn98u4geFx5YaQaeh99gFhCd7bQCgjoBwBSUyOvGd0yBy60E3Vv3VZhM= | |
65 | a4765077b65e6ae29ba42bab7834717b5072d5ba 0 iD8DBQBQ486sywK+sNU5EO8RAhmJAJ90aLfLKZhmcZN7kqphigQJxiFOQACeJ5IUZxjGKH4xzi3MrgIcx9n+dB0= | |
66 | f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 0 iD8DBQBQ+yuYywK+sNU5EO8RAm9JAJoD/UciWvpGeKBcpGtZJBFJVcL/HACghDXSgQ+xQDjB+6uGrdgAQsRR1Lg= | |
67 | a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 0 iD8DBQBRDDROywK+sNU5EO8RAh75AJ9uJCGoCWnP0Lv/+XuYs4hvUl+sAgCcD36QgAnuw8IQXrvv684BAXAnHcA= | |
68 | 7511d4df752e61fe7ae4f3682e0a0008573b0402 0 iD8DBQBRFYaoywK+sNU5EO8RAuErAJoDyhXn+lptU3+AevVdwAIeNFyR2gCdHzPHyWd+JDeWCUR+pSOBi8O2ppM= | |
69 | 5b7175377babacce80a6c1e12366d8032a6d4340 0 iD8DBQBRMCYgywK+sNU5EO8RAq1/AKCWKlt9ysibyQgYwoxxIOZv5J8rpwCcDSHQaaf1fFZUTnQsOePwcM2Y/Sg= | |
70 | 50c922c1b5145dab8baefefb0437d363b6a6c21c 0 iD8DBQBRWnUnywK+sNU5EO8RAuQRAJwM42cJqJPeqJ0jVNdMqKMDqr4dSACeP0cRVGz1gitMuV0x8f3mrZrqc7I= | |
71 | 8a7bd2dccd44ed571afe7424cd7f95594f27c092 0 iD8DBQBRXfBvywK+sNU5EO8RAn+LAKCsMmflbuXjYRxlzFwId5ptm8TZcwCdGkyLbZcASBOkzQUm/WW1qfknJHU= | |
72 | 292cd385856d98bacb2c3086f8897bc660c2beea 0 iD8DBQBRcM0BywK+sNU5EO8RAjp4AKCJBykQbvXhKuvLSMxKx3a2TBiXcACfbr/kLg5GlZTF/XDPmY+PyHgI/GM= | |
73 | 23f785b38af38d2fca6b8f3db56b8007a84cd73a 0 iD8DBQBRgZwNywK+sNU5EO8RAmO4AJ4u2ILGuimRP6MJgE2t65LZ5dAdkACgiENEstIdrlFC80p+sWKD81kKIYI= | |
74 | ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 0 iD8DBQBRkswvywK+sNU5EO8RAiYYAJsHTHyHbJeAgmGvBTmDrfcKu4doUgCeLm7eGBjx7yAPUvEtxef8rAkQmXI= | |
75 | cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 0 iD8DBQBRqnFLywK+sNU5EO8RAsWNAJ9RR6t+y1DLFc2HeH0eN9VfZAKF9gCeJ8ezvhtKq/LMs0/nvcgKQc/d5jk= | |
76 | 009794acc6e37a650f0fae37872e733382ac1c0c 0 iD8DBQBR0guxywK+sNU5EO8RArNkAKCq9pMihVzP8Os5kCmgbWpe5C37wgCgqzuPZTHvAsXF5wTyaSTMVa9Ccq4= | |
77 | f0d7721d7322dcfb5af33599c2543f27335334bb 0 iD8DBQBR8taaywK+sNU5EO8RAqeEAJ4idDhhDuEsgsUjeQgWNj498matHACfT67gSF5w0ylsrBx1Hb52HkGXDm0= | |
78 | f37b5a17e6a0ee17afde2cdde5393dd74715fb58 0 iD8DBQBR+ymFywK+sNU5EO8RAuSdAJkBMcd9DAZ3rWE9WGKPm2YZ8LBoXACfXn/wbEsVy7ZgJoUwiWmHSnQaWCI= | |
79 | 335a558f81dc73afeab4d7be63617392b130117f 0 iQIVAwUAUiZrIyBXgaxoKi1yAQK2iw//cquNqqSkc8Re5/TZT9I6NH+lh6DbOKjJP0Xl1Wqq0K+KSIUgZG4G32ovaEb2l5X0uY+3unRPiZ0ebl0YSw4Fb2ZiPIADXLBTOYRrY2Wwd3tpJeGI6wEgZt3SfcITV/g7NJrCjT3FlYoSOIayrExM80InSdcEM0Q3Rx6HKzY2acyxzgZeAtAW5ohFvHilSvY6p5Gcm4+QptMxvw45GPdreUmjeXZxNXNXZ8P+MjMz/QJbai/N7PjmK8lqnhkBsT48Ng/KhhmOkGntNJ2/ImBWLFGcWngSvJ7sfWwnyhndvGhe0Hq1NcCf7I8TjNDxU5TR+m+uW7xjXdLoDbUjBdX4sKXnh8ZjbYiODKBOrrDq25cf8nA/tnpKyE/qsVy60kOk6loY4XKiYmn1V49Ta0emmDx0hqo3HgxHHsHX0NDnGdWGol7cPRET0RzVobKq1A0jnrhPooWidvLh9bPzLonrWDo+ib+DuySoRkuYUK4pgZJ2mbg6daFOBEZygkSyRB8bo1UQUP7EgQDrWe4khb/5GHEfDkrQz3qu/sXvc0Ir1mOUWBFPHC2DjjCn/oMJuUkG1SwM8l2Bfv7h67ssES6YQ2+RjOix4yid7EXS/Ogl45PzCIPSI5+BbNs10JhE0w5uErBHlF53EDTe/TSLc+GU6DB6PP6dH912Njdr3jpNSUQ= | |
80 | e7fa36d2ad3a7944a52dca126458d6f482db3524 0 iQIVAwUAUktg4yBXgaxoKi1yAQLO0g//du/2ypYYUfmM/yZ4zztNKIvgMSGTDVbCCGB2y2/wk2EcolpjpGTkcgnJT413ksYtw78ZU+mvv0RjgrFCm8DQ8kroJaQZ2qHmtSUb42hPBPvtg6kL9YaA4yvp87uUBpFRavGS5uX4hhEIyvZKzhXUBvqtL3TfwR7ld21bj8j00wudqELyyU9IrojIY9jkJ3XL/4shBGgP7u6OK5g8yJ6zTnWgysUetxHBPrYjG25lziiiZQFvZqK1B3PUqAOaFPltQs0PB8ipOCAHQgJsjaREj8VmC3+rskmSSy66NHm6gAB9+E8oAgOcU7FzWbdYgnz4kR3M7TQvHX9U61NinPXC6Q9d1VPhO3E6sIGvqJ4YeQOn65V9ezYuIpFSlgQzCHMmLVnOV96Uv1R/Z39I4w7D3S5qoZcQT/siQwGbsZoPMGFYmqOK1da5TZWrrJWkYzc9xvzT9m3q3Wds5pmCmo4b/dIqDifWwYEcNAZ0/YLHwCN5SEZWuunkEwtU5o7TZAv3bvDDA6WxUrrHI/y9/qvvhXxsJnY8IueNhshdmWZfXKz+lJi2Dvk7DUlEQ1zZWSsozi1E+3biMPJO47jsxjoT/jmE5+GHLCgcnXXDVBeaVal99IOaTRFukiz2EMsry1s8fnwEE5XKDKRlU/dOPfsje0gc7bgE0QD/u3E4NJ99g9A= | |
81 | 1596f2d8f2421314b1ddead8f7d0c91009358994 0 iQIVAwUAUmRq+yBXgaxoKi1yAQLolhAAi+l4ZFdQTu9yJDv22YmkmHH4fI3d5VBYgvfJPufpyaj7pX626QNW18UNcGSw2BBpYHIJzWPkk/4XznLVKr4Ciw2N3/yqloEFV0V2SSrTbMWiR9qXI4KJH+Df3KZnKs3FgiYpXkErL4GWkc1jLVR50xQ5RnkMljjtCd0NTeV2PHZ6gP2qbu6CS+5sm3AFhTDGnx8GicbMw76ZNw5M2G+T48yH9jn5KQi2SBThfi4H9Bpr8FDuR7PzQLgw9SbtYxtdQxNkK55k0nG4oLDxduNakU6SH9t8n8tdCfMt58kTzlQVrPFiTFjKu2n2JioDTz2HEivbZ5H757cu7SvpX8gW3paeBc57e+GOLMisMZABXLICq59c3QnrMwFY4FG+5cpiHVXoaZz/0bYCJx+IhU4QLWqZuzb18KSyHUCqQRzXlzS6QV5O7dY5YNQXFC44j/dS5zdgWMYo2mc6mVP2OaPUn7F6aQh5MCDYorPIOkcNjOg7ytajo7DXbzWt5Al8qt6386BJksyR3GAonc09+l8IFeNxk8HZNP4ETQ8aWj0dC9jgBDPK43T2Bju/i84s+U/bRe4tGSQalZUEv06mkIH/VRJp5w2izYTsdIjA4FT9d36OhaxlfoO1X6tHR9AyA3bF/g/ozvBwuo3kTRUUqo+Ggvx/DmcPQdDiZZQIqDBXch0= | |
82 | d825e4025e39d1c39db943cdc89818abd0a87c27 0 iQIVAwUAUnQlXiBXgaxoKi1yAQJd3BAAi7LjMSpXmdR7B8K98C3/By4YHsCOAocMl3JXiLd7SXwKmlta1zxtkgWwWJnNYE3lVJvGCl+l4YsGKmFu755MGXlyORh1x4ohckoC1a8cqnbNAgD6CSvjSaZfnINLGZQP1wIP4yWj0FftKVANQBjj/xkkxO530mjBYnUvyA4PeDd5A1AOUUu6qHzX6S5LcprEt7iktLI+Ae1dYTkiCpckDtyYUKIk3RK/4AGWwGCPddVWeV5bDxLs8GHyMbqdBwx+2EAMtyZfXT+z6MDRsL/gEBVOXHb/UR0qpYED+qFnbtTlxqQkRE/wBhwDoRzUgcSuukQ9iPn79WNDSdT5b6Jd393uEO5BNF/DB6rrOiWmlpoooWgTY9kcwGB02v0hhLrH5r1wkv8baaPl+qjCjBxf4CNKm/83KN5/umGbZlORqPSN5JVxK6vDNwFFmHLaZbMT1g27GsGOWm84VH+dgolgk4nmRNSO37eTNM5Y1C3Zf2amiqDSRcAxCgseg0Jh10G7i52SSTcZPI2MqrwT9eIyg8PTIxT1D5bPcCzkg5nTTL6S7bet7OSwynRnHslhvVUBly8aIj4eY/5cQqAucUUa5sq6xLD8N27Tl+sQi+kE6KtWu2c0ZhpouflYp55XNMHgU4KeFcVcDtHfJRF6THT6tFcHFNauCHbhfN2F33ANMP4= | |
83 | 209e04a06467e2969c0cc6501335be0406d46ef0 0 iQIVAwUAUpv1oCBXgaxoKi1yAQKOFBAAma2wlsr3w/5NvDwq2rmOrgtNDq1DnNqcXloaOdwegX1z3/N++5uVjLjI0VyguexnwK+7E8rypMZ+4glaiZvIiGPnGMYbG9iOoz5XBhtUHzI5ECYfm5QU81by9VmCIvArDFe5Hlnz4XaXpEGnAwPywD+yzV3/+tyoV7MgsVinCMtbX9OF84/ubWKNzq2810FpQRfYoCOrF8sUed/1TcQrSm1eMB/PnuxjFCFySiR6J7Urd9bJoJIDtdZOQeeHaL5Z8Pcsyzjoe/9oTwJ3L3tl/NMZtRxiQUWtfRA0zvEnQ4QEkZSDMd/JnGiWHPVeP4P92+YN15za9yhneEAtustrTNAmVF2Uh92RIlmkG475HFhvwPJ4DfCx0vU1OOKX/U4c1rifW7H7HaipoaMlsDU2VFsAHcc3YF8ulVt27bH2yUaLGJz7eqpt+3DzZTKp4d/brZA2EkbVgsoYP+XYLbzxfwWlaMwiN3iCnlTFbNogH8MxhfHFWBj6ouikqOz8HlNl6BmSQiUCBnz5fquVpXmW2Md+TDekk+uOW9mvk1QMU62br+Z6PEZupkdTrqKaz+8ZMWvTRct8SiOcu7R11LpfERyrwYGGPei0P2YrEGIWGgXvEobXoPTSl7J+mpOA/rp2Q1zA3ihjgzwtGZZF+ThQXZGIMGaA2YPgzuYRqY8l5oc= | |
84 | ca387377df7a3a67dbb90b6336b781cdadc3ef41 0 iQIVAwUAUsThISBXgaxoKi1yAQJpvRAAkRkCWLjHBZnWxX9Oe6t2HQgkSsmn9wMHvXXGFkcAmrqJ86yfyrxLq2Ns0X7Qwky37kOwKsywM53FQlsx9j//Y+ncnGZoObFTz9YTuSbOHGVsTbAruXWxBrGOf1nFTlg8afcbH0jPfQXwxf3ptfBhgsFCzORcqc8HNopAW+2sgXGhHnbVtq6LF90PWkbKjCCQLiX3da1uETGAElrl4jA5Y2i64S1Q/2X+UFrNslkIIRCGmAJ6BnE6KLJaUftpfbN7Br7a3z9xxWqxRYDOinxDgfAPAucOJPLgMVQ0bJIallaRu7KTmIWKIuSBgg1/hgfoX8I1w49WrTGp0gGY140kl8RWwczAz/SB03Xtbl2+h6PV7rUV2K/5g61DkwdVbWqXM9wmJZmvjEKK0qQbBT0By4QSEDNcKKqtaFFwhFzx4dkXph0igHOtXhSNzMd8PsFx/NRn9NLFIpirxfqVDwakpDNBZw4Q9hUAlTPxSFL3vD9/Zs7lV4/dAvvl+tixJEi2k/iv248b/AI1PrPIQEqDvjrozzzYvrS4HtbkUn+IiHiepQaYnpqKoXvBu6btK/nv0GTxB5OwVJzMA1RPDcxIFfZA2AazHjrXiPAl5uWYEddEvRjaCiF8xkQkfiXzLOoqhKQHdwPGcfMFEs9lNR8BrB2ZOajBJc8RPsFDswhT5h4= | |
85 | 8862469e16f9236208581b20de5f96bd13cc039d 0 iQIVAwUAUt7cLSBXgaxoKi1yAQLOkRAAidp501zafqe+JnDwlf7ORcJc+FgCE6mK1gxDfReCbkMsY7AzspogU7orqfSmr6XXdrDwmk3Y5x3mf44OGzNQjvuNWhqnTgJ7sOcU/lICGQUc8WiGNzHEMFGX9S+K4dpUaBf8Tcl8pU3iArhlthDghW6SZeDFB/FDBaUx9dkdFp6eXrmu4OuGRZEvwUvPtCGxIL7nKNnufI1du/MsWQxvC2ORHbMNtRq6tjA0fLZi4SvbySuYifQRS32BfHkFS5Qu4/40+1k7kd0YFyyQUvIsVa17lrix3zDqMavG8x7oOlqM/axDMBT6DhpdBMAdc5qqf8myz8lwjlFjyDUL6u3Z4/yE0nUrmEudXiXwG0xbVoEN8SCNrDmmvFMt6qdCpdDMkHr2TuSh0Hh4FT5CDkzPI8ZRssv/01j/QvIO3c/xlbpGRPWpsPXEVOz3pmjYN4qyQesnBKWCENsQLy/8s2rey8iQgx2GtsrNw8+wGX6XE4v3QtwUrRe12hWoNrEHWl0xnLv2mvAFqdMAMpFY6EpOKLlE4hoCs2CmTJ2dv6e2tiGTXGU6/frI5iuNRK61OXnH5OjEc8DCGH/GC7NXyDOXOB+7BdBvvf50l2C/vxR2TKgTncLtHeLCrR0GHNHsxqRo1UDwOWur0r7fdfCRvb2tIr5LORCqKYVKd60/BAXjHWc= | |
86 | 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 0 iQIVAwUAUu1lIyBXgaxoKi1yAQIzCBAAizSWvTkWt8+tReM9jUetoSToF+XahLhn381AYdErFCBErX4bNL+vyEj+Jt2DHsAfabkvNBe3k7rtFlXHwpq6POa/ciFGPDhFlplNv6yN1jOKBlMsgdjpn7plZKcLHODOigU7IMlgg70Um8qVrRgQ8FhvbVgR2I5+CD6bucFzqo78wNl9mCIHIQCpGKIUoz56GbwT+rUpEB182Z3u6rf4NWj35RZLGAicVV2A2eAAFh4ZvuC+Z0tXMkp6Gq9cINawZgqfLbzVYJeXBtJC39lHPyp5P3LaEVRhntc9YTwbfkVGjyJZR60iYrieeKpOYRnzgHauPVdgVhkTkBxshmEPY7svKYSQqlj8hLuFa+a3ajbIPrpQAAi1MgtamA991atNqGiSTjdZa9kLQvfdn0k80+gkCxpuO56PhvtdjKsYVRgQMTYmQVQdh3x4WbQOSqTADXXIZUaWxx4RmNSlxY7KD+3lPP09teOD+A3B2cP60bC5NsCfULtQFXQzdC7NvfIyYfYBTZa+Pv6HFkVe10cbnqTt83hBy0D77vdaegPRe56qDNU+GrIG2/rosnlKGFjFoK/pTYkR9uzfkrhEjLwyfkoXlBqY+376W0PC5fP10pJeQBS9DuXpCPlgtyW0Jy1ayCT1YR4QJC4n75vZwTFBFRBhSi0HqFquOgy83+O0Q/k= | |
87 | b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 0 iQIVAwUAUxJPlyBXgaxoKi1yAQLIRA//Qh9qzoYthPAWAUNbzybWXC/oMBI2X89NQC7l1ivKhv7cn9L79D8SWXM18q7LTwLdlwOkV/a0NTE3tkQTLvxJpfnRLCBbMOcGiIn/PxsAae8IhMAUbR7qz+XOynHOs60ZhK9X8seQHJRf1YtOI9gYTL/WYk8Cnpmc6xZQ90TNhoPPkpdfe8Y236V11SbYtN14fmrPaWQ3GXwyrvQaqM1F7BxSnC/sbm9+/wprsTa8gRQo7YQL/T5jJQgFiatG3yayrDdJtoRq3TZKtsxw8gtQdfVCrrBibbysjM8++dnwA92apHNUY8LzyptPy7rSDXRrIpPUWGGTQTD+6HQwkcLFtIuUpw4I75SV3z2r6LyOLKzDJUIunKOOYFS/rEIQGxZHxZOBAvbI+73mHAn3pJqm+UAA7R1n7tk3JyQncg50qJlm9zIUPGpNFcdEqak5iXzGYx292VlcE+fbJYeIPWggpilaVUgdmXtMCG0O0uX6C8MDmzVDCjd6FzDJ4GTZwgmWJaamvls85CkZgyN/UqlisfFXub0A1h7qAzBSVpP1+Ti+UbBjlrGX8BMRYHRGYIeIq16elcWwSpLgshjDwNn2r2EdwX8xKU5mucgTzSLprbOYGdQaqnvf6e8IX5WMBgwVW9YdY9yJKSLF7kE1AlM9nfVcXwOK4mHoMvnNgiX3zsw= | |
88 | 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 0 iQIVAwUAUztENyBXgaxoKi1yAQIpkhAAmJj5JRTSn0Dn/OTAHggalw8KYFbAck1X35Wg9O7ku7sd+cOnNnkYfqAdz2m5ikqWHP7aWMiNkNy7Ree2110NqkQVYG/2AJStXBdIOmewqnjDlNt+rbJQN/JsjeKSCy+ToNvhqX5cTM9DF2pwRjMsTXVff307S6/3pga244i+RFAeG3WCUrzfDu641MGFLjG4atCj8ZFLg9DcW5bsRiOs5ZK5Il+UAb2yyoS2KNQ70VLhYULhGtqq9tuO4nLRGN3DX/eDcYfncPCav1GckW4OZKakcbLtAdW0goSgGWloxcM+j2E6Z1JZ9tOTTkFN77EvX0ZWZLmYM7sUN1meFnKbVxrtGKlMelwKwlT252c65PAKa9zsTaRUKvN7XclyxZAYVCsiCQ/V08NXhNgXJXcoKUAeGNf6wruOyvRU9teia8fAiuHJoY58WC8jC4nYG3iZTnl+zNj2A5xuEUpYHhjUfe3rNJeK7CwUpJKlbxopu5mnW9AE9ITfI490eaapRLTojOBDJNqCORAtbggMD46fLeCOzzB8Gl70U2p5P34F92Sn6mgERFKh/10XwJcj4ZIeexbQK8lqQ2cIanDN9dAmbvavPTY8grbANuq+vXDGxjIjfxapqzsSPqUJ5KnfTQyLq5NWwquR9t38XvHZfktkd140BFKwIUAIlKKaFfYXXtM= | |
89 | 564f55b251224f16508dd1311452db7780dafe2b 0 iQIVAwUAU1BmFSBXgaxoKi1yAQJ2Aw//bjK++xJuZCIdktg/i5FxBwoxdbipfTkKsN/YjUwrEmroYM8IkqIsO+U54OGCYWr3NPJ3VS8wUQeJ+NF3ffcjmjC297R9J+X0c5G90DdQUYX44jG/tP8Tqpev4Q7DLCXT26aRwEMdJQpq0eGaqv55E5Cxnyt3RrLCqe7RjPresZFg7iYrro5nq8TGYwBhessHXnCix9QI0HtXiLpms+0UGz8Sbi9nEYW+M0OZCyO1TvykCpFzEsLNwqqtFvhOMD/AMiWcTKNUpjmOn3V83xjWl+jnDUt7BxJ7n1efUnlwl4IeWlSUb73q/durtaymb97cSdKFmXHv4pdAShQEuEpVVGO1WELsKoXmbj30ItTW2V3KvNbjFsvIdDo7zLCpXyTq1HC56W7QCIMINX2qT+hrAMWC12tPQ05f89Cv1+jpk6eOPFqIHFdi663AjyrnGll8nwN7HJWwtA5wTXisu3bec51FAq4yJTzPMtOE9spz36E+Go2hZ1cAv9oCSceZcM0wB8KiMfaZJKNZNZk1jvsdiio4CcdASOFQPOspz07GqQxVP7W+F1Oz32LgwcNAEAS/f3juwDj45GYfAWJrTh3dnJy5DTD2LVC7KtkxxUVkWkqxivnDB9anj++FN9eyekxzut5eFED+WrCfZMcSPW0ai7wbslhKUhCwSf/v3DgGwsM= | |
90 | 2195ac506c6ababe86985b932f4948837c0891b5 0 iQIVAwUAU2LO/CBXgaxoKi1yAQI/3w/7BT/VRPyxey6tYp7i5cONIlEB3gznebGYwm0SGYNE6lsvS2VLh6ztb+j4eqOadr8Ssna6bslBx+dVsm+VuJ+vrNLMucD5Uc+fhn6dAfVqg+YBzUEaedI5yNsJizcJUDI7hUVsxiPiiYd9hchCWJ+z2tVt2jCyG2lMV2rbW36AM89sgz/wn5/AaAFsgoS6up/uzA3Tmw+qZSO6dZChb4Q8midIUWEbNzVhokgYcw7/HmjmvkvV9RJYiG8aBnMdQmxTE69q2dTjnnDL6wu61WU2FpTN09HRFbemUqzAfoJp8MmXq6jWgfLcm0cI3kRo7ZNpnEkmVKsfKQCXXiaR4alt9IQpQ6Jl7LSYsYI+D4ejpYysIsZyAE8qzltYhBKJWqO27A5V4WdJsoTgA/RwKfPRlci4PY8I4N466S7PBXVz/Cc5EpFkecvrgceTmBafb8JEi+gPiD2Po4vtW3bCeV4xldiEXHeJ77byUz7fZU7jL78SjJVOCCQTJfKZVr36kTz3KlaOz3E700RxzEFDYbK7I41mdANeQBmNNbcvRTy5ma6W6I3McEcAH4wqM5fFQ8YS+QWJxk85Si8KtaDPqoEdC/0dQPavuU/jAVjhV8IbmmkOtO7WvOHQDBtrR15yMxGMnUwMrPHaRNKdHNYRG0LL7lpCtdMi1mzLQgHYY9SRYvI= | |
91 | 269c80ee5b3cb3684fa8edc61501b3506d02eb10 0 iQIVAwUAU4uX5CBXgaxoKi1yAQLpdg/+OxulOKwZN+Nr7xsRhUijYjyAElRf2mGDvMrbAOA2xNf85DOXjOrX5TKETumf1qANA5cHa1twA8wYgxUzhx30H+w5EsLjyeSsOncRnD5WZNqSoIq2XevT0T4c8xdyNftyBqK4h/SC/t2h3vEiSCUaGcfNK8yk4XO45MIk4kk9nlA9jNWdA5ZMLgEFBye2ggz0JjEAPUkVDqlr9sNORDEbnwZxGPV8CK9HaL/I8VWClaFgjKQmjqV3SQsNFe2XPffzXmIipFJ+ODuXVxYpAsvLiGmcfuUfSDHQ4L9QvjBsWe1PgYMr/6CY/lPYmR+xW5mJUE9eIdN4MYcXgicLrmMpdF5pToNccNCMtfa6CDvEasPRqe2bDzL/Q9dQbdOVE/boaYBlgmYLL+/u+dpqip9KkyGgbSo9uJzst1mLTCzJmr5bw+surul28i9HM+4+Lewg4UUdHLz46no1lfTlB5o5EAhiOZBTEVdoBaKfewVpDa/aBRvtWX7UMVRG5qrtA0sXwydN00Jaqkr9m20W0jWjtc1ZC72QCrynVHOyfIb2rN98rnuy2QN4bTvjNpNjHOhhhPTOoVo0YYPdiUupm46vymUTQCmWsglU4Rlaa3vXneP7JenL5TV8WLPs9J28lF0IkOnyBXY7OFcpvYO1euu7iR1VdjfrQukMyaX18usymiA= | |
92 | 2d8cd3d0e83c7336c0cb45a9f88638363f993848 0 iQIVAwUAU7OLTCBXgaxoKi1yAQJ+pw/+M3yOesgf55eo3PUTZw02QZxDyEg9ElrRc6664/QFXaJuYdz8H3LGG/NYs8uEdYihiGpS1Qc70jwd1IoUlrCELsaSSZpzWQ+VpQFX29aooBoetfL+8WgqV8zJHCtY0E1EBg/Z3ZL3n2OS++fVeWlKtp5mwEq8uLTUmhIS7GseP3bIG/CwF2Zz4bzhmPGK8V2s74aUvELZLCfkBE1ULNs7Nou1iPDGnhYOD53eq1KGIPlIg1rnLbyYw5bhS20wy5IxkWf2eCaXfmQBTG61kO5m3nkzfVgtxmZHLqYggISTJXUovfGsWZcp5a71clCSMVal+Mfviw8L/UPHG0Ie1c36djJiFLxM0f2HlwVMjegQOZSAeMGg1YL1xnIys2zMMsKgEeR+JISTal1pJyLcT9x5mr1HCnUczSGXE5zsixN+PORRnZOqcEZTa2mHJ1h5jJeEm36B/eR57BMJG+i0QgZqTpLzYTFrp2eWokGMjFB1MvgAkL2YoRsw9h6TeIwqzK8mFwLi28bf1c90gX9uMbwY/NOqGzfQKBR9bvCjs2k/gmJ+qd5AbC3DvOxHnN6hRZUqNq76Bo4F+CUVcjQ/NXnfnOIVNbILpl5Un5kl+8wLFM+mNxDxduajaUwLhSHZofKmmCSLbuuaGmQTC7a/4wzhQM9e5dX0X/8sOo8CptW7uw4= | |
93 | 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 0 iQIVAwUAU8n97yBXgaxoKi1yAQKqcA/+MT0VFoP6N8fHnlxj85maoM2HfZbAzX7oEW1B8F1WH6rHESHDexDWIYWJ2XnEeTD4GCXN0/1p+O/I0IMPNzqoSz8BU0SR4+ejhRkGrKG7mcFiF5G8enxaiISn9nmax6DyRfqtOQBzuXYGObXg9PGvMS6zbR0SorJK61xX7fSsUNN6BAvHJfpwcVkOrrFAIpEhs/Gh9wg0oUKCffO/Abs6oS+P6nGLylpIyXqC7rKZ4uPVc6Ljh9DOcpV4NCU6kQbNE7Ty79E0/JWWLsHOEY4F4WBzI7rVh7dOkRMmfNGaqvKkuNkJOEqTR1o1o73Hhbxn4NU7IPbVP/zFKC+/4QVtcPk2IPlpK1MqA1H2hBNYZhJlNhvAa7LwkIxM0916/zQ8dbFAzp6Ay/t/L0tSEcIrudTz2KTrY0WKw+pkzB/nTwaS3XZre6H2B+gszskmf1Y41clkIy/nH9K7zBuzANWyK3+bm40vmMoBbbnsweUAKkyCwqm4KTyQoYQWzu/ZiZcI+Uuk/ajJ9s7EhJbIlSnYG9ttWL/IZ1h+qPU9mqVO9fcaqkeL/NIRh+IsnzaWo0zmHU1bK+/E29PPGGf3v6+IEJmXg7lvNl5pHiMd2tb7RNO/UaNSv1Y2E9naD4FQwSWo38GRBcnRGuKCLdZNHGUR+6dYo6BJCGG8wtZvNXb3TOo= | |
94 | 3178e49892020336491cdc6945885c4de26ffa8b 0 iQIVAwUAU9whUCBXgaxoKi1yAQJDKxAAoGzdHXV/BvZ598VExEQ8IqkmBVIP1QZDVBr/orMc1eFM4tbGKxumMGbqgJsg+NetI0irkh/YWeJQ13lT4Og72iJ+4UC9eF9pcpUKr/0eBYdU2N/p2MIbVNWh3aF5QkbuQpSri0VbHOWkxqwoqrrwXEjgHaKYP4PKh+Dzukax4yzBUIyzAG38pt4a8hbjnozCl2uAikxk4Ojg+ZufhPoZWgFEuYzSfK5SrwVKOwuxKYFGbbVGTQMIXLvBhOipAmHp4JMEYHfG85kwuyx/DCDbGmXKPQYQfClwjJ4ob/IwG8asyMsPWs+09vrvpVO08HBuph3GjuiWJ1fhEef/ImWmZdQySI9Y4SjwP4dMVfzLCnY+PYPDM9Sq/5Iee13gI2lVM2NtAfQZPXh9l8u6SbCir1UhMNMx0qVMkqMAATmiZ+ETHCO75q4Wdcmnv5fk2PbvaGBVtrHGeiyuz5mK/j4cMbd0R9R0hR1PyC4dOhNqOnbqELNIe0rKNByG1RkpiQYsqZTU6insmnZrv4fVsxfA4JOObPfKNT4oa24MHS73ldLFCfQAuIxVE7RDJJ3bHeh/yO6Smo28FuVRldBl5e+wj2MykS8iVcuSa1smw6gJ14iLBH369nlR3fAAQxI0omVYPDHLr7SsH3vJasTaCD7V3SL4lW6vo/yaAh4ImlTAE+Y= | |
95 | 5dc91146f35369949ea56b40172308158b59063a 0 iQIVAwUAVAUgJyBXgaxoKi1yAQJkEg/9EXFZvPpuvU7AjII1dlIT8F534AXrO30+H6hweg+h2mUCSb/mZnbo3Jr1tATgBWbIKkYmmsiIKNlJMFNPZTWhImGcVA93t6v85tSFiNJRI2QP9ypl5wTt2KhiS/s7GbUYCtPDm6xyNYoSvDo6vXJ5mfGlgFZY5gYLwEHq/lIRWLWD4EWYWbk5yN+B7rHu6A1n3yro73UR8DudEhYYqC23KbWEqFOiNd1IGj3UJlxIHUE4AcDukxbfiMWrKvv1kuT/vXak3X7cLXlO56aUbMopvaUflA3PSr3XAqynDd69cxACo/T36fuwzCQN4ICpdzGTos0rQALSr7CKF5YP9LMhVhCsOn0pCsAkSiw4HxxbcHQLl+t+0rchNysc4dWGwDt6GAfYcdm3fPtGFtA3qsN8lOpCquFH3TAZ3TrIjLFoTOk6s1xX1x5rjP/DAHc/y3KZU0Ffx3TwdQEEEIFaAXaxQG848rdfzV42+dnFnXh1G/MIrKAmv3ZSUkQ3XJfGc7iu82FsYE1NLHriUQDmMRBzCoQ1Rn1Kji119Cxf5rsMcQ6ZISR1f0jDCUS/qxlHvSqETLp8H63NSUfvuKSC7uC6pGvq9XQm1JRNO5UuJfK6tHzy0jv9bt2IRo2xbmvpDu9L5oHHd3JePsAmFmbrFf/7Qem3JyzEvRcpdcdHtefxcxc= | |
96 | f768c888aaa68d12dd7f509dcc7f01c9584357d0 0 iQIVAwUAVCxczSBXgaxoKi1yAQJYiA/9HnqKuU7IsGACgsUGt+YaqZQumg077Anj158kihSytmSts6xDxqVY1UQB38dqAKLJrQc7RbN0YK0NVCKZZrx/4OqgWvjiL5qWUJKqQzsDx4LGTUlbPlZNZawW2urmmYW6c9ZZDs1EVnVeZMDrOdntddtnBgtILDwrZ8o3U7FwSlfnm03vTkqUMj9okA3AsI8+lQIlo4qbqjQJYwvUC1ZezRdQwaT1LyoWUgjmhoZ1XWcWKOs9baikaJr6fMv8vZpwmaOY1+pztxYlROeSPVWt9P6yOf0Hi/2eg8AwSZLaX96xfk9IvXUSItg/wjTWP9BhnNs/ulwTnN8QOgSXpYxH4RXwsYOyU7BvwAekA9xi17wuzPrGEliScplxICIZ7jiiwv/VngMvM9AYw2mNBvZt2ZIGrrLaK6pq/zBm5tbviwqt5/8U5aqO8k1O0e4XYm5WmQ1c2AkXRO+xwvFpondlSF2y0flzf2FRXP82QMfsy7vxIP0KmaQ4ex+J8krZgMjNTwXh2M4tdYNtu5AehJQEP3l6giy2srkMDuFLqoe1yECjVlGdgA86ve3J/84I8KGgsufYMhfQnwHHGXCbONcNsDvO0QOee6CIQVcdKCG7dac3M89SC6Ns2CjuC8BIYDRnxbGQb7Fvn4ZcadyJKKbXQJzMgRV25K6BAwTIdvYAtgU= | |
97 | 7f8d16af8cae246fa5a48e723d48d58b015aed94 0 iQIVAwUAVEL0XyBXgaxoKi1yAQJLkRAAjZhpUju5nnSYtN9S0/vXS/tjuAtBTUdGwc0mz97VrM6Yhc6BjSCZL59tjeqQaoH7Lqf94pRAtZyIB2Vj/VVMDbM+/eaoSr1JixxppU+a4eqScaj82944u4C5YMSMC22PMvEwqKmy87RinZKJlFwSQ699zZ5g6mnNq8xeAiDlYhoF2QKzUXwnKxzpvjGsYhYGDMmVS1QPmky4WGvuTl6KeGkv8LidKf7r6/2RZeMcq+yjJ7R0RTtyjo1cM5dMcn/jRdwZxuV4cmFweCAeoy5guV+X6du022TpVndjOSDoKiRgdk7pTuaToXIy+9bleHpEo9bwKx58wvOMg7sirAYjrA4Xcx762RHiUuidTTPktm8sNsBQmgwJZ8Pzm+8TyHjFGLnBfeiDbQQEdLCXloz0jVOVRflDfMays1WpAYUV8XNOsgxnD2jDU8L0NLkJiX5Y0OerGq9AZ+XbgJFVBFhaOfsm2PEc3jq00GOLzrGzA+4b3CGpFzM3EyK9OnnwbP7SqCGb7PJgjmQ7IO8IWEmVYGaKtWONSm8zRLcKdH8xuk8iN1qCkBXMty/wfTEVTkIlMVEDbslYkVfj0rAPJ8B37bfe0Yz4CEMkCmARIB1rIOpMhnavXGuD50OP2PBBY/8DyC5aY97z9f04na/ffk+l7rWaHihjHufKIApt5OnfJ1w= | |
98 | ced632394371a36953ce4d394f86278ae51a2aae 0 iQIVAwUAVFWpfSBXgaxoKi1yAQLCQw//cvCi/Di3z/2ZEDQt4Ayyxv18gzewqrYyoElgnEzr5uTynD9Mf25hprstKla/Y5C6q+y0K6qCHPimGOkz3H+wZ2GVUgLKAwMABkfSb5IZiLTGaB2DjAJKZRwB6h43wG/DSFggE3dYszWuyHW88c72ZzVF5CSNc4J1ARLjDSgnNYJQ6XdPw3C9KgiLFDXzynPpZbPg0AK5bdPUKJruMeIKPn36Hx/Tv5GXUrbc2/lcnyRDFWisaDl0X/5eLdA+r3ID0cSmyPLYOeCgszRiW++KGw+PPDsWVeM3ZaZ9SgaBWU7MIn9A7yQMnnSzgDbN+9v/VMT3zbk1WJXlQQK8oA+CCdHH9EY33RfZ6ST/lr3pSQbUG1hdK6Sw+H6WMkOnnEk6HtLwa4xZ3HjDpoPkhVV+S0C7D5WWOovbubxuBiW5v8tK4sIOS6bAaKevTBKRbo4Rs6qmS/Ish5Q+z5bKst80cyEdi4QSoPZ/W+6kh1KfOprMxynwPQhtEcDYW2gfLpgPIM7RdXPKukLlkV2qX3eF/tqApGU4KNdP4I3N80Ri0h+6tVU/K4TMYzlRV3ziLBumJ4TnBrTHU3X6AfZUfTgslQzokX8/7a3tbctX6kZuJPggLGisdFSdirHbrUc+y5VKuJtPr+LxxgZKRFbs2VpJRem6FvwGNyndWLv32v0GMtQ= | |
99 | 643c58303fb0ec020907af28b9e486be299ba043 0 iQIVAwUAVGKawCBXgaxoKi1yAQL7zxAAjpXKNvzm/PKVlTfDjuVOYZ9H8w9QKUZ0vfrNJrN6Eo6hULIostbdRc25FcMWocegTqvKbz3IG+L2TKOIdZJS9M9QS4URybUd37URq4Jai8kMiJY31KixNNnjO2G1B39aIXUhY+EPx12aY31/OVy4laXIVtN6qpSncjo9baXSOMZmx6RyA1dbyfwXRjT/aODCGHZXgLJHS/kHlkCsThVlqYQ4rUCDkXIeMqIGF1CR0KjfmKpp1fS14OMgpLgdnt9+pnBZ+qcf1YdpOeQob1zwunjMYOyYC74FyOTdwaynU2iDsuBrmkE8kgEedIn7+WWe9fp/6TQJMVOeTQPZBNSRRSUYCw5Tg/0L/+jLtzjc2mY4444sDPbR7scrtU+/GtvlR5z0Y5pofwEdFME7PZNOp9a4kMiSa7ZERyGdN7U1pDu9JU6BZRz+nPzW217PVnTF7YFV/GGUzMTk9i7EZb5M4T9r9gfxFSMPeT5ct712CdBfyRlsSbSWk8XclTXwW385kLVYNDtOukWrvEiwxpA14Xb/ZUXbIDZVf5rP2HrZHMkghzeUYPjRn/IlgYUt7sDNmqFZNIc9mRFrZC9uFQ/Nul5InZodNODQDM+nHpxaztt4xl4qKep8SDEPAQjNr8biC6T9MtLKbWbSKDlqYYNv0pb2PuGub3y9rvkF1Y05mgM= | |
100 | 902554884335e5ca3661d63be9978eb4aec3f68a 0 iQIVAwUAVH0KMyBXgaxoKi1yAQLUKxAAjgyYpmqD0Ji5OQ3995yX0dmwHOaaSuYpq71VUsOMYBskjH4xE2UgcTrX8RWUf0E+Ya91Nw3veTf+IZlYLaWuOYuJPRzw+zD1sVY8xprwqBOXNaA7n8SsTqZPSh6qgw4S0pUm0xJUOZzUP1l9S7BtIdJP7KwZ7hs9YZev4r9M3G15xOIPn5qJqBAtIeE6f5+ezoyOpSPZFtLFc4qKQ/YWzOT5uuSaYogXgVByXRFaO84+1TD93LR0PyVWxhwU9JrDU5d7P/bUTW1BXdjsxTbBnigWswKHC71EHpgz/HCYxivVL30qNdOm4Fow1Ec2GdUzGunSqTPrq18ScZDYW1x87f3JuqPM+ce/lxRWBBqP1yE30/8l/Us67m6enWXdGER8aL1lYTGOIWAhvJpfzv9KebaUq1gMFLo6j+OfwR3rYPiCHgi20nTNBa+LOceWFjCGzFa3T9UQWHW/MBElfAxK65uecbGRRYY9V1/+wxtTUiS6ixpmzL8S7uUd5n6oMaeeMiD82NLgPIbMyUHQv6eFEcCj0U9NT2uKbFRmclMs5V+8D+RTCsLJ55R9PD5OoRw/6K/coqqPShYmJvgYsFQPzXVpQdCRae31xdfGFmd5KUetqyrT+4GUdJWzSm0giSgovpEJNxXglrvNdvSO7fX3R1oahhwOwtGqMwNilcK+iDw= | |
101 | 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 0 iQIVAwUAVJNALCBXgaxoKi1yAQKgmw/+OFbHHOMmN2zs2lI2Y0SoMALPNQBInMBq2E6RMCMbfcS9Cn75iD29DnvBwAYNWaWsYEGyheJ7JjGBiuNKPOrLaHkdjG+5ypbhAfNDyHDiteMsXfH7D1L+cTOAB8yvhimZHOTTVF0zb/uRyVIPNowAyervUVRjDptzdfcvjUS+X+/Ufgwms6Y4CcuzFLFCxpmryJhLtOpwUPLlzIqeNkFOYWkHanCgtZX03PNIWhorH3AWOc9yztwWPQ+kcKl3FMlyuNMPhS/ElxSF6GHGtreRbtP+ZLoSIOMb2QBKpGDpZLgJ3JQEHDcZ0h5CLZWL9dDUJR3M8pg1qglqMFSWMgRPTzxPS4QntPgT/Ewd3+U5oCZUh052fG41OeCZ0CnVCpqi5PjUIDhzQkONxRCN2zbjQ2GZY7glbXoqytissihEIVP9m7RmBVq1rbjOKr+yUetJ9gOZcsMtZiCEq4Uj2cbA1x32MQv7rxwAgQP1kgQ62b0sN08HTjQpI7/IkNALLIDHoQWWr45H97i34qK1dd5uCOnYk7juvhGNX5XispxNnC01/CUVNnqChfDHpgnDjgT+1H618LiTgUAD3zo4IVAhCqF5XWsS4pQEENOB3Msffi62fYowvJx7f/htWeRLZ2OA+B85hhDiD4QBdHCRoz3spVp0asNqDxX4f4ndj8RlzfM= |
102 | 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 0 iQIVAwUAVKXKYCBXgaxoKi1yAQIfsA/+PFfaWuZ6Jna12Y3MpKMnBCXYLWEJgMNlWHWzwU8lD26SKSlvMyHQsVZlkld2JmFugUCn1OV3OA4YWT6BA7VALq6Zsdcu5Dc8LRbyajBUkzGRpOUyWuFzjkCpGVbrQzbCR/bel/BBXzSqL4ipdtWgJ4y+WpZIhWkNXclBkR52b5hUTjN9vzhyhVVI7eURGwIEf7vVs1fDOcEGtaGY/ynzMTzyxIDsEEygCZau86wpKlYlqhCgxKDyzyGfpH3B1UlNGFt1afW8AWe1eHjdqC7TJZpMqmQ/Ju8vco8Xht6OXw4ZLHj7y39lpccfKTBLiK/cAKSg+xgyaH/BLhzoEkNAwYSFAB4i4IoV0KUC8nFxHfsoswBxJnMqU751ziMrpZ/XHZ1xQoEOdXgz2I04vlRn8xtynOVhcgjoAXwtbia7oNh/qCH/hl5/CdAtaawuCxJBf237F+cwur4PMAAvsGefRfZco/DInpr3qegr8rwInTxlO48ZG+o5xA4TPwT0QQTUjMdNfC146ZSbp65wG7VxJDocMZ8KJN/lqPaOvX+FVYWq4YnJhlldiV9DGgmym1AAaP0D3te2GcfHXpt/f6NYUPpgiBHy0GnOlNcQyGnnONg1A6oKVWB3k7WP28+PQbQEiCIFk2nkf5VZmye7OdHRGKOFfuprYFP1WwTWnVoNX9c= |
103 | db8e3f7948b1fdeb9ad12d448fc3525759908b9f 0 iQIVAwUAVLsaciBXgaxoKi1yAQKMIA//a90/GvySL9UID+iYvzV2oDaAPDD0T+4Xs43I7DT5NIoDz+3yq2VV54XevQe5lYiURmsb/Q9nX2VR/Qq1J9c/R6Gy+CIfmJ3HzMZ0aAX8ZlZgQPYZKh/2kY5Ojl++k6MTqbqcrICNs4+UE/4IAxPyOfu5gy7TpdJmRZo2J3lWVC2Jbhd02Mzb+tjtfbOM+QcQxPwt9PpqmQszJceyVYOSm3jvD1uJdSOC04tBQrQwrxktQ09Om0LUMMaB5zFXpJtqUzfw7l4U4AaddEmkd3vUfLtHxc21RB01c3cpe2dJnjifDfwseLsI8rS4jmi/91c74TeBatSOhvbqzEkm/p8xZFXE4Uh+EpWjTsVqmfQaRq6NfNCR7I/kvGv8Ps6w8mg8uX8fd8lx+GJbodj+Uy0X3oqHyqPMky/df5i79zADBDuz+yuxFfDD9i22DJPIYcilfGgwpIUuO2lER5nSMVmReuWTVBnT6SEN66Q4KR8zLtIRr+t1qUUCy6wYbgwrdHVCbgMF8RPOVZPjbs17RIqcHjch0Xc7bShKGhQg4WHDjXHK61w4tOa1Yp7jT6COkl01XC9BLcGxJYKFvNCbeDZQGvVgJNoEvHxBxD9rGMVRjfuxeJawc2fGzZJn0ySyLDW0pfd4EJNgTh9bLdPjWz2VlXqn4A6bgaLgTPqjmN0VBXw= |
104 | fbdd5195528fae4f41feebc1838215c110b25d6a 0 iQIVAwUAVM7fBCBXgaxoKi1yAQKoYw/+LeIGcjQmHIVFQULsiBtPDf+eGAADQoP3mKBy+eX/3Fa0qqUNfES2Q3Y6RRApyZ1maPRMt8BvvhZMgQsu9QIrmf3zsFxZGFwoyrIj4hM3xvAbEZXqmWiR85/Ywd4ImeLaZ0c7mkO1/HGF1n2Mv47bfM4hhNe7VGJSSrTY4srFHDfk4IG9f18DukJVzRD9/dZeBw6eUN1ukuLEgQAD5Sl47bUdKSetglOSR1PjXfZ1hjtz5ywUyBc5P9p3LC4wSvlcJKl22zEvB3L0hkoDcPsdIPEnJAeXxKlR1rQpoA3fEgrstGiSNUW/9Tj0VekAHLO95SExmQyoG/AhbjRRzIj4uQ0aevCJyiAhkv+ffOSf99PMW9L1k3tVjLhpMWEz9BOAWyX7cDFWj5t/iktI046O9HGN9SGVx18e9xM6pEgRcLA2TyjEmtkA4jX0JeN7WeCweMLiSxyGP7pSPSJdpJeXaFtRpSF62p/G0Z5wN9s05LHqDyqNVtCvg4WjkuV5LZSdLbMcYBWGBxQzCG6qowXFXIawmbaFiBZwTfOgNls9ndz5RGupAaxY317prxPFv/pXoesc1P8bdK09ZvjhbmmD66Q/BmS2dOMQ8rXRjuVdlR8j2QBtFZxekMcRD02nBAVnwHg1VWQMIRaGjdgmW4wOkirWVn7me177FnBxrxW1tG4= |
105 | 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 0 iQIVAwUAVPQL9CBXgaxoKi1yAQJIXxAAtD2hWhaKa+lABmCOYG92FE/WdqY/91Xv5atTL8Xeko/MkirIKZiOuxNWX+J34TVevINZSWmMfDSc5TkGxktL9jW/pDB/CXn+CVZpxRabPYFH9HM2K3g8VaTV1MFtV2+feOMDIPCmq5ogMF9/kXjmifiEBrJcFsE82fdexJ3OHoOY4iHFxEhh3GzvNqEQygk4VeU6VYziNvSQj9G//PsK3Bmk7zm5ScsZcMVML3SIYFuej1b1PI1v0N8mmCRooVNBGhD/eA0iLtdh/hSb9s/8UgJ4f9HOcx9zqs8V4i14lpd/fo0+yvFuVrVbWGzrDrk5EKLENhVPwvc1KA32PTQ4Z9u7VQIBIxq3K5lL2VlCMIYc1BSaSQBjuiLm8VdN6iDuf5poNZhk1rvtpQgpxJzh362dlGtR/iTJuLCeW7gCqWUAorLTeHy0bLQ/jSOeTAGys8bUHtlRL4QbnhLbUmJmRYVvCJ+Yt1aTgTSNcoFjoLJarR1169BXgdCA38BgReUL6kB224UJSTzB1hJUyB2LvCWrXZMipZmR99Iwdq7MePD3+AoSIXQNUMY9blxuuF5x7W2ikNXmVWuab4Z8rQRtmGqEuIMBSunxAnZSn+i8057dFKlq+/yGy+WW3RQg+RnLnwZs1zCDTfu98/GT5k5hFpjXZeUWWiOVwQJ5HrqncCw= |
106 | 07a92bbd02e5e3a625e0820389b47786b02b2cea 0 iQIVAwUAVPSP9SBXgaxoKi1yAQLkBQ//dRQExJHFepJfZ0gvGnUoYI4APsLmne5XtfeXJ8OtUyC4a6RylxA5BavDWgXwUh9BGhOX2cBSz1fyvzohrPrvNnlBrYKAvOIJGEAiBTXHYTxHINEKPtDF92Uz23T0Rn/wnSvvlbWF7Pvd+0DMJpFDEyr9n6jvVLR7mgxMaCqZbVaB1W/wTwDjni780WgVx8OPUXkLx3/DyarMcIiPeI5UN+FeHDovTsBWFC95msFLm80PMRPuHOejWp65yyEemGujZEPO2D5VVah7fshM2HTz63+bkEBYoqrftuv3vXKBRG78MIrUrKpqxmnCKNKDUUWJ4yk3+NwuOiHlKdly5kZ7MNFaL73XKo8HH287lDWz0lIazs91dQA9a9JOyTsp8YqGtIJGGCbhrUDtiQJ199oBU84mw3VH/EEzm4mPv4sW5fm7BnnoH/a+9vXySc+498rkdLlzFwxrQkWyJ/pFOx4UA3mCtGQK+OSwLPc+X4SRqA4fiyqKxVAL1kpLTSDL3QA82I7GzBaXsxUXzS4nmteMhUyzTdwAhKVydL0gC3d7NmkAFSyRjdGzutUUXshYxg0ywRgYebe8uzJcTj4nNRgaalYLdg3guuDulD+dJmILsrcLmA6KD/pvfDn8PYt+4ZjNIvN2E9GF6uXDu4Ux+AlOTLk9BChxUF8uBX9ev5cvWtQ= |
107 | 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 0 iQIVAwUAVRw4nyBXgaxoKi1yAQIFExAAkbCPtLjQlJvPaYCL1KhNR+ZVAmn7JrFH3XhvR26RayYbs4NxR3W1BhwhDy9+W+28szEx1kQvmr6t1bXAFywY0tNJOeuLU7uFfmbgAfYgkQ9kpsQNqFYkjbCyftw0S9vX9VOJ9DqUoDWuKfX7VzjkwE9dCfKI5F+dvzxnd6ZFjB85nyHBQuTZlzXl0+csY212RJ2G2j/mzEBVyeZj9l7Rm+1X8AC1xQMWRJGiyd0b7nhYqoOcceeJFAV1t9QO4+gjmkM5kL0orjxTnuVsxPTxcC5ca1BfidPWrZEto3duHWNiATGnCDylxxr52BxCAS+BWePW9J0PROtw1pYaZ9pF4N5X5LSXJzqX7ZiNGckxqIjry09+Tbsa8FS0VkkYBEiGotpuo4Jd05V6qpXfW2JqAfEVo6X6aGvPM2B7ZUtKi30I4J+WprrOP3WgZ/ZWHe1ERYKgjDqisn3t/D40q30WQUeQGltGsOX0Udqma2RjBugO5BHGzJ2yer4GdJXg7q1OMzrjAEuz1IoKvIB/o1pg86quVA4H2gQnL1B8t1M38/DIafyw7mrEY4Z3GL44Reev63XVvDE099Vbhqp7ufwq81Fpq7Xxa5vsr9SJ+8IqqQr8AcYSuK3G3L6BmIuSUAYMRqgl35FWoWkGyZIG5c6K6zI8w5Pb0aGi6Lb2Wfb9zbc= |
108 | e89f909edffad558b56f4affa8239e4832f88de0 0 iQIVAwUAVTBozCBXgaxoKi1yAQLHeg/+IvfpPmG7OSqCoHvMVETYdrqT7lKCwfCQWMFOC/2faWs1n4R/qQNm6ckE5OY888RK8tVQ7ue03Pg/iyWgQlYfS7Njd3WPjS4JsnEBxIvuGkIu6TPIXAUAH0PFTBh0cZEICDpPEVT2X3bPRwDHA+hUE9RrxM5zJ39Fpk/pTYCjQ9UKfEhXlEfka75YB39g2Y/ssaSbn5w/tAAx8sL72Y4G96D4IV2seLHZhB3VQ7UZKThEWn6UdVOoKj+urIwGaBYMeekGVtHSh6fnHOw3EtDO9mQ5HtAz2Bl4CwRYN8eSN+Dwgr+mdk8MWpQQJ+i1A8jUhUp8gn1Pe5GkIH4CWZ9+AvLLnshe2MkVaTT1g7EQk37tFkkdZDRBsOHIvpF71B9pEA1gMUlX4gKgh5YwukgpQlDmFCfY7XmX6eXw9Ub+EckEwYuGMz7Fbwe9J/Ce4DxvgJgq3/cu/jb3bmbewH6tZmcrlqziqqA8GySIwcURnF1c37e7+e7x1jhFJfCWpHzvCusjKhUp9tZsl9Rt1Bo/y41QY+avY7//ymhbwTMKgqjzCYoA+ipF4JfZlFiZF+JhvOSIFb0ltkfdqKD+qOjlkFaglvQU1bpGKLJ6cz4Xk2Jqt5zhcrpyDMGVv9aiWywCK2ZP34RNaJ6ZFwzwdpXihqgkm5dBGoZ4ztFUfmjXzIg= |
109 | 8cc6036bca532e06681c5a8fa37efaa812de67b5 0 iQIVAwUAVUP0xCBXgaxoKi1yAQLIChAAme3kg1Z0V8t5PnWKDoIvscIeAsD2s6EhMy1SofmdZ4wvYD1VmGC6TgXMCY7ssvRBhxqwG3GxwYpwELASuw2GYfVot2scN7+b8Hs5jHtkQevKbxarYni+ZI9mw/KldnJixD1yW3j+LoJFh/Fu6GD2yrfGIhimFLozcwUu3EbLk7JzyHSn7/8NFjLJz0foAYfcbowU9/BFwNVLrQPnsUbWcEifsq5bYso9MBO9k+25yLgqHoqMbGpJcgjubNy1cWoKnlKS+lOJl0/waAk+aIjHXMzFpRRuJDjxEZn7V4VdV5d23nrBTcit1BfMzga5df7VrLPVRbom1Bi0kQ0BDeDex3hHNqHS5X+HSrd/njzP1xp8twG8hTE+njv85PWoGBTo1eUGW/esChIJKA5f3/F4B9ErgBNNOKnYmRgxixd562OWAwAQZK0r0roe2H/Mfg2VvgxT0kHd22NQLoAv0YI4jcXcCFrnV/80vHUQ8AsAYAbkLcz1jkfk3YwYDP8jbJCqcwJRt9ialYKJwvXlEe0TMeGdq7EjCO0z/pIpu82k2R/C0FtCFih3bUvJEmWoVVx8UGkDDQEORLbzxQCt0IOiQGFcoCCxgQmL0x9ZoljCWg5vZuuhU4uSOuRTuM+aa4xoLkeOcvgGRSOXrqfkV8JpWKoJB4dmY2qSuxw8LsAAzK0= |
110 | ed18f4acf435a2824c6f49fba40f42b9df5da7ad 0 iQIVAwUAVWy9mCBXgaxoKi1yAQIm+Q/+I/tV8DC51d4f/6T5OR+motlIx9U5za5p9XUUzfp3tzSY2PutVko/FclajVdFekZsK5pUzlh/GZhfe1jjyEEIr3UC3yWk8hMcvvS+2UDmfy81QxN7Uf0kz4mZOlME6d/fYDzf4cDKkkCXoec3kyZBw7L84mteUcrJoyb5K3fkQBrK5CG/CV7+uZN6b9+quKjtDhDEkAyc6phNanzWNgiHGucEbNgXsKM01HmV1TnN4GXTKx8y2UDalIJOPyes2OWHggibMHbaNnGnwSBAK+k29yaQ5FD0rsA+q0j3TijA1NfqvtluNEPbFOx/wJV4CxonYad93gWyEdgU34LRqqw1bx7PFUvew2/T3TJsxQLoCt67OElE7ScG8evuNEe8/4r3LDnzYFx7QMP5r5+B7PxVpj/DT+buS16BhYS8pXMMqLynFOQkX5uhEM7mNC0JTXQsBMHSDAcizVDrdFCF2OSfQjLpUfFP1VEWX7EInqj7hZrd+GE7TfBD8/rwSBSkkCX2aa9uKyt6Ius1GgQUuEETskAUvvpsNBzZxtvGpMMhqQLGlJYnBbhOmsbOyTSnXU66KJ5e/H3O0KRrF09i74v30DaY4uIH8xG6KpSkfw5s/oiLCtagfc0goUvvojk9pACDR3CKM/jVC63EVp2oUcjT72jUgSLxBgi7siLD8IW86wc= |
111 | 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 0 iQIVAwUAVZRtzSBXgaxoKi1yAQJVLhAAtfn+8OzHIp6wRC4NUbkImAJRLsNTRPKeRSWPCF5O5XXQ84hp+86qjhndIE6mcJSAt4cVP8uky6sEa8ULd6b3ACRBvtgZtsecA9S/KtRjyE9CKr8nP+ogBNqJPaYlTz9RuwGedOd+8I9lYgsnRjfaHSByNMX08WEHtWqAWhSkAz/HO32ardS38cN97fckCgQtA8v7c77nBT7vcw4epgxyUQvMUxUhqmCVVhVfz8JXa5hyJxFrOtqgaVuQ1B5Y/EKxcyZT+JNHPtu3V1uc1awS/w16CEPstNBSFHax5MuT9UbY0mV2ZITP99EkM+vdomh82VHdnMo0i7Pz7XF45ychD4cteroO9gGqDDt9j7hd1rubBX1bfkPsd/APJlyeshusyTj+FqsUD/HDlvM9LRjY1HpU7i7yAlLQQ3851XKMLUPNFYu2r3bo8Wt/CCHtJvB4wYuH+7Wo3muudpU01ziJBxQrUWwPbUrG+7LvO1iEEVxB8l+8Vq0mU3Te7lJi1kGetm6xHNbtvQip5P2YUqvv+lLo/K8KoJDxsh63Y01JGwdmUDb8mnFlRx4J7hQJaoNEvz3cgnc4X8gDJD8sUOjGOPnbtz2QwTY+zj/5+FdLxWDCxNrHX5vvkVdJHcCqEfVvQTKfDMOUeKuhjI7GD7t3xRPfUxq19jjoLPe7aqn1Z1s= |
112 | 96a38d44ba093bd1d1ecfd34119e94056030278b 0 iQIVAwUAVarUUyBXgaxoKi1yAQIfJw/+MG/0736F/9IvzgCTF6omIC+9kS8JH0n/JBGPhpbPAHK4xxjhOOz6m3Ia3c3HNoy+I6calwU6YV7k5dUzlyLhM0Z5oYpdrH+OBNxDEsD5SfhclfR63MK1kmgtD33izijsZ++6a+ZaVfyxpMTksKOktWSIDD63a5b/avb6nKY64KwJcbbeXPdelxvXV7TXYm0GvWc46BgvrHOJpYHCDaXorAn6BMq7EQF8sxdNK4GVMNMVk1njve0HOg3Kz8llPB/7QmddZXYLFGmWqICyUn1IsJDfePxzh8sOYVCbxAgitTJHJJmmH5gzVzw7t7ljtmxSJpcUGQJB2MphejmNFGfgvJPB9c6xOCfUqDjxN5m24V+UYesZntpfgs3lpfvE7785IpVnf6WfKG4PKty01ome/joHlDlrRTekKMlpiBapGMfv8EHvPBrOA+5yAHNfKsmcyCcjD1nvXYZ2/X9qY35AhdcBuNkyp55oPDOdtYIHfnOIxlYMKG1dusDx3Z4eveF0lQTzfRVoE5w+k9A2Ov3Zx0aiSkFFevJjrq5QBfs9dAiT8JYgBmWhaJzCtJm12lQirRMKR/br88Vwt/ry/UVY9cereMNvRYUGOGfC8CGGDCw4WDD+qWvyB3mmrXVuMlXxQRIZRJy5KazaQXsBWuIsx4kgGqC5Uo+yzpiQ1VMuCyI= |
113 | 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 0 iQIVAwUAVbuouCBXgaxoKi1yAQL2ng//eI1w51F4YkDiUAhrZuc8RE/chEd2o4F6Jyu9laA03vbim598ntqGjX3+UkOyTQ/zGVeZfW2cNG8zkJjSLk138DHCYl2YPPD/yxqMOJp/a7U34+HrA0aE5Y2pcfx+FofZHRvRtt40UCngicjKivko8au7Ezayidpa/vQbc6dNvGrwwk4KMgOP2HYIfHgCirR5UmaWtNpzlLhf9E7JSNL5ZXij3nt6AgEPyn0OvmmOLyUARO/JTJ6vVyLEtwiXg7B3sF5RpmyFDhrkZ+MbFHgL4k/3y9Lb97WaZl8nXJIaNPOTPJqkApFY/56S12PKYK4js2OgU+QsX1XWvouAhEx6CC6Jk9EHhr6+9qxYFhBJw7RjbswUG6LvJy/kBe+Ei5UbYg9dATf3VxQ6Gqs19lebtzltERH2yNwaHyVeqqakPSonOaUyxGMRRosvNHyrTTor38j8d27KksgpocXzBPZcc1MlS3vJg2nIwZlc9EKM9z5R0J1KAi1Z/+xzBjiGRYg5EZY6ElAw30eCjGta7tXlBssJiKeHut7QTLxCZHQuX1tKxDDs1qlXlGCMbrFqo0EiF9hTssptRG3ZyLwMdzEjnh4ki6gzONZKDI8uayAS3N+CEtWcGUtiA9OwuiFXTwodmles/Mh14LEhiVZoDK3L9TPcY22o2qRuku/6wq6QKsg= |
114 | 1a45e49a6bed023deb229102a8903234d18054d3 0 iQIVAwUAVeYa2SBXgaxoKi1yAQLWVA//Q7vU0YzngbxIbrTPvfFiNTJcT4bx9u1xMHRZf6QBIE3KtRHKTooJwH9lGR0HHM+8DWWZup3Vzo6JuWHMGoW0v5fzDyk2czwM9BgQQPfEmoJ/ZuBMevTkTZngjgHVwhP3tHFym8Rk9vVxyiZd35EcxP+4F817GCzD+K7XliIBqVggmv9YeQDXfEtvo7UZrMPPec79t8tzt2UadI3KC1jWUriTS1Fg1KxgXW6srD80D10bYyCkkdo/KfF6BGZ9SkF+U3b95cuqSmOfoyyQwUA3JbMXXOnIefnC7lqRC2QTC6mYDx5hIkBiwymXJBe8rpq/S94VVvPGfW6A5upyeCZISLEEnAz0GlykdpIy/NogzhmWpbAMOus05Xnen6xPdNig6c/M5ZleRxVobNrZSd7c5qI3aUUyfMKXlY1j9oiUTjSKH1IizwaI3aL/MM70eErBxXiLs2tpQvZeaVLn3kwCB5YhywO3LK0x+FNx4Gl90deAXMYibGNiLTq9grpB8fuLg9M90JBjFkeYkrSJ2yGYumYyP/WBA3mYEYGDLNstOby4riTU3WCqVl+eah6ss3l+gNDjLxiMtJZ/g0gQACaAvxQ9tYp5eeRMuLRTp79QQPxv97s8IyVwE/TlPlcSFlEXAzsBvqvsolQXRVi9AxA6M2davYabBYAgRf6rRfgujoU= |
115 | 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 0 iQIVAwUAVg1oMSBXgaxoKi1yAQLPag/+Pv0+pR9b9Y5RflEcERUzVu92q+l/JEiP7PHP9pAZuXoQ0ikYBFo1Ygw8tkIG00dgEaLk/2b7E3OxaU9pjU3thoX//XpTcbkJtVhe7Bkjh9/S3dRpm2FWNL9n0qnywebziB45Xs8XzUwBZTYOkVRInYr/NzSo8KNbQH1B4u2g56veb8u/7GtEvBSGnMGVYKhVUZ3jxyDf371QkdafMOJPpogkZcVhXusvMZPDBYtTIzswyxBJ2jxHzjt8+EKs+FI3FxzvQ9Ze3M5Daa7xfiHI3sOgECO8GMVaJi0F49lttKx08KONw8xLlEof+cJ+qxLxQ42X5XOQglJ2/bv5ES5JiZYAti2XSXbZK96p4wexqL4hnaLVU/2iEUfqB9Sj6itEuhGOknPD9fQo1rZXYIS8CT5nGTNG4rEpLFN6VwWn1btIMNkEHw998zU7N3HAOk6adD6zGcntUfMBvQC3V4VK3o7hp8PGeySrWrOLcC/xLKM+XRonz46woJK5D8w8lCVYAxBWEGKAFtj9hv9R8Ye9gCW0Q8BvJ7MwGpn+7fLQ1BVZdV1LZQTSBUr5u8mNeDsRo4H2hITQRhUeElIwlMsUbbN078a4JPOUgPz1+Fi8oHRccBchN6I40QohL934zhcKXQ+NXYN8BgpCicPztSg8O8Y/qvhFP12Zu4tOH8P/dFY= |
116 | b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 0 iQIVAwUAViarTyBXgaxoKi1yAQLZgRAAh7c7ebn7kUWI5M/b/T6qHGjFrU5azkjamzy9IG+KIa2hZgSMxyEM7JJUFqKP4TiWa3sW03bjKGSM/SjjDSSyheX+JIVSPNyKrBwneYhPq45Ius8eiHziClkt0CSsl2d9xDRpI0JmHbN0Pf8nh7rnbL+231GDAOT6dP+2S8K1HGa/0BgEcL9gpYs4/2GyjL+hBSUjyrabzvwe48DCN5W0tEJbGFw5YEADxdfbVbNEuXL81tR4PFGiJxPW0QKRLDB74MWmiWC0gi2ZC/IhbNBZ2sLb6694d4Bx4PVwtiARh63HNXVMEaBrFu1S9NcMQyHvAOc6Zw4izF/PCeTcdEnPk8J1t5PTz09Lp0EAKxe7CWIViy350ke5eiaxO3ySrNMX6d83BOHLDqEFMSWm+ad+KEMT4CJrK4X/n/XMgEFAaU5nWlIRqrLRIeU2Ifc625T0Xh4BgTqXPpytQxhgV5b+Fi6duNk4cy+QnHT4ymxI6BPD9HvSQwc+O7h37qjvJVZmpQX6AP8O75Yza8ZbcYKRIIxZzOkwNpzE5A/vpvP5bCRn7AGcT3ORWmAYr/etr3vxUvt2fQz6U/R4S915V+AeWBdcp+uExu6VZ42M0vhhh0lyzx1VRJGVdV+LoxFKkaC42d0yT+O1QEhSB7WL1D3/a/iWubv6ieB/cvNMhFaK9DA= |
117 | 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 0 iQIVAwUAVjZiKiBXgaxoKi1yAQKBWQ/+JcE37vprSOA5e0ezs/avC7leR6hTlXy9O5bpFnvMpbVMTUp+KfBE4HxTT0KKXKh9lGtNaQ+lAmHuy1OQE1hBKPIaCUd8/1gunGsXgRM3TJ9LwjFd4qFpOMxvOouc6kW5kmea7V9W2fg6aFNjjc/4/0J3HMOIjmf2fFz87xqR1xX8iezJ57A4pUPNViJlOWXRzfa56cI6VUe5qOMD0NRXcY+JyI5qW25Y/aL5D9loeKflpzd53Ue+Pu3qlhddJd3PVkaAiVDH+DYyRb8sKgwuiEsyaBO18IBgC8eDmTohEJt6707A+WNhwBJwp9aOUhHC7caaKRYhEKuDRQ3op++VqwuxbFRXx22XYR9bEzQIlpsv9GY2k8SShU5MZqUKIhk8vppFI6RaID5bmALnLLmjmXfSPYSJDzDuCP5UTQgI3PKPOATorVrqMdKzfb7FiwtcTvtHAXpOgLaY9P9XIePbnei6Rx9TfoHYDvzFWRqzSjl21xR+ZUrJtG2fx7XLbMjEAZJcnjP++GRvNbHBOi57aX0l2LO1peQqZVMULoIivaoLFP3i16RuXXQ/bvKyHmKjJzGrLc0QCa0yfrvV2m30RRMaYlOv7ToJfdfZLXvSAP0zbAuDaXdjGnq7gpfIlNE3xM+kQ75Akcf4V4fK1p061EGBQvQz6Ov3PkPiWL/bxrQ= |
118 | 1aa5083cbebbe7575c88f3402ab377539b484897 0 iQIVAwUAVkEdCCBXgaxoKi1yAQKdWg//crTr5gsnHQppuD1p+PPn3/7SMsWJ7bgbuaXgERDLC0zWMfhM2oMmu/4jqXnpangdBVvb0SojejgzxoBo9FfRQiIoKt0vxmmn+S8CrEwb99rpP4M7lgyMAInKPMXQdYxkoDNwL70Afmog6eBtlxjYnu8nmUE/swu6JoVns+tF8UOvIKFYbuCcGujo2pUOQC0xBGiHeHSGRDJOlWmY2d7D/PkQtQE/u/d4QZt7enTHMiV44XVJ8+0U0f1ZQE7V+hNWf+IjwcZtL95dnQzUKs6tXMIln/OwO+eJ3d61BfLvmABvCwUC9IepPssNSFBUfGqBAP5wXOzFIPSYn00IWpmZtCnpUNL99X1IV3RP+p99gnEDTScQFPYt5B0q5I1nFdRh1p48BSF/kjPA7V++UfBwMXrrYLKhUR9BjmrRzYnyXJKwbH6iCNj5hsXUkVrBdBi/FnMczgsVILfFcIXUfnJD3E/dG+1lmuObg6dEynxiGChTuaR4KkLa5ZRkUcUl6fWlSRsqSNbGEEbdwcI+nTCZqJUlLSghumhs0Z89Hs1nltBd1ALX2VLJEHrKMrFQ8NfEBeCB6ENqMJi5qPlq354MCdGOZ9RvisX/HlxE4Q61BW0+EwnyXSch6LFSOS3axOocUazMoK1XiOTJSv/5bAsnwb0ztDWeUj9fZEJL+SWtgB8= |
119 | 2d437a0f3355834a9485bbbeb30a52a052c98f19 0 iQIVAwUAVl5U9CBXgaxoKi1yAQLocg//a4YFz9UVSIEzVEJMUPJnN2dBvEXRpwpb5CdKPd428+18K6VWZd5Mc6xNNRV5AV/hCYylgqDplIvyOvwCj7uN8nEOrLUQQ0Pp37M5ZIX8ZVCK/wgchJ2ltabUG1NrZ7/JA84U79VGLAECMnD0Z9WvZDESpVXmdXfxrk1eCc3omRB0ofNghEx+xpYworfZsu8aap1GHQuBsjPv4VyUWGpMq/KA01PdxRTELmrJnfSyr0nPKwxlI5KsbA1GOe+Mk3tp5HJ42DZqLtKSGPirf6E+6lRJeB0H7EpotN4wD3yZDsw6AgRb2C/ay/3T3Oz7CN+45mwuujV9Cxx5zs1EeOgZcqgA/hXMcwlQyvQDMrWpO8ytSBm6MhOuFOTB3HnUxfsnfSocLJsbNwGWKceAzACcXSqapveVAz/7h+InFgl/8Qce28UJdnX5wro5gP6UWt+xrvc7vfmVGgI3oxbiOUrfglhkjmrxBjEiDQy4BWH7HWMZUVxnqPQRcxIE10+dv0KtM/PBkbUtnbGJ88opFBGkFweje5vQcZy/duuPEIufRkPr8EV47QjOxlvldEjlLq3+QUdJZEgCIFw1X0y7Pix4dsPFjwOmAyo4El1ePrdFzG3dXSVA3eHvMDRnYnNlue9wHvKhYbBle5xTOZBgGuMzhDVe+54JLql5JYr4WrI1pvA= |
120 | ea389970c08449440587712117f178d33bab3f1e 0 iQIVAwUAVociGyBXgaxoKi1yAQJx9Q//TzMypcls5CQW3DM9xY1Q+RFeIw1LcDIev6NDBjUYxULb2WIK2qPw4Th5czF622SMd+XO/kiQeWYp9IW90MZOUVT1YGgUPKlKWMjkf0lZEPzprHjHq0+z/no1kBCBQg2uUOLsb6Y7zom4hFCyPsxXOk5nnxcFEK0VDbODa9zoKb/flyQ7rtzs+Z6BljIQ0TJAJsXs+6XgrW1XJ/f6nbeqsQyPklIBJuGKiaU1Pg8wQe6QqFaO1NYgM3hBETku6r3OTpUhu/2FTUZ7yDWGGzBqmifxzdHoj7/B+2qzRpII77PlZqoe6XF+UOObSFnhKvXKLjlGY5cy3SXBMbHkPcYtHua8wYR8LqO2bYYnsDd9qD0DJ+LlqH0ZMUkB2Cdk9q/cp1PGJWGlYYecHP87DLuWKwS+a6LhVI9TGkIUosVtLaIMsUUEz83RJFb4sSGOXtjk5DDznn9QW8ltXXMTdGQwFq1vmuiXATYenhszbvagrnbAnDyNFths4IhS1jG8237SB36nGmO3zQm5V7AMHfSrISB/8VPyY4Si7uvAV2kMWxuMhYuQbBwVx/KxbKrYjowuvJvCKaV101rWxvSeU2wDih20v+dnQKPveRNnO8AAK/ICflVVsISkd7hXcfk+SnhfxcPQTr+HQIJEW9wt5Q8WbgHk9wuR8kgXQEX6tCGpT/w= |
121 | 158bdc8965720ca4061f8f8d806563cfc7cdb62e 0 iQIVAwUAVqBhFyBXgaxoKi1yAQLJpQ//S8kdgmVlS+CI0d2hQVGYWB/eK+tcntG+bZKLto4bvVy5d0ymlDL0x7VrJMOkwzkU1u/GaYo3L6CVEiM/JGCgB32bllrpx+KwQ0AyHswMZruo/6xrjDIYymLMEJ9yonXBZsG7pf2saYTHm3C5/ZIPkrDZSlssJHJDdeWqd75hUnx3nX8dZ4jIIxYDhtdB5/EmuEGOVlbeBHVpwfDXidSJUHJRwJvDqezUlN003sQdUvOHHtRqBrhsYEhHqPMOxDidAgCvjSfWZQKOTKaPE/gQo/BP3GU++Fg55jBz+SBXpdfQJI2Gd8FZfjLkhFa9vTTTcd10YCd4CZbYLpj/4R2xWj1U4oTVEFa6d+AA5Yyu8xG53XSCCPyzfagyuyfLqsaq5r1qDZO/Mh5KZCTvc9xSF5KXj57mKvzMDpiNeQcamGmsV4yXxymKJKGMQvbnzqp+ItIdbnfk38Nuac8rqNnGmFYwMIPa50680vSZT/NhrlPJ8FVTJlfHtSUZbdjPpsqw7BgjFWaVUdwgCKIGERiK7zfR0innj9rF5oVwT8EbKiaR1uVxOKnTwZzPCbdO1euNg/HutZLVQmugiLAv5Z38L3YZf5bH7zJdUydhiTI4mGn/mgncsKXoSarnnduhoYu9OsQZc9pndhxjAEuAslEIyBsLy81fR2HOhUzw5FGNgdY= |
122 | 2408645de650d8a29a6ce9e7dce601d8dd0d1474 0 iQIVAwUAVq/xFSBXgaxoKi1yAQLsxhAAg+E6uJCtZZOugrrFi9S6C20SRPBwHwmw22PC5z3Ufp9Vf3vqSL/+zmWI9d/yezIVcTXgM9rKCvq58sZvo4FuO2ngPx7bL9LMJ3qx0IyHUKjwa3AwrzjSzvVhNIrRoimD+lVBI/GLmoszpMICM+Nyg3D41fNJKs6YpnwwsHNJkjMwz0n2SHAShWAgIilyANNVnwnzHE68AIkB/gBkUGtrjf6xB9mXQxAv4GPco/234FAkX9xSWsM0Rx+JLLrSBXoHmIlmu9LPjC0AKn8/DDke+fj7bFaF7hdJBUYOtlYH6f7NIvyZSpw0FHl7jPxoRCtXzIV+1dZEbbIMIXzNtzPFVDYDfMhLqpTgthkZ9x0UaMaHecCUWYYBp8G/IyVS40GJodl8xnRiXUkFejbK/NDdR1f9iZS0dtiFu66cATMdb6d+MG+zW0nDKiQmBt6bwynysqn4g3SIGQFEPyEoRy0bXiefHrlkeHbdfc4zgoejx3ywcRDMGvUbpWs5C43EPu44irKXcqC695vAny3A7nZpt/XP5meDdOF67DNQPvhFdjPPbJBpSsUi2hUlZ+599wUfr3lNVzeEzHT7XApTOf6ysuGtHH3qcVHpFqQSRL1MI0f2xL13UadgTVWYrnHEis7f+ncwlWiR0ucpJB3+dQQh3NVGVo89MfbIZPkA8iil03U= |
123 | b698abf971e7377d9b7ec7fc8c52df45255b0329 0 iQIVAwUAVrJ4YCBXgaxoKi1yAQJsKw/+JHSR0bIyarO4/VilFwsYxCprOnPxmUdS4qc4yjvpbf7Dqqr/OnOHJA29LrMoqWqsHgREepemjqiNindwNtlZec+KgmbF08ihSBBpls96UTTYTcytKRkkbrB+FhwB0iDl/o8RgGPniyG6M7gOp6p8pXQVRCOToIY1B/G0rtpkcU1N3GbiZntO5Fm/LPAVIE74VaDsamMopQ/wEB8qiERngX/M8SjO1ZSaVNW6KjRUsarLXQB9ziVJBolK/WnQsDwEeuWU2udpjBiOHnFC6h84uBpc8rLGhr419bKMJcjgl+0sl2zHGPY2edQYuJqVjVENzf4zzZA+xPgKw3GrSTpd37PEnGU/fufdJ0X+pp3kvmO1cV3TsvVMTCn7NvS6+w8SGdHdwKQQwelYI6vmJnjuOCATbafJiHMaOQ0GVYYk6PPoGrYcQ081x6dStCMaHIPOV1Wirwd2wq+SN9Ql8H6njftBf5Sa5tVWdW/zrhsltMsdZYZagZ/oFT3t83exL0rgZ96bZFs0j3HO3APELygIVuQ6ybPsFyToMDbURNDvr7ZqPKhQkkdHIUMqEez5ReuVgpbO9CWV/yWpB1/ZCpjNBZyDvw05kG2mOoC7AbHc8aLUS/8DetAmhwyb48LW4qjfUkO7RyxVSxqdnaBOMlsg1wsP2S+SlkZKsDHjcquZJ5U= |
124 | d493d64757eb45ada99fcb3693e479a51b7782da 0 iQIVAwUAVtYt4SBXgaxoKi1yAQL6TQ/9FzYE/xOSC2LYqPdPjCXNjGuZdN1WMf/8fUMYT83NNOoLEBGx37C0bAxgD4/P03FwYMuP37IjIcX8vN6fWvtG9Oo0o2n/oR3SKjpsheh2zxhAFX3vXhFD4U18wCz/DnM0O1qGJwJ49kk/99WNgDWeW4n9dMzTFpcaeZBCu1REbZQS40Z+ArXTDCr60g5TLN1XR1WKEzQJvF71rvaE6P8d3GLoGobTIJMLi5UnMwGsnsv2/EIPrWHQiAY9ZEnYq6deU/4RMh9c7afZie9I+ycIA/qVH6vXNt3/a2BP3Frmv8IvKPzqwnoWmIUamew9lLf1joD5joBy8Yu+qMW0/s6DYUGQ4Slk9qIfn6wh4ySgT/7FJUMcayx9ONDq7920RjRc+XFpD8B3Zhj2mM+0g9At1FgX2w2Gkf957oz2nlgTVh9sdPvP6UvWzhqszPMpdG5Vt0oc5vuyobW333qSkufCxi5gmH7do1DIzErMcy8b6IpZUDeQ/dakKwLQpZVVPF15IrNa/zsOW55SrGrL8/ErM/mXNQBBAqvRsOLq2njFqK2JaoG6biH21DMjHVZFw2wBRoLQxbOppfz2/e3mNkNy9HjgJTW3+0iHWvRzMSjwRbk9BlbkmH6kG5163ElHq3Ft3uuQyZBL9I5SQxlHi9s/CV0YSTYthpWR3ChKIMoqBQ0= |
125 | ae279d4a19e9683214cbd1fe8298cf0b50571432 0 iQIVAwUAVvqzViBXgaxoKi1yAQKUCxAAtctMD3ydbe+li3iYjhY5qT0wyHwPr9fcLqsQUJ4ZtD4sK3oxCRZFWFxNBk5bIIyiwusSEJPiPddoQ7NljSZlYDI0HR3R4vns55fmDwPG07Ykf7aSyqr+c2ppCGzn2/2ID476FNtzKqjF+LkVyadgI9vgZk5S4BgdSlfSRBL+1KtB1BlF5etIZnc5U9qs1uqzZJc06xyyF8HlrmMZkAvRUbsx/JzA5LgzZ2WzueaxZgYzYjDk0nPLgyPPBj0DVyWXnW/kdRNmKHNbaZ9aZlWmdPCEoq5iBm71d7Xoa61shmeuVZWvxHNqXdjVMHVeT61cRxjdfxTIkJwvlRGwpy7V17vTgzWFxw6QJpmr7kupRo3idsDydLDPHGUsxP3uMZFsp6+4rEe6qbafjNajkRyiw7kVGCxboOFN0rLVJPZwZGksEIkw58IHcPhZNT1bHHocWOA/uHJTAynfKsAdv/LDdGKcZWUCFOzlokw54xbPvdrBtEOnYNp15OY01IAJd2FCUki5WHvhELUggTjfank1Tc3/Rt1KrGOFhg80CWq6eMiuiWkHGvYq3fjNLbgjl3JJatUFoB+cX1ulDOGsLJEXQ4v5DNHgel0o2H395owNlStksSeW1UBVk0hUK/ADtVUYKAPEIFiboh1iDpEOl40JVnYdsGz3w5FLj2w+16/1vWs= |
126 | 740156eedf2c450aee58b1a90b0e826f47c5da64 0 iQIVAwUAVxLGMCBXgaxoKi1yAQLhIg/8DDX+sCz7LmqO47/FfTo+OqGR+bTTqpfK3WebitL0Z6hbXPj7s45jijqIFGqKgMPqS5oom1xeuGTPHdYA0NNoc/mxSCuNLfuXYolpNWPN71HeSDRV9SnhMThG5HSxI+P0Ye4rbsCHrVV+ib1rV81QE2kZ9aZsJd0HnGd512xJ+2ML7AXweM/4lcLmMthN+oi/dv1OGLzfckrcr/fEATCLZt55eO7idx11J1Fk4ptQ6dQ/bKznlD4hneyy1HMPsGxw+bCXrMF2C/nUiRLHdKgGqZ+cDq6loQRfFlQoIhfoEnWC424qbjH4rvHgkZHqC59Oi/ti9Hi75oq9Tb79yzlCY/fGsdrlJpEzrTQdHFMHUoO9CC+JYObXHRo3ALnC5350ZBKxlkdpmucrHTgcDabfhRlx9vDxP4RDopm2hAjk2LJH7bdxnGEyZYkTOZ3hXKnVpt2hUQb4jyzzC9Kl47TFpPKNVKI+NLqRRZAIdXXiy24KD7WzzE6L0NNK0/IeqKBENLL8I1PmDQ6XmYTQVhTuad1jjm2PZDyGiXmJFZO1O/NGecVTvVynKsDT6XhEvzyEtjXqD98rrhbeMHTcmNSwwJMDvm9ws0075sLQyq2EYFG6ECWFypdA/jfumTmxOTkMtuy/V1Gyq7YJ8YaksZ7fXNY9VuJFP72grmlXc6Dvpr4= |
127 | f85de28eae32e7d3064b1a1321309071bbaaa069 0 iQIVAwUAVyZQaiBXgaxoKi1yAQJhCQ//WrRZ55k3VI/OgY+I/HvgFHOC0sbhe207Kedxvy00a3AtXM6wa5E95GNX04QxUfTWUf5ZHDfEgj0/mQywNrH1oJG47iPZSs+qXNLqtgAaXtrih6r4/ruUwFCRFxqK9mkhjG61SKicw3Q7uGva950g6ZUE5BsZ7XJWgoDcJzWKR+AH992G6H//Fhi4zFQAmB34++sm80wV6wMxVKA/qhQzetooTR2x9qrHpvCKMzKllleJe48yzPLJjQoaaVgXCDav0eIePFNw0WvVSldOEp/ADDdTGa65qsC1rO2BB1Cu5+frJ/vUoo0PwIgqgD6p2i41hfIKvkp6130TxmRVxUx+ma8gBYEpPIabV0flLU72gq8lMlGBBSnQ+fcZsfs/Ug0xRN0tzkEScmZFiDxRGk0y7IalXzv6irwOyC2fZCajXGJDzkROQXWMgy9eKkwuFhZBmPVYtrATSq3jHLVmJg5vfdeiVzA6NKxAgGm2z8AsRrijKK8WRqFYiH6xcWKG5u+FroPQdKa0nGCkPSTH3tvC6fAHTVm7JeXch5QE/LiS9Y575pM2PeIP+k+Fr1ugK0AEvYJAXa5UIIcdszPyI+TwPTtWaQ83X99qGAdmRWLvSYjqevOVr7F/fhO3XKFXRCcHA3EzVYnG7nWiVACYF3H2UgN4PWjStbx/Qhhdi9xAuks= |
128 | a56296f55a5e1038ea5016dace2076b693c28a56 0 iQIVAwUAVyZarCBXgaxoKi1yAQL87g/8D7whM3e08HVGDHHEkVUgqLIfueVy1mx0AkRvelmZmwaocFNGpZTd3AjSwy6qXbRNZFXrWU85JJvQCi3PSo/8bK43kwqLJ4lv+Hv2zVTvz30vbLWTSndH3oVRu38lIA7b5K9J4y50pMCwjKLG9iyp+aQG4RBz76fJMlhXy0gu38A8JZVKEeAnQCbtzxKXBzsC8k0/ku/bEQEoo9D4AAGlVTbl5AsHMp3Z6NWu7kEHAX/52/VKU2I0LxYqRxoL1tjTVGkAQfkOHz1gOhLXUgGSYmA9Fb265AYj9cnGWCfyNonlE0Rrk2kAsrjBTGiLyb8WvK/TZmRo4ZpNukzenS9UuAOKxA22Kf9+oN9kKBu1HnwqusYDH9pto1WInCZKV1al7DMBXbGFcnyTXk2xuiTGhVRG5LzCO2QMByBLXiYl77WqqJnzxK3v5lAc/immJl5qa3ATUlTnVBjAs+6cbsbCoY6sjXCT0ClndA9+iZZ1TjPnmLrSeFh5AoE8WHmnFV6oqGN4caX6wiIW5vO+x5Q2ruSsDrwXosXIYzm+0KYKRq9O+MaTwR44Dvq3/RyeIu/cif/Nc7B8bR5Kf7OiRf2T5u97MYAomwGcQfXqgUfm6y7D3Yg+IdAdAJKitxhRPsqqdxIuteXMvOvwukXNDiWP1zsKoYLI37EcwzvbGLUlZvg= |
129 | aaabed77791a75968a12b8c43ad263631a23ee81 0 iQIVAwUAVzpH4CBXgaxoKi1yAQLm5A/9GUYv9CeIepjcdWSBAtNhCBJcqgk2cBcV0XaeQomfxqYWfbW2fze6eE+TrXPKTX1ajycgqquMyo3asQolhHXwasv8+5CQxowjGfyVg7N/kyyjgmJljI+rCi74VfnsEhvG/J4GNr8JLVQmSICfALqQjw7XN8doKthYhwOfIY2vY419613v4oeBQXSsItKC/tfKw9lYvlk4qJKDffJQFyAekgv43ovWqHNkl4LaR6ubtjOsxCnxHfr7OtpX3muM9MLT/obBax5I3EsmiDTQBOjbvI6TcLczs5tVCnTa1opQsPUcEmdA4WpUEiTnLl9lk9le/BIImfYfEP33oVYmubRlKhJYnUiu89ao9L+48FBoqCY88HqbjQI1GO6icfRJN/+NLVeE9wubltbWFETH6e2Q+Ex4+lkul1tQMLPcPt10suMHnEo3/FcOTPt6/DKeMpsYgckHSJq5KzTg632xifyySmb9qkpdGGpY9lRal6FHw3rAhRBqucMgxso4BwC51h04RImtCUQPoA3wpb4BvCHba/thpsUFnHefOvsu3ei4JyHXZK84LPwOj31PcucNFdGDTW6jvKrF1vVUIVS9uMJkJXPu0V4i/oEQSUKifJZivROlpvj1eHy3KeMtjq2kjGyXY2KdzxpT8wX/oYJhCtm1XWMui5f24XBjE6xOcjjm8k4= |
130 | a9764ab80e11bcf6a37255db7dd079011f767c6c 0 iQIVAwUAV09KHyBXgaxoKi1yAQJBWg/+OywRrqU+zvnL1tHJ95PgatsF7S4ZAHZFR098+oCjUDtKpvnm71o2TKiY4D5cckyD2KNwLWg/qW6V+5+2EYU0Y/ViwPVcngib/ZeJP+Nr44TK3YZMRmfFuUEEzA7sZ2r2Gm8eswv//W79I0hXJeFd/o6FgLnn7AbOjcOn3IhWdGAP6jUHv9zyJigQv6K9wgyvAnK1RQE+2CgMcoyeqao/zs23IPXI6XUHOwfrQ7XrQ83+ciMqN7XNRx+TKsUQoYeUew4AanoDSMPAQ4kIudsP5tOgKeLRPmHX9zg6Y5S1nTpLRNdyAxuNuyZtkQxDYcG5Hft/SIx27tZUo3gywHL2U+9RYD2nvXqaWzT3sYB2sPBOiq7kjHRgvothkXemAFsbq2nKFrN0PRua9WG4l3ny0xYmDFPlJ/s0E9XhmQaqy+uXtVbA2XdLEvE6pQ0YWbHEKMniW26w6LJkx4IV6RX/7Kpq7byw/bW65tu/BzgISKau5FYLY4CqZJH7f8QBg3XWpzB91AR494tdsD+ugM45wrY/6awGQx9CY5SAzGqTyFuSFQxgB2rBurb01seZPf8nqG8V13UYXfX/O3/WMOBMr7U/RVqmAA0ZMYOyEwfVUmHqrFjkxpXX+JdNKRiA1GJp5sdRpCxSeXdQ/Ni6AAGZV2IyRb4G4Y++1vP4yPBalas= |
131 | 26a5d605b8683a292bb89aea11f37a81b06ac016 0 iQIVAwUAV3bOsSBXgaxoKi1yAQLiDg//fxmcNpTUedsXqEwNdGFJsJ2E25OANgyv1saZHNfbYFWXIR8g4nyjNaj2SjtXF0wzOq5aHlMWXjMZPOT6pQBdTnOYDdgv+O8DGpgHs5x/f+uuxtpVkdxR6uRP0/ImlTEtDix8VQiN3nTu5A0N3C7E2y+D1JIIyTp6vyjzxvGQTY0MD/qgB55Dn6khx8c3phDtMkzmVEwL4ItJxVRVNw1m+2FOXHu++hJEruJdeMV0CKOV6LVbXHho+yt3jQDKhlIgJ65EPLKrf+yRalQtSWpu7y/vUMcEUde9XeQ5x05ebCiI4MkJ0ULQro/Bdx9vBHkAstUC7D+L5y45ZnhHjOwxz9c3GQMZQt1HuyORqbBhf9hvOkUQ2GhlDHc5U04nBe0VhEoCw9ra54n+AgUyqWr4CWimSW6pMTdquCzAAbcJWgdNMwDHrMalCYHhJksKFARKq3uSTR1Noz7sOCSIEQvOozawKSQfOwGxn/5bNepKh4uIRelC1uEDoqculqCLgAruzcMNIMndNVYaJ09IohJzA9jVApa+SZVPAeREg71lnS3d8jaWh1Lu5JFlAAKQeKGVJmNm40Y3HBjtHQDrI67TT59oDAhjo420Wf9VFCaj2k0weYBLWSeJhfUZ5x3PVpAHUvP/rnHPwNYyY0wVoQEvM/bnQdcpICmKhqcK+vKjDrM= |
132 | 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 0 iQIVAwUAV42tNyBXgaxoKi1yAQI/Iw//V0NtxpVD4sClotAwffBVW42Uv+SG+07CJoOuFYnmHZv/plOzXuuJlmm95L00/qyRCCTUyAGxK/eP5cAKP2V99ln6rNhh8gpgvmZlnYjU3gqFv8tCQ+fkwgRiWmgKjRL6/bK9FY5cO7ATLVu3kCkFd8CEgzlAaUqBfkNFxZxLDLvKqRlhXxVXhKjvkKg5DZ6eJqRQY7w3UqqR+sF1rMLtVyt490Wqv7YQKwcvY7MEKTyH4twGLx/RhBpBi+GccVKvWC011ffjSjxqAfQqrrSVt0Ld1Khj2/p1bDDYpTgtdDgCzclSXWEQpmSdFRBF5wYs/pDMUreI/E6mlWkB4hfZZk1NBRPRWYikXwnhU3ziubCGesZDyBYLrK1vT+tf6giseo22YQmDnOftbS999Pcn04cyCafeFuOjkubYaINB25T20GS5Wb4a0nHPRAOOVxzk/m/arwYgF0ZZZDDvJ48TRMDf3XOc1jc5qZ7AN/OQKbvh2B08vObnnPm3lmBY1qOnhwzJxpNiq+Z/ypokGXQkGBfKUo7rWHJy5iXLb3Biv9AhxY9d5pSTjBmTAYJEic3q03ztzlnfMyi+C13+YxFAbSSNGBP8Hejkkz0NvmB1TBuCKpnZA8spxY5rhZ/zMx+cCw8hQvWHHDUURps7SQvZEfrJSCGJFPDHL3vbfK+LNwI= |
133 | 299546f84e68dbb9bd026f0f3a974ce4bdb93686 0 iQIcBAABCAAGBQJXn3rFAAoJELnJ3IJKpb3VmZoQAK0cdOfi/OURglnN0vYYGwdvSXTPpZauPEYEpwML3dW1j6HRnl5L+H8D8vlYzahK95X4+NNBhqtyyB6wmIVI0NkYfXfd6ACntJE/EnTdLIHIP2NAAoVsggIjiNr26ubRegaD5ya63Ofxz+Yq5iRsUUfHet7o+CyFhExyzdu+Vcz1/E9GztxNfTDVpC/mf+RMLwQTfHOhoTVbaamLCmGAIjw39w72X+vRMJoYNF44te6PvsfI67+6uuC0+9DjMnp5eL/hquSQ1qfks71rnWwxuiPcUDZloIueowVmt0z0sO4loSP1nZ5IP/6ZOoAzSjspqsxeay9sKP0kzSYLGsmCi29otyVSnXiKtyMCW5z5iM6k8XQcMi5mWy9RcpqlNYD7RUTn3g0+a8u7F6UEtske3/qoweJLPhtTmBNOfDNw4JXwOBSZea0QnIIjCeCc4ZGqfojPpbvcA4rkRpxI23YoMrT2v/kp4wgwrqK9fi8ctt8WbXpmGoAQDXWj2bWcuzj94HsAhLduFKv6sxoDz871hqjmjjnjQSU7TSNNnVzdzwqYkMB+BvhcNYxk6lcx3Aif3AayGdrWDubtU/ZRNoLzBwe6gm0udRMXBj4D/60GD6TIkYeL7HjJwfBb6Bf7qvQ6y7g0zbYG9uwBmMeduU7XchErGqQGSEyyJH3DG9OLaFOj |
134 | ccd436f7db6d5d7b9af89715179b911d031d44f1 0 iQIVAwUAV8h7F0emf/qjRqrOAQjmdhAAgYhom8fzL/YHeVLddm71ZB+pKDviKASKGSrBHY4D5Szrh/pYTedmG9IptYue5vzXpspHAaGvZN5xkwrz1/5nmnCsLA8DFaYT9qCkize6EYzxSBtA/W1S9Mv5tObinr1EX9rCSyI4HEJYE8i1IQM5h07SqUsMKDoasd4e29t6gRWg5pfOYq1kc2MTck35W9ff1Fii8S28dqbO3cLU6g5K0pT0JLCZIq7hyTNQdxHAYfebxkVl7PZrZR383IrnyotXVKFFc44qinv94T50uR4yUNYPQ8Gu0TgoGQQjBjk1Lrxot2xpgPQAy8vx+EOJgpg/yNZnYkmJZMxjDkTGVrwvXtOXZzmy2jti7PniET9hUBCU7aNHnoJJLzIf+Vb1CIRP0ypJl8GYCZx6HIYwOQH6EtcaeUqq3r+WXWv74ijIE7OApotmutM9buTvdOLdZddBzFPIjykc6cXO+W4E0kl6u9/OHtaZ3Nynh0ejBRafRWAVw2yU3T9SgQyICsmYWJCThkj14WqCJr2b7jfGlg9MkQOUG6/3f4xz2R3SgyUD8KiGsq/vdBE53zh0YA9gppLoum6AY+z61G1NhVGlrtps90txZBehuARUUz2dJC0pBMRy8XFwXMewDSIe6ATg25pHZsxHfhcalBpJncBl8pORs7oQl+GKBVxlnV4jm1pCzLU= |
135 | 149433e68974eb5c63ccb03f794d8b57339a80c4 0 iQIcBAABAgAGBQJX8AfCAAoJELnJ3IJKpb3VnNAP/3umS8tohcZTr4m6DJm9u4XGr2m3FWQmjTEfimGpsOuBC8oCgsq0eAlORYcV68zDax+vQHQu3pqfPXaX+y4ZFDuz0ForNRiPJn+Q+tj1+NrOT1e8h4gH0nSK4rDxEGaa6x01fyC/xQMqN6iNfzbLLB7+WadZlyBRbHaUeZFDlPxPDf1rjDpu1vqwtOrVzSxMasRGEceiUegwsFdFMAefCq0ya/pKe9oV+GgGfR4qNrP7BfpOBcN/Po/ctkFCbLOhHbu6M7HpBSiD57BUy5lfhQQtSjzCKEVTyrWEH0ApjjXKuJzLSyq7xsHKQSOPMgGQprGehyzdCETlZOdauGrC0t9vBCr7kXEhXtycqxBC03vknA2eNeV610VX+HgO9VpCVZWHtENiArhALCcpoEsJvT29xCBYpSii/wnTpYJFT9yW8tjQCxH0zrmEZJvO1/nMINEBQFScB/nzUELn9asnghNf6vMpSGy0fSM27j87VAXCzJ5lqa6WCL/RrKgvYflow/m5AzUfMQhpqpH1vmh4ba1zZ4123lgnW4pNZDV9kmwXrEagGbWe1rnmsMzHugsECiYQyIngjWzHfpHgyEr49Uc5bMM1MlTypeHYYL4kV1jJ8Ou0SC4aV+49p8Onmb2NlVY7JKV7hqDCuZPI164YXMxhPNst4XK0/ENhoOE+8iB6 |
136 | 438173c415874f6ac653efc1099dec9c9150e90f 0 iQIVAwUAWAZ3okemf/qjRqrOAQj89xAAw/6QZ07yqvH+aZHeGQfgJ/X1Nze/hSMzkqbwGkuUOWD5ztN8+c39EXCn8JlqyLUPD7uGzhTV0299k5fGRihLIseXr0hy/cvVW16uqfeKJ/4/qL9zLS3rwSAgWbaHd1s6UQZVfGCb8V6oC1dkJxfrE9h6kugBqV97wStIRxmCpMDjsFv/zdNwsv6eEdxbiMilLn2/IbWXFOVKJzzv9iEY5Pu5McFR+nnrMyUZQhyGtVPLSkoEPsOysorfCZaVLJ6MnVaJunp9XEv94Pqx9+k+shsQvJHWkc0Nnb6uDHZYkLR5v2AbFsbJ9jDHsdr9A7qeQTiZay7PGI0uPoIrkmLya3cYbU1ADhwloAeQ/3gZLaJaKEjrXcFSsz7AZ9yq74rTwiPulF8uqZxJUodk2m/zy83HBrxxp/vgxWJ5JP2WXPtB8qKY+05umAt4rQS+fd2H/xOu2V2d5Mq1WmgknLBLC0ItaNaf91sSHtgEy22GtcvWQE7S6VWU1PoSYmOLITdJKAsmb7Eq+yKDW9nt0lOpUu2wUhBGctlgXgcWOmJP6gL6edIg66czAkVBp/fpKNl8Z/A0hhpuH7nW7GW/mzLVQnc+JW4wqUVkwlur3NRfvSt5ZyTY/SaR++nRf62h7PHIjU+f0kWQRdCcEQ0X38b8iAjeXcsOW8NCOPpm0zcz3i8= |
137 | eab27446995210c334c3d06f1a659e3b9b5da769 0 iQIcBAABCAAGBQJYGNsXAAoJELnJ3IJKpb3Vf30QAK/dq5vEHEkufLGiYxxkvIyiRaswS+8jamXeHMQrdK8CuokcQYhEv9xiUI6FMIoX4Zc0xfoFCBc+X4qE+Ed9SFYWgQkDs/roJq1C1mTYA+KANMqJkDt00QZq536snFQvjCXAA5fwR/DpgGOOuGMRfvbjh7x8mPyVoPr4HDQCGFXnTYdn193HpTOqUsipzIV5OJqQ9p0sfJjwKP4ZfD0tqqdjTkNwMyJuwuRaReXFvGGCjH2PqkZE/FwQG0NJJjt0xaMUmv5U5tXHC9tEVobVV/qEslqfbH2v1YPF5d8Jmdn7F76FU5J0nTd+3rIVjYGYSt01cR6wtGnzvr/7kw9kbChw4wYhXxnmIALSd48FpA1qWjlPcAdHfUUwObxOxfqmlnBGtAQFK+p5VXCsxDZEIT9MSxscfCjyDQZpkY5S5B3PFIRg6V9bdl5a4rEt27aucuKTHj1Ok2vip4WfaIKk28YMjjzuOQRbr6Pp7mJcCC1/ERHUJdLsaQP+dy18z6XbDjX3O2JDRNYbCBexQyV/Kfrt5EOS5fXiByQUHv+PyR+9Ju6QWkkcFBfgsxq25kFl+eos4V9lxPOY5jDpw2BWu9TyHtTWkjL/YxDUGwUO9WA/WzrcT4skr9FYrFV/oEgi8MkwydC0cFICDfd6tr9upqkkr1W025Im1UBXXJ89bTVj |
138 | b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs= |
|
138 | b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 0 iQIVAwUAWECEaEemf/qjRqrOAQjuZw/+IWJKnKOsaUMcB9ly3Fo/eskqDL6A0j69IXTJDeBDGMoyGbQU/gZyX2yc6Sw3EhwTSCXu5vKpzg3a6e8MNrC1iHqli4wJ/jPY7XtmiqTYDixdsBLNk46VfOi73ooFe08wVDSNB65xpZsrtPDSioNmQ2kSJwSHb71UlauS4xGkM74vuDpWvX5OZRSfBqMh6NjG5RwBBnS8mzA0SW2dCI2jSc5SCGIzIZpzM0xUN21xzq0YQbrk9qEsmi7ks0eowdhUjeET2wSWwhOK4jS4IfMyRO7KueUB05yHs4mChj9kNFNWtSzXKwKBQbZzwO/1Y7IJjU+AsbWkiUu+6ipqBPQWzS28gCwGOrv5BcIJS+tzsvLUKWgcixyfy5UAqJ32gCdzKC54FUpT2zL6Ad0vXGM6WkpZA7yworN4RCFPexXbi0x2GSTLG8PyIoZ4Iwgtj5NtsEDHrz0380FxgnKUIC3ny2SVuPlyD+9wepD3QYcxdRk1BIzcFT9ZxNlgil3IXRVPwVejvQ/zr6/ILdhBnZ8ojjvVCy3b86B1OhZj/ZByYo5QaykVqWl0V9vJOZlZfvOpm2HiDhm/2uNrVWxG4O6EwhnekAdaJYmeLq1YbhIfGA6KVOaB9Yi5A5BxK9QGXBZ6sLj+dIUD3QR47r9yAqVQE8Gr/Oh6oQXBQqOQv7WzBBs= | |
139 | e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8= |
|
139 | e69874dc1f4e142746ff3df91e678a09c6fc208c 0 iQIVAwUAWG0oGUemf/qjRqrOAQh3uhAAu4TN7jkkgH7Hxn8S1cB6Ru0x8MQutzzzpjShhsE/G7nzCxsZ5eWdJ5ItwXmKhunb7T0og54CGcTxfmdPtCI7AhhHh9/TM2Hv1EBcsXCiwjG8E+P6X1UJkijgTGjNWuCvEDOsQAvgywslECBNnXp2QA5I5UdCMeqDdTAb8ujvbD8I4pxUx1xXKY18DgQGJh13mRlfkEVnPxUi2n8emnwPLjbVVkVISkMFUkaOl8a4fOeZC1xzDpoQocoH2Q8DYa9RCPPSHHSYPNMWGCdNGN2CoAurcHWWvc7jNU28/tBhTazfFv8LYh63lLQ8SIIPZHJAOxo45ufMspzUfNgoD6y3vlF5aW7DpdxwYHnueh7S1Fxgtd9cOnxmxQsgiF4LK0a+VXOi/Tli/fivZHDRCGHJvJgsMQm7pzkay9sGohes6jAnsOv2E8DwFC71FO/btrAp07IRFxH9WhUeMsXLMS9oBlubMxMM58M+xzSKApK6bz2MkLsx9cewmfmfbJnRIK1xDv+J+77pWWNGlxCCjl1WU+aA3M7G8HzwAqjL75ASOWtBrJlFXvlLgzobwwetg6cm44Rv1P39i3rDySZvi4BDlOQHWFupgMKiXnZ1PeL7eBDs/aawrE0V2ysNkf9An+XJZkos2JSLPWcoNigfXNUu5c1AqsERvHA246XJzqvCEK8= | |
140 | a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7 |
|
140 | a1dd2c0c479e0550040542e392e87bc91262517e 0 iQIcBAABCAAGBQJYgBBEAAoJELnJ3IJKpb3VJosP/10rr3onsVbL8E+ri1Q0TJc8uhqIsBVyD/vS1MJtbxRaAdIV92o13YOent0o5ASFF/0yzVKlOWPQRjsYYbYY967k1TruDaWxJAnpeFgMni2Afl/qyWrW4AY2xegZNZCfMmwJA+uSJDdAn+jPV40XbuCZ+OgyZo5S05dfclHFxdc8rPKeUsJtvs5PMmCL3iQl1sulp1ASjuhRtFWZgSFsC6rb2Y7evD66ikL93+0/BPEB4SVX17vB/XEzdmh4ntyt4+d1XAznLHS33IU8UHbTkUmLy+82WnNH7HBB2V7gO47m/HhvaYjEfeW0bqMzN3aOUf30Vy/wB4HHsvkBGDgL5PYVHRRovGcAuCmnYbOkawqbRewW5oDs7UT3HbShNpxCxfsYpo7deHr11zWA3ooWCSlIRRREU4BfwVmn+Ds1hT5HM28Q6zr6GQZegDUbiT9i1zU0EpyfTpH7gc6NTVQrO1z1p70NBnQMqXcHjWJwjSwLER2Qify9MjrGXTL6ofD5zVZKobeRmq94mf3lDq26H7coraM9X5h9xa49VgAcRHzn/WQ6wcFCKDQr6FT67hTUOlF7Jriv8/5h/ziSZr10fCObKeKWN8Skur29VIAHHY4NuUqbM55WohD+jZ2O3d4tze1eWm5MDgWD8RlrfYhQ+cLOwH65AOtts0LNZwlvJuC7 | |
141 | e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E= |
|
141 | e1526da1e6d84e03146151c9b6e6950fe9a83d7d 0 iQIVAwUAWJIKpUemf/qjRqrOAQjjThAAvl1K/GZBrkanwEPXomewHkWKTEy1s5d5oWmPPGrSb9G4LM/3/abSbQ7fnzkS6IWi4Ao0za68w/MohaVGKoMAslRbelaTqlus0wE3zxb2yQ/j2NeZzFnFEuR/vbUug7uzH+onko2jXrt7VcPNXLOa1/g5CWwaf/YPfJO4zv+atlzBHvuFcQCkdbcOJkccCnBUoR7y0PJoBJX6K7wJQ+hWLdcY4nVaxkGPRmsZJo9qogXZMw1CwJVjofxRI0S/5vMtEqh8srYsg7qlTNv8eYnwdpfuunn2mI7Khx10Tz85PZDnr3SGRiFvdfmT30pI7jL3bhOHALkaoy2VevteJjIyMxANTvjIUBNQUi+7Kj3VIKmkL9NAMAQBbshiQL1wTrXdqOeC8Nm1BfCQEox2yiC6pDFbXVbguwJZ5VKFizTTK6f6BdNYKTVx8lNEdjAsWH8ojgGWwGXBbTkClULHezJ/sODaZzK/+M/IzbGmlF27jJYpdJX8fUoybZNw9lXwIfQQWHmQHEOJYCljD9G1tvYY70+xAFexgBX5Ib48UK4DRITVNecyQZL7bLTzGcM0TAE0EtD4M42wawsYP3Cva9UxShFLICQdPoa4Wmfs6uLbXG1DDLol/j7b6bL+6W8E3AlW+aAPc8GZm51/w3VlYqqciWTc12OJpu8FiD0pZ/iBw+E= | |
142 | 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq |
|
142 | 25703b624d27e3917d978af56d6ad59331e0464a 0 iQIcBAABCAAGBQJYuMSwAAoJELnJ3IJKpb3VL3YP/iKWY3+K3cLUBD3Ne5MhfS7N3t6rlk9YD4kmU8JnVeV1oAfg36VCylpbJLBnmQdvC8AfBJOkXi6DHp9RKXXmlsOeoppdWYGX5RMOzuwuGPBii6cA6KFd+WBpBJlRtklz61qGCAtv4q8V1mga0yucihghzt4lD/PPz7mk6yUBL8s3rK+bIHGdEhnK2dfnn/U2G0K/vGgsYZESORISuBclCrrc7M3/v1D+FBMCEYX9FXYU4PhYkKXK1mSqzCB7oENu/WP4ijl1nRnEIyzBV9pKO4ylnXTpbZAr/e4PofzjzPXb0zume1191C3wvgJ4eDautGide/Pxls5s6fJRaIowf5XVYQ5srX/NC9N3K77Hy01t5u8nwcyAhjmajZYuB9j37nmiwFawqS/y2eHovrUjkGdelV8OM7/iAexPRC8i2NcGk0m6XuzWy1Dxr8453VD8Hh3tTeafd6v5uHXSLjwogpu/th5rk/i9/5GBzc1MyJgRTwBhVHi/yFxfyakrSU7HT2cwX/Lb5KgWccogqfvrFYQABIBanxLIeZxTv8OIjC75EYknbxYtvvgb35ZdJytwrTHSZN0S7Ua2dHx2KUnHB6thbLu/v9fYrCgFF76DK4Ogd22Cbvv6NqRoglG26d0bqdwz/l1n3o416YjupteW8LMxHzuwiJy69WP1yi10eNDq | |
143 | ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1 |
|
143 | ed5b25874d998ababb181a939dd37a16ea644435 0 iQIcBAABCAAGBQJY4r/gAAoJELnJ3IJKpb3VtwYP/RuTmo252ExXQk/n5zGJZvZQnI86vO1+yGuyOlGFFBwf1v3sOLW1HD7fxF6/GdT8CSQrRqtC17Ya3qtayfY/0AEiSuH2bklBXSB1H5wPyguS5iLqyilCJY0SkHYBIDhJ0xftuIjsa805wdMm3OdclnTOkYT+K1WL8Ylbx/Ni2Lsx1rPpYdcQ/HlTkr5ca1ZbNOOSxSNI4+ilGlKbdSYeEsmqB2sDEiSaDEoxGGoSgzAE9+5Q2FfCGXV0bq4vfmEPoT9lhB4kANE+gcFUvsJTu8Z7EdF8y3CJLiy8+KHO/VLKTGJ1pMperbig9nAXl1AOt+izBFGJGTolbR/ShkkDWB/QVcqIF5CysAWMgnHAx7HjnMDBOANcKzhMMfOi3GUvOCNNIqIIoJHKRHaRk0YbMdt7z2mKpTrRQ9Zadz764jXOqqrPgQFM3jkBHzAvZz9yShrHGh42Y+iReAF9pAN0xPjyZ5Y2qp+DSl0bIQqrAet6Zd3QuoJtXczAeRrAvgn7O9MyLnMyE5s7xxI7o8M7zfWtChLF8ytJUzmRo3iVJNOJH+Zls9N30PGw6vubQAnB5ieaVTv8lnNpcAnEQD/i0tmRSxzyyqoOQbnItIPKFOsaYW+eX9sgJmObU3yDc5k3cs+yAFD2CM/uiUsLcTKyxPNcP1JHBYpwhOjIGczSHVS1 | |
144 | 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY |
|
144 | 77eaf9539499a1b8be259ffe7ada787d07857f80 0 iQIcBAABCAAGBQJY9iz9AAoJELnJ3IJKpb3VYqEQAJNkB09sXgYRLA4kGQv3p4v02q9WZ1lHkAhOlNwIh7Zp+pGvT33nHZffByA0v+xtJNV9TNMIFFjkCg3jl5Z42CCe33ZlezGBAzXU+70QPvOR0ojlYk+FdMfeSyCBzWYokIpImwNmwNGKVrUAfywdikCsUC2aRjKg4Mn7GnqWl9WrBG6JEOOUamdx8qV2f6g/utRiqj4YQ86P0y4K3yakwc1LMM+vRfrwvsf1+DZ9t7QRENNKQ6gRnUdfryqSFIWn1VkBVMwIN5W3yIrTMfgH1wAZxbnYHrN5qDK7mcbP7bOA3XWJuEC+3QRnheRFd/21O1dMFuYjaKApXPHRlTGRMOaz2eydbfBopUS1BtfYEh4/B/1yJb9/HDw6LiAjea7ACHiaNec83z643005AvtUuWhjX3QTPkYlQzWaosanGy1IOGtXCPp1L0A+9gUpqyqycfPjQCbST5KRzYSZn3Ngmed5Bb6jsgvg5e5y0En/SQgK/pTKnxemAmFFVvIIrrWGRKj0AD0IFEHEepmwprPRs97EZPoBPFAGmVRuASBeIhFQxSDIXV0ebHJoUmz5w1rTy7U3Eq0ff6nW14kjWOUplatXz5LpWJ3VkZKrI+4gelto5xpTI6gJl2nmezhXQIlInk17cPuxmiHjeMdlOHZRh/zICLhQNL5fGne0ZL+qlrXY | |
145 | 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk= |
|
145 | 616e788321cc4ae9975b7f0c54c849f36d82182b 0 iQIVAwUAWPZuQkemf/qjRqrOAQjFlg/9HXEegJMv8FP+uILPoaiA2UCiqWUL2MVJ0K1cvafkwUq+Iwir8sTe4VJ1v6V+ZRiOuzs4HMnoGJrIks4vHRbAxJ3J6xCfvrsbHdl59grv54vuoL5FlZvkdIe8L7/ovKrUmNwPWZX2v+ffFPrsEBeVlVrXpp4wOPhDxCKTmjYVOp87YqXfJsud7EQFPqpV4jX8DEDtJWT95OE9x0srBg0HpSE95d/BM4TuXTVNI8fV41YEqearKeFIhLxu37HxUmGmkAALCi8RJmm4hVpUHgk3tAVzImI8DglUqnC6VEfaYb+PKzIqHelhb66JO/48qN2S/JXihpNHAVUBysBT0b1xEnc6eNsF2fQEB+bEcf8IGj7/ILee1cmwPtoK2OXR2+xWWWjlu2keVcKeI0yAajJw/dP21yvVzVq0ypst7iD+EGHLJWJSmZscbyH5ICr+TJ5yQvIGZJtfsAdAUUTM2xpqSDW4mT5kYyg75URbQ3AKI7lOhJBmkkGQErE4zIQMkaAqcWziVF20xiRWfJoFxT2fK5weaRGIjELH49NLlyvZxYc4LlRo9lIdC7l/6lYDdTx15VuEj1zx/91y/d7OtPm+KCA2Bbdqth8m/fMD8trfQ6jSG/wgsvjZ+S0eoXa92qIR/igsCI+6EwP7duuzL2iyKOPXupQVNN10PKI7EuKv4Lk= | |
146 | bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y= |
|
146 | bb96d4a497432722623ae60d9bc734a1e360179e 0 iQIVAwUAWQkDfEemf/qjRqrOAQierQ/7BuQ0IW0T0cglgqIgkLuYLx2VXJCTEtRNCWmrH2UMK7fAdpAhN0xf+xedv56zYHrlyHpbskDbWvsKIHJdw/4bQitXaIFTyuMMtSR5vXy4Nly34O/Xs2uGb3Y5qwdubeK2nZr4lSPgiRHb/zI/B1Oy8GX830ljmIOY7B0nUWy4DrXcy/M41SnAMLFyD1K6T/8tkv7M4Fai7dQoF9EmIIkShVPktI3lqp3m7infZ4XnJqcqUB0NSfQZwZaUaoalOdCvEIe3ab5ewgl/CuvlDI4oqMQGjXCtNLbtiZSwo6hvudO6ewT+Zn/VdabkZyRtXUxu56ajjd6h22nU1+vknqDzo5tzw6oh1Ubzf8tzyv3Gmmr+tlOjzfK7tXXnT3vR9aEGli0qri0DzOpsDSY0pDC7EsS4LINPoNdsGQrGQdoX++AISROlNjvyuo4Vrp26tPHCSupkKOXuZaiozycAa2Q+aI1EvkPZSXe8SAXKDVtFn05ZB58YVkFzZKAYAxkE/ven59zb4aIbOgR12tZbJoZZsVHrlf/TcDtiXVfIMEMsCtJ1tPgD1rAsEURWRxK3mJ0Ev6KTHgNz4PeBhq1gIP/Y665aX2+cCjc4+vApPUienh5aOr1bQFpIDyYZsafHGMUFNCwRh8bX98oTGa0hjqz4ypwXE4Wztjdc+48UiHARp/Y= | |
147 | c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo= |
|
147 | c850f0ed54c1d42f9aa079ad528f8127e5775217 0 iQIVAwUAWTQINUemf/qjRqrOAQjZDw//b4pEgHYfWRVDEmLZtevysfhlJzbSyLAnWgNnRUVdSwl4WRF1r6ds/q7N4Ege5wQHjOpRtx4jC3y/riMbrLUlaeUXzCdqKgm4JcINS1nXy3IfkeDdUKyOR9upjaVhIEzCMRpyzabdYuflh5CoxayO7GFk2iZ8c1oAl4QzuLSspn9w+znqDg0HrMDbRNijStSulNjkqutih9UqT/PYizhE1UjL0NSnpYyD1vDljsHModJc2dhSzuZ1c4VFZHkienk+CNyeLtVKg8aC+Ej/Ppwq6FlE461T/RxOEzf+WFAc9F4iJibSN2kAFB4ySJ43y+OKkvzAwc5XbUx0y6OlWn2Ph+5T54sIwqasG3DjXyVrwVtAvCrcWUmOyS0RfkKoDVepMPIhFXyrhGqUYSq25Gt6tHVtIrlcWARIGGWlsE+PSHi87qcnSjs4xUzZwVvJWz4fuM1AUG/GTpyt4w3kB85XQikIINkmSTmsM/2/ar75T6jBL3kqOCGOL3n7bVZsGXllhkkQ7e/jqPPWnNXm8scDYdT3WENNu34zZp5ZmqdTXPAIIaqGswnU04KfUSEoYtOMri3E2VvrgMkiINm9BOKpgeTsMb3dkYRw2ZY3UAH9QfdX9BZywk6v3kkE5ghLWMUoQ4sqRlTo7mJKA8+EodjmIGRV/kAv1f7pigg6pIWWEyo= | |
148 | 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5 |
|
148 | 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 0 iQIcBAABCAAGBQJZXQSmAAoJELnJ3IJKpb3VmTwP/jsxFTlKzWU8EnEhEViiP2YREOD3AXU7685DIMnoyVAsZgxrt0CG6Y92b5sINCeh5B0ORPQ7+xi2Xmz6tX8EeAR+/Dpdx6K623yExf8kq91zgfMvYkatNMu6ZVfywibYZAASq02oKoX7WqSPcQG/OwgtdFiGacCrG5iMH7wRv0N9hPc6D5vAV8/H/Inq8twpSG5SGDpCdKj7KPZiY8DFu/3OXatJtl+byg8zWT4FCYKkBPvmZp8/sRhDKBgwr3RvF1p84uuw/QxXjt+DmGxgtjvObjHr+shCMcKBAuZ4RtZmyEo/0L81uaTElHu1ejsEzsEKxs+8YifnH070PTFoV4VXQyXfTc8AyaqHE6rzX96a/HjQiJnL4dFeTZIrUhGK3AkObFLWJxVTo4J8+oliBQQldIh1H2yb1ZMfwapLnUGIqSieHDGZ6K2ccNJK8Q7IRhTCvYc0cjsnbwTpV4cebGqf3WXZhX0cZN+TNfhh/HGRzR1EeAAavjJqpDam1OBA5TmtJd/lHLIRVR5jyG+r4SK0XDlJ8uSfah7MpVH6aQ6UrycPyFusGXQlIqJ1DYQaBrI/SRJfIvRUmvVz9WgKLe83oC3Ui3aWR9rNjMb2InuQuXjeZaeaYfBAUYACcGfCZpZZvoEkMHCqtTng1rbbFnKMFk5kVy9YWuVgK9Iuh0O5 | |
149 | 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0= |
|
149 | 857876ebaed4e315f63157bd157d6ce553c7ab73 0 iQIVAwUAWW9XW0emf/qjRqrOAQhI7A//cKXIM4l8vrWWsc1Os4knXm/2UaexmAwV70TpviKL9RxCy5zBP/EapCaGRCH8uNPOQTkWGR9Aucm3CtxhggCMzULQxxeH86mEpWf1xILWLySPXW/t2f+2zxrwLSAxxqFJtuYv83Pe8CnS3y4BlgHnBKYXH8XXuW8uvfc0lHKblhrspGBIAinx7vPLoGQcpYrn9USWUKq5d9FaCLQCDT9501FHKf5dlYQajevCUDnewtn5ohelOXjTJQClW3aygv/z+98Kq7ZhayeIiZu+SeP+Ay7lZPklXcy6eyRiQtGCa1yesb9v53jKtgxWewV4o6zyuUesdknZ/IBeNUgw8LepqTIJo6/ckyvBOsSQcda81DuYNUChZLYTSXYPHEUmYiz6CvNoLEgHF/oO5p6CZXOPWbmLWrAFd+0+1Tuq8BSh+PSdEREM3ZLOikkXoVzTKBgu4zpMvmBnjliBg7WhixkcG0v5WunlV9/oHAIpsKdL7AatU+oCPulp+xDpTKzRazEemYiWG9zYKzwSMk9Nc17e2tk+EtFSPsPo4iVCXMgdIZSTNBvynKEFXZQVPWVa+bYRdAmbSY8awiX7exxYL10UcpnN2q/AH/F7rQzAmo8eZ3OtD0+3Nk3JRx0/CMyzKLPYDpdUgwmaPb+s2Bsy7f7TfmA7jTa69YqB1/zVwlWULr0= | |
|
150 | 5544af8622863796a0027566f6b646e10d522c4c 0 iQIcBAABCAAGBQJZjJflAAoJELnJ3IJKpb3V19kQALCvTdPrpce5+rBNbFtLGNFxTMDol1dUy87EUAWiArnfOzW3rKBdYxvxDL23BpgUfjRm1fAXdayVvlj6VC6Dyb195OLmc/I9z7SjFxsfmxWilF6U0GIa3W0x37i05EjfcccrBIuSLrvR6AWyJhjLOBCcyAqD/HcEom00/L+o2ry9CDQNLEeVuNewJiupcUqsTIG2yS26lWbtLZuoqS2T4Nlg8wjJhiSXlsZSuAF55iUJKlTQP6KyWReiaYuEVfm/Bybp0A2bFcZCYpWPwnwKBdSCHhIalH8PO57gh9J7xJVnyyBg5PU6n4l6PrGOmKhNiU/xyNe36tEAdMW6svcVvt8hiY0dnwWqR6wgnFFDu0lnTMUcjsy5M5FBY6wSw9Fph8zcNRzYyaeUbasNonPvrIrk21nT3ET3RzVR3ri2nJDVF+0GlpogGfk9k7wY3808091BMsyV3448ZPKQeWiK4Yy4UOUwbKV7YAsS5MdDnC1uKjl4GwLn9UCY/+Q2/2R0CBZ13Tox+Nbo6hBRuRGtFIbLK9j7IIUhhZrIZFSh8cDNkC+UMaS52L5z7ECvoYIUpw+MJ7NkMLHIVGZ2Nxn0C7IbGO6uHyR7D6bdNpxilU+WZStHk0ppZItRTm/htar4jifnaCI8F8OQNYmZ3cQhxx6qV2Tyow8arvWb1NYXrocG | |||
|
151 | 943c91326b23954e6e1c6960d0239511f9530258 0 iQIcBAABCAAGBQJZjKKZAAoJELnJ3IJKpb3VGQkP/0iF6Khef0lBaRhbSAPwa7RUBb3iaBeuwmeic/hUjMoU1E5NR36bDDaF3u2di5mIYPBONFIeCPf9/DKyFkidueX1UnlAQa3mjh/QfKTb4/yO2Nrk7eH+QtrYxVUUYYjwgp4rS0Nd/++I1IUOor54vqJzJ7ZnM5O1RsE7VI1esAC/BTlUuO354bbm08B0owsZBwVvcVvpV4zeTvq5qyPxBJ3M0kw83Pgwh3JZB9IYhOabhSUBcA2fIPHgYGYnJVC+bLOeMWI1HJkJeoYfClNUiQUjAmi0cdTC733eQnHkDw7xyyFi+zkKu6JmU1opxkHSuj4Hrjul7Gtw3vVWWUPufz3AK7oymNp2Xr5y1HQLDtNJP3jicTTG1ae2TdX5Az3ze0I8VGbpR81/6ShAvY2cSKttV3I+2k4epxTTTf0xaZS1eUdnFOox6acElG2reNzx7EYYxpHj17K8N2qNzyY78iPgbJ+L39PBFoiGXMZJqWCxxIHoK1MxlXa8WwSnsXAU768dJvEn2N1x3fl+aeaWzeM4/5Qd83YjFuCeycuRnIo3rejSX3rWFAwZE0qQHKI5YWdKDLxIfdHTjdfMP7np+zLcHt0DV/dHmj2hKQgU0OK04fx7BrmdS1tw67Y9bL3H3TDohn7khU1FrqrKVuqSLbLsxnNyWRbZQF+DCoYrHlIW |
.hgtags
@@ -1,162 +1,164 @@
1 | d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f |
|
1 | d40cc5aacc31ed673d9b5b24f98bee78c283062c 0.4f | |
2 | 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e |
|
2 | 1c590d34bf61e2ea12c71738e5a746cd74586157 0.4e | |
3 | 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d |
|
3 | 7eca4cfa8aad5fce9a04f7d8acadcd0452e2f34e 0.4d | |
4 | b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c |
|
4 | b4d0c3786ad3e47beacf8412157326a32b6d25a4 0.4c | |
5 | f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5 |
|
5 | f40273b0ad7b3a6d3012fd37736d0611f41ecf54 0.5 | |
6 | 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b |
|
6 | 0a28dfe59f8fab54a5118c5be4f40da34a53cdb7 0.5b | |
7 | 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6 |
|
7 | 12e0fdbc57a0be78f0e817fd1d170a3615cd35da 0.6 | |
8 | 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b |
|
8 | 4ccf3de52989b14c3d84e1097f59e39a992e00bd 0.6b | |
9 | eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c |
|
9 | eac9c8efcd9bd8244e72fb6821f769f450457a32 0.6c | |
10 | 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7 |
|
10 | 979c049974485125e1f9357f6bbe9c1b548a64c3 0.7 | |
11 | 3a56574f329a368d645853e0f9e09472aee62349 0.8 |
|
11 | 3a56574f329a368d645853e0f9e09472aee62349 0.8 | |
12 | 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1 |
|
12 | 6a03cff2b0f5d30281e6addefe96b993582f2eac 0.8.1 | |
13 | 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9 |
|
13 | 35fb62a3a673d5322f6274a44ba6456e5e4b3b37 0.9 | |
14 | 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1 |
|
14 | 2be3001847cb18a23c403439d9e7d0ace30804e9 0.9.1 | |
15 | 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2 |
|
15 | 36a957364b1b89c150f2d0e60a99befe0ee08bd3 0.9.2 | |
16 | 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3 |
|
16 | 27230c29bfec36d5540fbe1c976810aefecfd1d2 0.9.3 | |
17 | fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4 |
|
17 | fb4b6d5fe100b0886f8bc3d6731ec0e5ed5c4694 0.9.4 | |
18 | 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5 |
|
18 | 23889160905a1b09fffe1c07378e9fc1827606eb 0.9.5 | |
19 | bae2e9c838e90a393bae3973a7850280413e091a 1.0 |
|
19 | bae2e9c838e90a393bae3973a7850280413e091a 1.0 | |
20 | d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1 |
|
20 | d5cbbe2c49cee22a9fbeb9ea41daa0ac4e26b846 1.0.1 | |
21 | d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2 |
|
21 | d2375bbee6d47e62ba8e415c86e83a465dc4dce9 1.0.2 | |
22 | 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1 |
|
22 | 2a67430f92f15ea5159c26b09ec4839a0c549a26 1.1 | |
23 | 3773e510d433969e277b1863c317b674cbee2065 1.1.1 |
|
23 | 3773e510d433969e277b1863c317b674cbee2065 1.1.1 | |
24 | 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2 |
|
24 | 11a4eb81fb4f4742451591489e2797dc47903277 1.1.2 | |
25 | 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2 |
|
25 | 11efa41037e280d08cfb07c09ad485df30fb0ea8 1.2 | |
26 | 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1 |
|
26 | 02981000012e3adf40c4849bd7b3d5618f9ce82d 1.2.1 | |
27 | 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3 |
|
27 | 196d40e7c885fa6e95f89134809b3ec7bdbca34b 1.3 | |
28 | 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1 |
|
28 | 3ef6c14a1e8e83a31226f5881b7fe6095bbfa6f6 1.3.1 | |
29 | 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4 |
|
29 | 31ec469f9b556f11819937cf68ee53f2be927ebf 1.4 | |
30 | 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1 |
|
30 | 439d7ea6fe3aa4ab9ec274a68846779153789de9 1.4.1 | |
31 | 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2 |
|
31 | 296a0b14a68621f6990c54fdba0083f6f20935bf 1.4.2 | |
32 | 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3 |
|
32 | 4aa619c4c2c09907034d9824ebb1dd0e878206eb 1.4.3 | |
33 | ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5 |
|
33 | ff2704a8ded37fbebd8b6eb5ec733731d725da8a 1.5 | |
34 | 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1 |
|
34 | 2b01dab594167bc0dd33331dbaa6dca3dca1b3aa 1.5.1 | |
35 | 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2 |
|
35 | 39f725929f0c48c5fb3b90c071fc3066012456ca 1.5.2 | |
36 | fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3 |
|
36 | fdcf80f26604f233dc4d8f0a5ef9d7470e317e8a 1.5.3 | |
37 | 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4 |
|
37 | 24fe2629c6fd0c74c90bd066e77387c2b02e8437 1.5.4 | |
38 | f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6 |
|
38 | f786fc4b8764cd2a5526d259cf2f94d8a66924d9 1.6 | |
39 | bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1 |
|
39 | bf1774d95bde614af3956d92b20e2a0c68c5fec7 1.6.1 | |
40 | c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2 |
|
40 | c00f03a4982e467fb6b6bd45908767db6df4771d 1.6.2 | |
41 | ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3 |
|
41 | ff5cec76b1c5b6be9c3bb923aae8c3c6d079d6b9 1.6.3 | |
42 | 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4 |
|
42 | 93d8bff78c96fe7e33237b257558ee97290048a4 1.6.4 | |
43 | 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7 |
|
43 | 333421b9e0f96c7bc788e5667c146a58a9440a55 1.7 | |
44 | 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1 |
|
44 | 4438875ec01bd0fc32be92b0872eb6daeed4d44f 1.7.1 | |
45 | 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2 |
|
45 | 6aff4f144ad356311318b0011df0bb21f2c97429 1.7.2 | |
46 | e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3 |
|
46 | e3bf16703e2601de99e563cdb3a5d50b64e6d320 1.7.3 | |
47 | a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4 |
|
47 | a6c855c32ea081da3c3b8ff628f1847ff271482f 1.7.4 | |
48 | 2b2155623ee2559caf288fd333f30475966c4525 1.7.5 |
|
48 | 2b2155623ee2559caf288fd333f30475966c4525 1.7.5 | |
49 | 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8 |
|
49 | 2616325766e3504c8ae7c84bd15ee610901fe91d 1.8 | |
50 | aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1 |
|
50 | aa1f3be38ab127280761889d2dca906ca465b5f4 1.8.1 | |
51 | b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2 |
|
51 | b032bec2c0a651ca0ddecb65714bfe6770f67d70 1.8.2 | |
52 | 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3 |
|
52 | 3cb1e95676ad089596bd81d0937cad37d6e3b7fb 1.8.3 | |
53 | 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4 |
|
53 | 733af5d9f6b22387913e1d11350fb8cb7c1487dd 1.8.4 | |
54 | de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9 |
|
54 | de9eb6b1da4fc522b1cab16d86ca166204c24f25 1.9 | |
55 | 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1 |
|
55 | 4a43e23b8c55b4566b8200bf69fe2158485a2634 1.9.1 | |
56 | d629f1e89021103f1753addcef6b310e4435b184 1.9.2 |
|
56 | d629f1e89021103f1753addcef6b310e4435b184 1.9.2 | |
57 | 351a9292e430e35766c552066ed3e87c557b803b 1.9.3 |
|
57 | 351a9292e430e35766c552066ed3e87c557b803b 1.9.3 | |
58 | 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc |
|
58 | 384082750f2c51dc917d85a7145748330fa6ef4d 2.0-rc | |
59 | 41453d55b481ddfcc1dacb445179649e24ca861d 2.0 |
|
59 | 41453d55b481ddfcc1dacb445179649e24ca861d 2.0 | |
60 | 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1 |
|
60 | 195dbd1cef0c2f9f8bcf4ea303238105f716bda3 2.0.1 | |
61 | 6344043924497cd06d781d9014c66802285072e4 2.0.2 |
|
61 | 6344043924497cd06d781d9014c66802285072e4 2.0.2 | |
62 | db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc |
|
62 | db33555eafeaf9df1e18950e29439eaa706d399b 2.1-rc | |
63 | 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1 |
|
63 | 2aa5b51f310fb3befd26bed99c02267f5c12c734 2.1 | |
64 | 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1 |
|
64 | 53e2cd303ecf8ca7c7eeebd785c34e5ed6b0f4a4 2.1.1 | |
65 | b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2 |
|
65 | b9bd95e61b49c221c4cca24e6da7c946fc02f992 2.1.2 | |
66 | d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc |
|
66 | d9e2f09d5488c395ae9ddbb320ceacd24757e055 2.2-rc | |
67 | 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2 |
|
67 | 00182b3d087909e3c3ae44761efecdde8f319ef3 2.2 | |
68 | 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1 |
|
68 | 5983de86462c5a9f42a3ad0f5e90ce5b1d221d25 2.2.1 | |
69 | 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2 |
|
69 | 85a358df5bbbe404ca25730c9c459b34263441dc 2.2.2 | |
70 | b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3 |
|
70 | b013baa3898e117959984fc64c29d8c784d2f28b 2.2.3 | |
71 | a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc |
|
71 | a06e2681dd1786e2354d84a5fa9c1c88dd4fa3e0 2.3-rc | |
72 | 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3 |
|
72 | 7f5094bb3f423fc799e471aac2aee81a7ce57a0b 2.3 | |
73 | 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1 |
|
73 | 072209ae4ddb654eb2d5fd35bff358c738414432 2.3.1 | |
74 | b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2 |
|
74 | b3f0f9a39c4e1d0250048cd803ab03542d6f140a 2.3.2 | |
75 | d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc |
|
75 | d118a4f4fd16d9b558ec3f3e87bfee772861d2b7 2.4-rc | |
76 | 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4 |
|
76 | 195ad823b5d58c68903a6153a25e3fb4ed25239d 2.4 | |
77 | 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1 |
|
77 | 0c10cf8191469e7c3c8844922e17e71a176cb7cb 2.4.1 | |
78 | a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2 |
|
78 | a4765077b65e6ae29ba42bab7834717b5072d5ba 2.4.2 | |
79 | f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc |
|
79 | f5fbe15ca7449f2c9a3cf817c86d0ae68b307214 2.5-rc | |
80 | a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5 |
|
80 | a6088c05e43a8aee0472ca3a4f6f8d7dd914ebbf 2.5 | |
81 | 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1 |
|
81 | 7511d4df752e61fe7ae4f3682e0a0008573b0402 2.5.1 | |
82 | 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2 |
|
82 | 5b7175377babacce80a6c1e12366d8032a6d4340 2.5.2 | |
83 | 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3 |
|
83 | 50c922c1b5145dab8baefefb0437d363b6a6c21c 2.5.3 | |
84 | 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4 |
|
84 | 8a7bd2dccd44ed571afe7424cd7f95594f27c092 2.5.4 | |
85 | 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc |
|
85 | 292cd385856d98bacb2c3086f8897bc660c2beea 2.6-rc | |
86 | 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6 |
|
86 | 23f785b38af38d2fca6b8f3db56b8007a84cd73a 2.6 | |
87 | ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1 |
|
87 | ddc7a6be20212d18f3e27d9d7e6f079a66d96f21 2.6.1 | |
88 | cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2 |
|
88 | cceaf7af4c9e9e6fa2dbfdcfe9856c5da69c4ffd 2.6.2 | |
89 | 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3 |
|
89 | 009794acc6e37a650f0fae37872e733382ac1c0c 2.6.3 | |
90 | f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc |
|
90 | f0d7721d7322dcfb5af33599c2543f27335334bb 2.7-rc | |
91 | f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7 |
|
91 | f37b5a17e6a0ee17afde2cdde5393dd74715fb58 2.7 | |
92 | 335a558f81dc73afeab4d7be63617392b130117f 2.7.1 |
|
92 | 335a558f81dc73afeab4d7be63617392b130117f 2.7.1 | |
93 | e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2 |
|
93 | e7fa36d2ad3a7944a52dca126458d6f482db3524 2.7.2 | |
94 | 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc |
|
94 | 1596f2d8f2421314b1ddead8f7d0c91009358994 2.8-rc | |
95 | d825e4025e39d1c39db943cdc89818abd0a87c27 2.8 |
|
95 | d825e4025e39d1c39db943cdc89818abd0a87c27 2.8 | |
96 | 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1 |
|
96 | 209e04a06467e2969c0cc6501335be0406d46ef0 2.8.1 | |
97 | ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2 |
|
97 | ca387377df7a3a67dbb90b6336b781cdadc3ef41 2.8.2 | |
98 | 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc |
|
98 | 8862469e16f9236208581b20de5f96bd13cc039d 2.9-rc | |
99 | 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9 |
|
99 | 3cec5134e9c4bceab6a00c60f52a4f80677a78f2 2.9 | |
100 | b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1 |
|
100 | b96cb15ec9e04d8ac5ee08b34fcbbe4200588965 2.9.1 | |
101 | 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2 |
|
101 | 3f83fc5cfe715d292069ee8417c83804f6c6c1e4 2.9.2 | |
102 | 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc |
|
102 | 564f55b251224f16508dd1311452db7780dafe2b 3.0-rc | |
103 | 2195ac506c6ababe86985b932f4948837c0891b5 3.0 |
|
103 | 2195ac506c6ababe86985b932f4948837c0891b5 3.0 | |
104 | 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1 |
|
104 | 269c80ee5b3cb3684fa8edc61501b3506d02eb10 3.0.1 | |
105 | 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2 |
|
105 | 2d8cd3d0e83c7336c0cb45a9f88638363f993848 3.0.2 | |
106 | 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc |
|
106 | 6c36dc6cd61a0e1b563f1d51e55bdf4dacf12162 3.1-rc | |
107 | 3178e49892020336491cdc6945885c4de26ffa8b 3.1 |
|
107 | 3178e49892020336491cdc6945885c4de26ffa8b 3.1 | |
108 | 5dc91146f35369949ea56b40172308158b59063a 3.1.1 |
|
108 | 5dc91146f35369949ea56b40172308158b59063a 3.1.1 | |
109 | f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2 |
|
109 | f768c888aaa68d12dd7f509dcc7f01c9584357d0 3.1.2 | |
110 | 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc |
|
110 | 7f8d16af8cae246fa5a48e723d48d58b015aed94 3.2-rc | |
111 | ced632394371a36953ce4d394f86278ae51a2aae 3.2 |
|
111 | ced632394371a36953ce4d394f86278ae51a2aae 3.2 | |
112 | 643c58303fb0ec020907af28b9e486be299ba043 3.2.1 |
|
112 | 643c58303fb0ec020907af28b9e486be299ba043 3.2.1 | |
113 | 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2 |
|
113 | 902554884335e5ca3661d63be9978eb4aec3f68a 3.2.2 | |
114 | 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3 |
|
114 | 6dad422ecc5adb63d9fa649eeb8e05a5f9bc4900 3.2.3 | |
115 | 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4 |
|
115 | 1265a3a71d75396f5d4cf6935ae7d9ba5407a547 3.2.4 | |
116 | db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc |
|
116 | db8e3f7948b1fdeb9ad12d448fc3525759908b9f 3.3-rc | |
117 | fbdd5195528fae4f41feebc1838215c110b25d6a 3.3 |
|
117 | fbdd5195528fae4f41feebc1838215c110b25d6a 3.3 | |
118 | 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1 |
|
118 | 5b4ed033390bf6e2879c8f5c28c84e1ee3b87231 3.3.1 | |
119 | 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2 |
|
119 | 07a92bbd02e5e3a625e0820389b47786b02b2cea 3.3.2 | |
120 | 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3 |
|
120 | 2e2e9a0750f91a6fe0ad88e4de34f8efefdcab08 3.3.3 | |
121 | e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc |
|
121 | e89f909edffad558b56f4affa8239e4832f88de0 3.4-rc | |
122 | 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4 |
|
122 | 8cc6036bca532e06681c5a8fa37efaa812de67b5 3.4 | |
123 | ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1 |
|
123 | ed18f4acf435a2824c6f49fba40f42b9df5da7ad 3.4.1 | |
124 | 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2 |
|
124 | 540cd0ddac49c1125b2e013aa2ff18ecbd4dd954 3.4.2 | |
125 | 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc |
|
125 | 96a38d44ba093bd1d1ecfd34119e94056030278b 3.5-rc | |
126 | 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5 |
|
126 | 21aa1c313b05b1a85f8ffa1120d51579ddf6bf24 3.5 | |
127 | 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1 |
|
127 | 1a45e49a6bed023deb229102a8903234d18054d3 3.5.1 | |
128 | 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2 |
|
128 | 9a466b9f9792e3ad7ae3fc6c43c3ff2e136b718d 3.5.2 | |
129 | b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc |
|
129 | b66e3ca0b90c3095ea28dfd39aa24247bebf5c20 3.6-rc | |
130 | 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6 |
|
130 | 47dd34f2e7272be9e3b2a5a83cd0d20be44293f4 3.6 | |
131 | 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1 |
|
131 | 1aa5083cbebbe7575c88f3402ab377539b484897 3.6.1 | |
132 | 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2 |
|
132 | 2d437a0f3355834a9485bbbeb30a52a052c98f19 3.6.2 | |
133 | ea389970c08449440587712117f178d33bab3f1e 3.6.3 |
|
133 | ea389970c08449440587712117f178d33bab3f1e 3.6.3 | |
134 | 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc |
|
134 | 158bdc8965720ca4061f8f8d806563cfc7cdb62e 3.7-rc | |
135 | 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7 |
|
135 | 2408645de650d8a29a6ce9e7dce601d8dd0d1474 3.7 | |
136 | b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1 |
|
136 | b698abf971e7377d9b7ec7fc8c52df45255b0329 3.7.1 | |
137 | d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2 |
|
137 | d493d64757eb45ada99fcb3693e479a51b7782da 3.7.2 | |
138 | ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3 |
|
138 | ae279d4a19e9683214cbd1fe8298cf0b50571432 3.7.3 | |
139 | 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc |
|
139 | 740156eedf2c450aee58b1a90b0e826f47c5da64 3.8-rc | |
140 | f85de28eae32e7d3064b1a1321309071bbaaa069 3.8 |
|
140 | f85de28eae32e7d3064b1a1321309071bbaaa069 3.8 | |
141 | a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1 |
|
141 | a56296f55a5e1038ea5016dace2076b693c28a56 3.8.1 | |
142 | aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2 |
|
142 | aaabed77791a75968a12b8c43ad263631a23ee81 3.8.2 | |
143 | a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3 |
|
143 | a9764ab80e11bcf6a37255db7dd079011f767c6c 3.8.3 | |
144 | 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4 |
|
144 | 26a5d605b8683a292bb89aea11f37a81b06ac016 3.8.4 | |
145 | 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc |
|
145 | 519bb4f9d3a47a6e83c2b414d58811ed38f503c2 3.9-rc | |
146 | 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9 |
|
146 | 299546f84e68dbb9bd026f0f3a974ce4bdb93686 3.9 | |
147 | ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1 |
|
147 | ccd436f7db6d5d7b9af89715179b911d031d44f1 3.9.1 | |
148 | 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2 |
|
148 | 149433e68974eb5c63ccb03f794d8b57339a80c4 3.9.2 | |
149 | 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc |
|
149 | 438173c415874f6ac653efc1099dec9c9150e90f 4.0-rc | |
150 | eab27446995210c334c3d06f1a659e3b9b5da769 4.0 |
|
150 | eab27446995210c334c3d06f1a659e3b9b5da769 4.0 | |
151 | b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1 |
|
151 | b3b1ae98f6a0e14c1e1ba806a6c18e193b6dae5c 4.0.1 | |
152 | e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2 |
|
152 | e69874dc1f4e142746ff3df91e678a09c6fc208c 4.0.2 | |
153 | a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc |
|
153 | a1dd2c0c479e0550040542e392e87bc91262517e 4.1-rc | |
154 | e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1 |
|
154 | e1526da1e6d84e03146151c9b6e6950fe9a83d7d 4.1 | |
155 | 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1 |
|
155 | 25703b624d27e3917d978af56d6ad59331e0464a 4.1.1 | |
156 | ed5b25874d998ababb181a939dd37a16ea644435 4.1.2 |
|
156 | ed5b25874d998ababb181a939dd37a16ea644435 4.1.2 | |
157 | 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3 |
|
157 | 77eaf9539499a1b8be259ffe7ada787d07857f80 4.1.3 | |
158 | 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc |
|
158 | 616e788321cc4ae9975b7f0c54c849f36d82182b 4.2-rc | |
159 | bb96d4a497432722623ae60d9bc734a1e360179e 4.2 |
|
159 | bb96d4a497432722623ae60d9bc734a1e360179e 4.2 | |
160 | c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1 |
|
160 | c850f0ed54c1d42f9aa079ad528f8127e5775217 4.2.1 | |
161 | 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2 |
|
161 | 26c49ed51a698ec016d2b4c6b44ca3c3f73cc788 4.2.2 | |
162 | 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc |
|
162 | 857876ebaed4e315f63157bd157d6ce553c7ab73 4.3-rc | |
|
163 | 5544af8622863796a0027566f6b646e10d522c4c 4.3 | |||
|
164 | 943c91326b23954e6e1c6960d0239511f9530258 4.2.3 |
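The listing above is Mercurial's .hgtags file: one entry per line, a 40-character hexadecimal changeset hash, a space, and the tag name, with later entries for the same tag taking precedence. A minimal reader, as a sketch only (it assumes the file body is already in memory as a string, and the helper name parse_hgtags is illustrative)::

  def parse_hgtags(text):
      """Map tag name -> changeset hash for the body of a .hgtags file."""
      tags = {}
      for line in text.splitlines():
          line = line.strip()
          if not line:
              continue
          node, tag = line.split(' ', 1)  # hash first, then the tag name
          tags[tag] = node  # a later line for the same tag overrides earlier ones
      return tags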
README
@@ -1,20 +1,20 @@
1 | Mercurial |
|
1 | Mercurial | |
2 | ========= |
|
2 | ========= | |
3 |
|
3 | |||
4 | Mercurial is a fast, easy to use, distributed revision control tool |
|
4 | Mercurial is a fast, easy to use, distributed revision control tool | |
5 | for software developers. |
|
5 | for software developers. | |
6 |
|
6 | |||
7 | Basic install: |
|
7 | Basic install:: | |
8 |
|
8 | |||
9 | $ make # see install targets |
|
9 | $ make # see install targets | |
10 | $ make install # do a system-wide install |
|
10 | $ make install # do a system-wide install | |
11 | $ hg debuginstall # sanity-check setup |
|
11 | $ hg debuginstall # sanity-check setup | |
12 | $ hg # see help |
|
12 | $ hg # see help | |
13 |
|
13 | |||
14 | Running without installing: |
|
14 | Running without installing:: | |
15 |
|
15 | |||
16 | $ make local # build for inplace usage |
|
16 | $ make local # build for inplace usage | |
17 | $ ./hg --version # should show the latest version |
|
17 | $ ./hg --version # should show the latest version | |
18 |
|
18 | |||
19 | See https://mercurial-scm.org/ for detailed installation |
|
19 | See https://mercurial-scm.org/ for detailed installation | |
20 | instructions, platform-specific notes, and Mercurial user information. |
|
20 | instructions, platform-specific notes, and Mercurial user information. |
contrib/chg/Makefile
@@ -1,52 +1,52 @@
1 | HG = $(CURDIR)/../../hg |
|
1 | HG = $(CURDIR)/../../hg | |
2 |
|
2 | |||
3 | TARGET = chg |
|
3 | TARGET = chg | |
4 | SRCS = chg.c hgclient.c procutil.c util.c |
|
4 | SRCS = chg.c hgclient.c procutil.c util.c | |
5 | OBJS = $(SRCS:.c=.o) |
|
5 | OBJS = $(SRCS:.c=.o) | |
6 |
|
6 | |||
7 | CFLAGS ?= -O2 -Wall -Wextra -pedantic -g |
|
7 | CFLAGS ?= -O2 -Wall -Wextra -pedantic -g | |
8 | CPPFLAGS ?= -D_FORTIFY_SOURCE=2 |
|
8 | CPPFLAGS ?= -D_FORTIFY_SOURCE=2 -D_GNU_SOURCE | |
9 | override CFLAGS += -std=gnu99 |
|
9 | override CFLAGS += -std=gnu99 | |
10 | ifdef HGPATH |
|
10 | ifdef HGPATH | |
11 | override CPPFLAGS += -DHGPATH=\"$(HGPATH)\" |
|
11 | override CPPFLAGS += -DHGPATH=\"$(HGPATH)\" | |
12 | endif |
|
12 | endif | |
13 |
|
13 | |||
14 | DESTDIR = |
|
14 | DESTDIR = | |
15 | PREFIX = /usr/local |
|
15 | PREFIX = /usr/local | |
16 | MANDIR = $(PREFIX)/share/man/man1 |
|
16 | MANDIR = $(PREFIX)/share/man/man1 | |
17 |
|
17 | |||
18 | CHGSOCKDIR = /tmp/chg$(shell id -u) |
|
18 | CHGSOCKDIR = /tmp/chg$(shell id -u) | |
19 | CHGSOCKNAME = $(CHGSOCKDIR)/server |
|
19 | CHGSOCKNAME = $(CHGSOCKDIR)/server | |
20 |
|
20 | |||
21 | .PHONY: all |
|
21 | .PHONY: all | |
22 | all: $(TARGET) |
|
22 | all: $(TARGET) | |
23 |
|
23 | |||
24 | $(TARGET): $(OBJS) |
|
24 | $(TARGET): $(OBJS) | |
25 | $(CC) $(LDFLAGS) -o $@ $(OBJS) |
|
25 | $(CC) $(LDFLAGS) -o $@ $(OBJS) | |
26 |
|
26 | |||
27 | chg.o: hgclient.h procutil.h util.h |
|
27 | chg.o: hgclient.h procutil.h util.h | |
28 | hgclient.o: hgclient.h procutil.h util.h |
|
28 | hgclient.o: hgclient.h procutil.h util.h | |
29 | procutil.o: procutil.h util.h |
|
29 | procutil.o: procutil.h util.h | |
30 | util.o: util.h |
|
30 | util.o: util.h | |
31 |
|
31 | |||
32 | .PHONY: install |
|
32 | .PHONY: install | |
33 | install: $(TARGET) |
|
33 | install: $(TARGET) | |
34 | install -d $(DESTDIR)$(PREFIX)/bin |
|
34 | install -d $(DESTDIR)$(PREFIX)/bin | |
35 | install -m 755 $(TARGET) $(DESTDIR)$(PREFIX)/bin |
|
35 | install -m 755 $(TARGET) $(DESTDIR)$(PREFIX)/bin | |
36 | install -d $(DESTDIR)$(MANDIR) |
|
36 | install -d $(DESTDIR)$(MANDIR) | |
37 | install -m 644 chg.1 $(DESTDIR)$(MANDIR) |
|
37 | install -m 644 chg.1 $(DESTDIR)$(MANDIR) | |
38 |
|
38 | |||
39 | .PHONY: serve |
|
39 | .PHONY: serve | |
40 | serve: |
|
40 | serve: | |
41 | [ -d $(CHGSOCKDIR) ] || ( umask 077; mkdir $(CHGSOCKDIR) ) |
|
41 | [ -d $(CHGSOCKDIR) ] || ( umask 077; mkdir $(CHGSOCKDIR) ) | |
42 | $(HG) serve --cwd / --cmdserver chgunix \ |
|
42 | $(HG) serve --cwd / --cmdserver chgunix \ | |
43 | --address $(CHGSOCKNAME) \ |
|
43 | --address $(CHGSOCKNAME) \ | |
44 | --config cmdserver.log=/dev/stderr |
|
44 | --config cmdserver.log=/dev/stderr | |
45 |
|
45 | |||
46 | .PHONY: clean |
|
46 | .PHONY: clean | |
47 | clean: |
|
47 | clean: | |
48 | $(RM) $(OBJS) |
|
48 | $(RM) $(OBJS) | |
49 |
|
49 | |||
50 | .PHONY: distclean |
|
50 | .PHONY: distclean | |
51 | distclean: |
|
51 | distclean: | |
52 | $(RM) $(OBJS) $(TARGET) |
|
52 | $(RM) $(OBJS) $(TARGET) |
contrib/docker/centos5
@@ -1,6 +1,9 @@
1 | FROM centos:centos5 |
|
1 | FROM centos:centos5 | |
|
2 | RUN sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo | |||
|
3 | RUN sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo | |||
|
4 | RUN sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo | |||
2 | RUN yum install -y gcc make rpm-build gettext tar |
|
5 | RUN yum install -y gcc make rpm-build gettext tar | |
3 | RUN yum install -y python-devel python-docutils |
|
6 | RUN yum install -y python-devel python-docutils | |
4 | # For creating repo meta data |
|
7 | # For creating repo meta data | |
5 | RUN yum install -y createrepo |
|
8 | RUN yum install -y createrepo | |
6 | RUN yum install -y readline-devel openssl-devel ncurses-devel zlib-devel bzip2-devel |
|
9 | RUN yum install -y readline-devel openssl-devel ncurses-devel zlib-devel bzip2-devel |
hgext/rebase.py
@@ -1,1550 +1,1550 @@
1 | # rebase.py - rebasing feature for mercurial |
|
1 | # rebase.py - rebasing feature for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com> |
|
3 | # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | '''command to move sets of revisions to a different ancestor |
|
8 | '''command to move sets of revisions to a different ancestor | |
9 |
|
9 | |||
10 | This extension lets you rebase changesets in an existing Mercurial |
|
10 | This extension lets you rebase changesets in an existing Mercurial | |
11 | repository. |
|
11 | repository. | |
12 |
|
12 | |||
13 | For more information: |
|
13 | For more information: | |
14 | https://mercurial-scm.org/wiki/RebaseExtension |
|
14 | https://mercurial-scm.org/wiki/RebaseExtension | |
15 | ''' |
|
15 | ''' | |
16 |
|
16 | |||
17 | from __future__ import absolute_import |
|
17 | from __future__ import absolute_import | |
18 |
|
18 | |||
19 | import errno |
|
19 | import errno | |
20 | import os |
|
20 | import os | |
21 |
|
21 | |||
22 | from mercurial.i18n import _ |
|
22 | from mercurial.i18n import _ | |
23 | from mercurial.node import ( |
|
23 | from mercurial.node import ( | |
24 | hex, |
|
24 | hex, | |
25 | nullid, |
|
25 | nullid, | |
26 | nullrev, |
|
26 | nullrev, | |
27 | short, |
|
27 | short, | |
28 | ) |
|
28 | ) | |
29 | from mercurial import ( |
|
29 | from mercurial import ( | |
30 | bookmarks, |
|
30 | bookmarks, | |
31 | cmdutil, |
|
31 | cmdutil, | |
32 | commands, |
|
32 | commands, | |
33 | copies, |
|
33 | copies, | |
34 | destutil, |
|
34 | destutil, | |
35 | dirstateguard, |
|
35 | dirstateguard, | |
36 | error, |
|
36 | error, | |
37 | extensions, |
|
37 | extensions, | |
38 | hg, |
|
38 | hg, | |
39 | lock, |
|
39 | lock, | |
40 | merge as mergemod, |
|
40 | merge as mergemod, | |
41 | mergeutil, |
|
41 | mergeutil, | |
42 | obsolete, |
|
42 | obsolete, | |
43 | obsutil, |
|
43 | obsutil, | |
44 | patch, |
|
44 | patch, | |
45 | phases, |
|
45 | phases, | |
46 | registrar, |
|
46 | registrar, | |
47 | repair, |
|
47 | repair, | |
48 | repoview, |
|
48 | repoview, | |
49 | revset, |
|
49 | revset, | |
50 | scmutil, |
|
50 | scmutil, | |
51 | smartset, |
|
51 | smartset, | |
52 | util, |
|
52 | util, | |
53 | ) |
|
53 | ) | |
54 |
|
54 | |||
55 | release = lock.release |
|
55 | release = lock.release | |
56 | templateopts = cmdutil.templateopts |
|
56 | templateopts = cmdutil.templateopts | |
57 |
|
57 | |||
58 | # The following constants are used throughout the rebase module. The ordering of |
|
58 | # The following constants are used throughout the rebase module. The ordering of | |
59 | # their values must be maintained. |
|
59 | # their values must be maintained. | |
60 |
|
60 | |||
61 | # Indicates that a revision needs to be rebased |
|
61 | # Indicates that a revision needs to be rebased | |
62 | revtodo = -1 |
|
62 | revtodo = -1 | |
63 | nullmerge = -2 |
|
63 | nullmerge = -2 | |
64 | revignored = -3 |
|
64 | revignored = -3 | |
65 | # successor in rebase destination |
|
65 | # successor in rebase destination | |
66 | revprecursor = -4 |
|
66 | revprecursor = -4 | |
67 | # plain prune (no successor) |
|
67 | # plain prune (no successor) | |
68 | revpruned = -5 |
|
68 | revpruned = -5 | |
69 | revskipped = (revignored, revprecursor, revpruned) |
|
69 | revskipped = (revignored, revprecursor, revpruned) | |
70 |
|
70 | |||
71 | cmdtable = {} |
|
71 | cmdtable = {} | |
72 | command = registrar.command(cmdtable) |
|
72 | command = registrar.command(cmdtable) | |
73 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
73 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
74 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
74 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
75 | # be specifying the version(s) of Mercurial they are tested with, or |
|
75 | # be specifying the version(s) of Mercurial they are tested with, or | |
76 | # leave the attribute unspecified. |
|
76 | # leave the attribute unspecified. | |
77 | testedwith = 'ships-with-hg-core' |
|
77 | testedwith = 'ships-with-hg-core' | |
78 |
|
78 | |||
79 | def _nothingtorebase(): |
|
79 | def _nothingtorebase(): | |
80 | return 1 |
|
80 | return 1 | |
81 |
|
81 | |||
82 | def _savegraft(ctx, extra): |
|
82 | def _savegraft(ctx, extra): | |
83 | s = ctx.extra().get('source', None) |
|
83 | s = ctx.extra().get('source', None) | |
84 | if s is not None: |
|
84 | if s is not None: | |
85 | extra['source'] = s |
|
85 | extra['source'] = s | |
86 | s = ctx.extra().get('intermediate-source', None) |
|
86 | s = ctx.extra().get('intermediate-source', None) | |
87 | if s is not None: |
|
87 | if s is not None: | |
88 | extra['intermediate-source'] = s |
|
88 | extra['intermediate-source'] = s | |
89 |
|
89 | |||
90 | def _savebranch(ctx, extra): |
|
90 | def _savebranch(ctx, extra): | |
91 | extra['branch'] = ctx.branch() |
|
91 | extra['branch'] = ctx.branch() | |
92 |
|
92 | |||
93 | def _makeextrafn(copiers): |
|
93 | def _makeextrafn(copiers): | |
94 | """make an extrafn out of the given copy-functions. |
|
94 | """make an extrafn out of the given copy-functions. | |
95 |
|
95 | |||
96 | A copy function takes a context and an extra dict, and mutates the |
|
96 | A copy function takes a context and an extra dict, and mutates the | |
97 | extra dict as needed based on the given context. |
|
97 | extra dict as needed based on the given context. | |
98 | """ |
|
98 | """ | |
99 | def extrafn(ctx, extra): |
|
99 | def extrafn(ctx, extra): | |
100 | for c in copiers: |
|
100 | for c in copiers: | |
101 | c(ctx, extra) |
|
101 | c(ctx, extra) | |
102 | return extrafn |
|
102 | return extrafn | |
103 |
|
103 | |||
104 | def _destrebase(repo, sourceset, destspace=None): |
|
104 | def _destrebase(repo, sourceset, destspace=None): | |
105 | """small wrapper around destmerge to pass the right extra args |
|
105 | """small wrapper around destmerge to pass the right extra args | |
106 |
|
106 | |||
107 | Please wrap destutil.destmerge instead.""" |
|
107 | Please wrap destutil.destmerge instead.""" | |
108 | return destutil.destmerge(repo, action='rebase', sourceset=sourceset, |
|
108 | return destutil.destmerge(repo, action='rebase', sourceset=sourceset, | |
109 | onheadcheck=False, destspace=destspace) |
|
109 | onheadcheck=False, destspace=destspace) | |
110 |
|
110 | |||
111 | revsetpredicate = registrar.revsetpredicate() |
|
111 | revsetpredicate = registrar.revsetpredicate() | |
112 |
|
112 | |||
113 | @revsetpredicate('_destrebase') |
|
113 | @revsetpredicate('_destrebase') | |
114 | def _revsetdestrebase(repo, subset, x): |
|
114 | def _revsetdestrebase(repo, subset, x): | |
115 | # ``_rebasedefaultdest()`` |
|
115 | # ``_rebasedefaultdest()`` | |
116 |
|
116 | |||
117 | # default destination for rebase. |
|
117 | # default destination for rebase. | |
118 | # # XXX: Currently private because I expect the signature to change. |
|
118 | # # XXX: Currently private because I expect the signature to change. | |
119 | # # XXX: - bailing out in case of ambiguity vs returning all data. |
|
119 | # # XXX: - bailing out in case of ambiguity vs returning all data. | |
120 | # i18n: "_rebasedefaultdest" is a keyword |
|
120 | # i18n: "_rebasedefaultdest" is a keyword | |
121 | sourceset = None |
|
121 | sourceset = None | |
122 | if x is not None: |
|
122 | if x is not None: | |
123 | sourceset = revset.getset(repo, smartset.fullreposet(repo), x) |
|
123 | sourceset = revset.getset(repo, smartset.fullreposet(repo), x) | |
124 | return subset & smartset.baseset([_destrebase(repo, sourceset)]) |
|
124 | return subset & smartset.baseset([_destrebase(repo, sourceset)]) | |
125 |
|
125 | |||
126 | class rebaseruntime(object): |
|
126 | class rebaseruntime(object): | |
127 | """This class is a container for rebase runtime state""" |
|
127 | """This class is a container for rebase runtime state""" | |
128 | def __init__(self, repo, ui, opts=None): |
|
128 | def __init__(self, repo, ui, opts=None): | |
129 | if opts is None: |
|
129 | if opts is None: | |
130 | opts = {} |
|
130 | opts = {} | |
131 |
|
131 | |||
132 | self.repo = repo |
|
132 | self.repo = repo | |
133 | self.ui = ui |
|
133 | self.ui = ui | |
134 | self.opts = opts |
|
134 | self.opts = opts | |
135 | self.originalwd = None |
|
135 | self.originalwd = None | |
136 | self.external = nullrev |
|
136 | self.external = nullrev | |
137 | # Mapping between the old revision id and either what is the new rebased |
|
137 | # Mapping between the old revision id and either what is the new rebased | |
138 | # revision or what needs to be done with the old revision. The state |
|
138 | # revision or what needs to be done with the old revision. The state | |
139 | # dict will be what contains most of the rebase progress state. |
|
139 | # dict will be what contains most of the rebase progress state. | |
140 | self.state = {} |
|
140 | self.state = {} | |
141 | self.activebookmark = None |
|
141 | self.activebookmark = None | |
142 | self.dest = None |
|
142 | self.dest = None | |
143 | self.skipped = set() |
|
143 | self.skipped = set() | |
144 | self.destancestors = set() |
|
144 | self.destancestors = set() | |
145 |
|
145 | |||
146 | self.collapsef = opts.get('collapse', False) |
|
146 | self.collapsef = opts.get('collapse', False) | |
147 | self.collapsemsg = cmdutil.logmessage(ui, opts) |
|
147 | self.collapsemsg = cmdutil.logmessage(ui, opts) | |
148 | self.date = opts.get('date', None) |
|
148 | self.date = opts.get('date', None) | |
149 |
|
149 | |||
150 | e = opts.get('extrafn') # internal, used by e.g. hgsubversion |
|
150 | e = opts.get('extrafn') # internal, used by e.g. hgsubversion | |
151 | self.extrafns = [_savegraft] |
|
151 | self.extrafns = [_savegraft] | |
152 | if e: |
|
152 | if e: | |
153 | self.extrafns = [e] |
|
153 | self.extrafns = [e] | |
154 |
|
154 | |||
155 | self.keepf = opts.get('keep', False) |
|
155 | self.keepf = opts.get('keep', False) | |
156 | self.keepbranchesf = opts.get('keepbranches', False) |
|
156 | self.keepbranchesf = opts.get('keepbranches', False) | |
157 | # keepopen is not meant for use on the command line, but by |
|
157 | # keepopen is not meant for use on the command line, but by | |
158 | # other extensions |
|
158 | # other extensions | |
159 | self.keepopen = opts.get('keepopen', False) |
|
159 | self.keepopen = opts.get('keepopen', False) | |
160 | self.obsoletenotrebased = {} |
|
160 | self.obsoletenotrebased = {} | |
161 |
|
161 | |||
162 | def storestatus(self, tr=None): |
|
162 | def storestatus(self, tr=None): | |
163 | """Store the current status to allow recovery""" |
|
163 | """Store the current status to allow recovery""" | |
164 | if tr: |
|
164 | if tr: | |
165 | tr.addfilegenerator('rebasestate', ('rebasestate',), |
|
165 | tr.addfilegenerator('rebasestate', ('rebasestate',), | |
166 | self._writestatus, location='plain') |
|
166 | self._writestatus, location='plain') | |
167 | else: |
|
167 | else: | |
168 | with self.repo.vfs("rebasestate", "w") as f: |
|
168 | with self.repo.vfs("rebasestate", "w") as f: | |
169 | self._writestatus(f) |
|
169 | self._writestatus(f) | |
170 |
|
170 | |||
171 | def _writestatus(self, f): |
|
171 | def _writestatus(self, f): | |
172 | repo = self.repo.unfiltered() |
|
172 | repo = self.repo.unfiltered() | |
173 | f.write(repo[self.originalwd].hex() + '\n') |
|
173 | f.write(repo[self.originalwd].hex() + '\n') | |
174 | f.write(repo[self.dest].hex() + '\n') |
|
174 | f.write(repo[self.dest].hex() + '\n') | |
175 | f.write(repo[self.external].hex() + '\n') |
|
175 | f.write(repo[self.external].hex() + '\n') | |
176 | f.write('%d\n' % int(self.collapsef)) |
|
176 | f.write('%d\n' % int(self.collapsef)) | |
177 | f.write('%d\n' % int(self.keepf)) |
|
177 | f.write('%d\n' % int(self.keepf)) | |
178 | f.write('%d\n' % int(self.keepbranchesf)) |
|
178 | f.write('%d\n' % int(self.keepbranchesf)) | |
179 | f.write('%s\n' % (self.activebookmark or '')) |
|
179 | f.write('%s\n' % (self.activebookmark or '')) | |
180 | for d, v in self.state.iteritems(): |
|
180 | for d, v in self.state.iteritems(): | |
181 | oldrev = repo[d].hex() |
|
181 | oldrev = repo[d].hex() | |
182 | if v >= 0: |
|
182 | if v >= 0: | |
183 | newrev = repo[v].hex() |
|
183 | newrev = repo[v].hex() | |
184 | elif v == revtodo: |
|
184 | elif v == revtodo: | |
185 | # To maintain format compatibility, we have to use nullid. |
|
185 | # To maintain format compatibility, we have to use nullid. | |
186 | # Please do remove this special case when upgrading the format. |
|
186 | # Please do remove this special case when upgrading the format. | |
187 | newrev = hex(nullid) |
|
187 | newrev = hex(nullid) | |
188 | else: |
|
188 | else: | |
189 | newrev = v |
|
189 | newrev = v | |
190 | f.write("%s:%s\n" % (oldrev, newrev)) |
|
190 | f.write("%s:%s\n" % (oldrev, newrev)) | |
191 | repo.ui.debug('rebase status stored\n') |
|
191 | repo.ui.debug('rebase status stored\n') | |
192 |
|
192 | |||
    def restorestatus(self):
        """Restore a previously stored status"""
        repo = self.repo
        keepbranches = None
        dest = None
        collapse = False
        external = nullrev
        activebookmark = None
        state = {}

        try:
            f = repo.vfs("rebasestate")
            for i, l in enumerate(f.read().splitlines()):
                if i == 0:
                    originalwd = repo[l].rev()
                elif i == 1:
                    dest = repo[l].rev()
                elif i == 2:
                    external = repo[l].rev()
                elif i == 3:
                    collapse = bool(int(l))
                elif i == 4:
                    keep = bool(int(l))
                elif i == 5:
                    keepbranches = bool(int(l))
                elif i == 6 and not (len(l) == 81 and ':' in l):
                    # line 6 is a recent addition, so for backwards
                    # compatibility check that the line doesn't look like the
                    # oldrev:newrev lines
                    activebookmark = l
                else:
                    oldrev, newrev = l.split(':')
                    if newrev in (str(nullmerge), str(revignored),
                                  str(revprecursor), str(revpruned)):
                        state[repo[oldrev].rev()] = int(newrev)
                    elif newrev == nullid:
                        state[repo[oldrev].rev()] = revtodo
                        # Legacy compat special case
                    else:
                        state[repo[oldrev].rev()] = repo[newrev].rev()

        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('rebase'))

        if keepbranches is None:
            raise error.Abort(_('.hg/rebasestate is incomplete'))

        skipped = set()
        # recompute the set of skipped revs
        if not collapse:
            seen = {dest}
            for old, new in sorted(state.items()):
                if new != revtodo and new in seen:
                    skipped.add(old)
                seen.add(new)
        repo.ui.debug('computed skipped revs: %s\n' %
                      (' '.join(str(r) for r in sorted(skipped)) or None))
        repo.ui.debug('rebase status resumed\n')
        _setrebasesetvisibility(repo, set(state.keys()) | {originalwd})

        self.originalwd = originalwd
        self.dest = dest
        self.state = state
        self.skipped = skipped
        self.collapsef = collapse
        self.keepf = keep
        self.keepbranchesf = keepbranches
        self.external = external
        self.activebookmark = activebookmark

    def _handleskippingobsolete(self, rebaserevs, obsoleterevs, dest):
        """Compute structures necessary for skipping obsolete revisions

        rebaserevs: iterable of all revisions that are to be rebased
        obsoleterevs: iterable of all obsolete revisions in rebaseset
        dest: a destination revision for the rebase operation
        """
        self.obsoletenotrebased = {}
        if not self.ui.configbool('experimental', 'rebaseskipobsolete',
                                  default=True):
            return
        rebaseset = set(rebaserevs)
        obsoleteset = set(obsoleterevs)
        self.obsoletenotrebased = _computeobsoletenotrebased(self.repo,
                                                             obsoleteset, dest)
        skippedset = set(self.obsoletenotrebased)
        _checkobsrebase(self.repo, self.ui, obsoleteset, rebaseset, skippedset)

    def _prepareabortorcontinue(self, isabort):
        try:
            self.restorestatus()
            self.collapsemsg = restorecollapsemsg(self.repo, isabort)
        except error.RepoLookupError:
            if isabort:
                clearstatus(self.repo)
                clearcollapsemsg(self.repo)
                self.repo.ui.warn(_('rebase aborted (no revision is removed,'
                                    ' only broken state is cleared)\n'))
                return 0
            else:
                msg = _('cannot continue inconsistent rebase')
                hint = _('use "hg rebase --abort" to clear broken state')
                raise error.Abort(msg, hint=hint)
        if isabort:
            return abort(self.repo, self.originalwd, self.dest,
                         self.state, activebookmark=self.activebookmark)

        obsrevs = (r for r, st in self.state.items() if st == revprecursor)
        self._handleskippingobsolete(self.state.keys(), obsrevs, self.dest)

    def _preparenewrebase(self, dest, rebaseset):
        if dest is None:
            return _nothingtorebase()

        allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
        if (not (self.keepf or allowunstable)
            and self.repo.revs('first(children(%ld) - %ld)',
                               rebaseset, rebaseset)):
            raise error.Abort(
                _("can't remove original changesets with"
                  " unrebased descendants"),
                hint=_('use --keep to keep original changesets'))

        obsrevs = _filterobsoleterevs(self.repo, set(rebaseset))
        self._handleskippingobsolete(rebaseset, obsrevs, dest.rev())

        result = buildstate(self.repo, dest, rebaseset, self.collapsef,
                            self.obsoletenotrebased)

        if not result:
            # Empty state built, nothing to rebase
            self.ui.status(_('nothing to rebase\n'))
            return _nothingtorebase()

        for root in self.repo.set('roots(%ld)', rebaseset):
            if not self.keepf and not root.mutable():
                raise error.Abort(_("can't rebase public changeset %s")
                                  % root,
                                  hint=_("see 'hg help phases' for details"))

        (self.originalwd, self.dest, self.state) = result
        if self.collapsef:
            self.destancestors = self.repo.changelog.ancestors(
                                        [self.dest],
                                        inclusive=True)
            self.external = externalparent(self.repo, self.state,
                                           self.destancestors)

        if dest.closesbranch() and not self.keepbranchesf:
            self.ui.status(_('reopening closed branch head %s\n') % dest)

    def _performrebase(self, tr):
        repo, ui, opts = self.repo, self.ui, self.opts
        if self.keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            self.extrafns.insert(0, _savebranch)
            if self.collapsef:
                branches = set()
                for rev in self.state:
                    branches.add(repo[rev].branch())
                    if len(branches) > 1:
                        raise error.Abort(_('cannot collapse multiple named '
                                            'branches'))

        # Rebase
        if not self.destancestors:
            self.destancestors = repo.changelog.ancestors([self.dest],
                                                          inclusive=True)

        # Keep track of the active bookmarks in order to reset them later
        self.activebookmark = self.activebookmark or repo._activebookmark
        if self.activebookmark:
            bookmarks.deactivate(repo)

        # Store the state before we begin so users can run 'hg rebase --abort'
        # if we fail before the transaction closes.
        self.storestatus()

        sortedrevs = repo.revs('sort(%ld, -topo)', self.state)
        cands = [k for k, v in self.state.iteritems() if v == revtodo]
        total = len(cands)
        pos = 0
        for rev in sortedrevs:
            ctx = repo[rev]
            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                                   ctx.description().split('\n', 1)[0])
            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
            if names:
                desc += ' (%s)' % ' '.join(names)
            if self.state[rev] == rev:
                ui.status(_('already rebased %s\n') % desc)
            elif self.state[rev] == revtodo:
                pos += 1
                ui.status(_('rebasing %s\n') % desc)
                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
                            _('changesets'), total)
                p1, p2, base = defineparents(repo, rev, self.dest,
                                             self.state,
                                             self.destancestors,
                                             self.obsoletenotrebased)
                self.storestatus(tr=tr)
                storecollapsemsg(repo, self.collapsemsg)
                if len(repo[None].parents()) == 2:
                    repo.ui.debug('resuming interrupted rebase\n')
                else:
                    try:
                        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                     'rebase')
                        stats = rebasenode(repo, rev, p1, base, self.state,
                                           self.collapsef, self.dest)
                        if stats and stats[3] > 0:
                            raise error.InterventionRequired(
                                _('unresolved conflicts (see hg '
                                  'resolve, then hg rebase --continue)'))
                    finally:
                        ui.setconfig('ui', 'forcemerge', '', 'rebase')
                if not self.collapsef:
                    merging = p2 != nullrev
                    editform = cmdutil.mergeeditform(merging, 'rebase')
                    editor = cmdutil.getcommiteditor(editform=editform, **opts)
                    newnode = concludenode(repo, rev, p1, p2,
                                           extrafn=_makeextrafn(self.extrafns),
                                           editor=editor,
                                           keepbranches=self.keepbranchesf,
                                           date=self.date)
                    if newnode is None:
                        # If it ended up being a no-op commit, then the normal
                        # merge state clean-up path doesn't happen, so do it
                        # here. Fix issue5494
                        mergemod.mergestate.clean(repo)
                else:
                    # Skip commit if we are collapsing
                    repo.setparents(repo[p1].node())
                    newnode = None
                # Update the state
                if newnode is not None:
                    self.state[rev] = repo[newnode].rev()
                    ui.debug('rebased as %s\n' % short(newnode))
                else:
                    if not self.collapsef:
                        ui.warn(_('note: rebase of %d:%s created no changes '
                                  'to commit\n') % (rev, ctx))
                        self.skipped.add(rev)
                    self.state[rev] = p1
                    ui.debug('next revision set to %s\n' % p1)
            elif self.state[rev] == nullmerge:
                ui.debug('ignoring null merge rebase of %s\n' % rev)
            elif self.state[rev] == revignored:
                ui.status(_('not rebasing ignored %s\n') % desc)
            elif self.state[rev] == revprecursor:
                destctx = repo[self.obsoletenotrebased[rev]]
                descdest = '%d:%s "%s"' % (destctx.rev(), destctx,
                               destctx.description().split('\n', 1)[0])
                msg = _('note: not rebasing %s, already in destination as %s\n')
                ui.status(msg % (desc, descdest))
            elif self.state[rev] == revpruned:
                msg = _('note: not rebasing %s, it has no successor\n')
                ui.status(msg % desc)
            else:
                ui.status(_('already rebased %s as %s\n') %
                          (desc, repo[self.state[rev]]))

        ui.progress(_('rebasing'), None)
        ui.note(_('rebase merging completed\n'))

    def _finishrebase(self):
        repo, ui, opts = self.repo, self.ui, self.opts
        if self.collapsef and not self.keepopen:
            p1, p2, _base = defineparents(repo, min(self.state),
                                          self.dest, self.state,
                                          self.destancestors,
                                          self.obsoletenotrebased)
            editopt = opts.get('edit')
            editform = 'rebase.collapse'
            if self.collapsemsg:
                commitmsg = self.collapsemsg
            else:
                commitmsg = 'Collapsed revision'
                for rebased in sorted(self.state):
                    if rebased not in self.skipped and\
                       self.state[rebased] > nullmerge:
                        commitmsg += '\n* %s' % repo[rebased].description()
                editopt = True
            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
            revtoreuse = max(self.state)

            dsguard = None
            if ui.configbool('rebase', 'singletransaction'):
                dsguard = dirstateguard.dirstateguard(repo, 'rebase')
            with util.acceptintervention(dsguard):
                newnode = concludenode(repo, revtoreuse, p1, self.external,
                                       commitmsg=commitmsg,
                                       extrafn=_makeextrafn(self.extrafns),
                                       editor=editor,
                                       keepbranches=self.keepbranchesf,
                                       date=self.date)
            if newnode is None:
                newrev = self.dest
            else:
                newrev = repo[newnode].rev()
            for oldrev in self.state.iterkeys():
                if self.state[oldrev] > nullmerge:
                    self.state[oldrev] = newrev

        if 'qtip' in repo.tags():
            updatemq(repo, self.state, self.skipped, **opts)

        # restore original working directory
        # (we do this before stripping)
        newwd = self.state.get(self.originalwd, self.originalwd)
        if newwd == revprecursor:
            newwd = self.obsoletenotrebased[self.originalwd]
        elif newwd < 0:
            # original directory is a parent of rebase set root or ignored
            newwd = self.originalwd
        if newwd not in [c.rev() for c in repo[None].parents()]:
            ui.note(_("update back to initial working directory parent\n"))
            hg.updaterepo(repo, newwd, False)

        if not self.keepf:
            collapsedas = None
            if self.collapsef:
                collapsedas = newnode
            clearrebased(ui, repo, self.dest, self.state, self.skipped,
                         collapsedas)

        clearstatus(repo)
        clearcollapsemsg(repo)

        ui.note(_("rebase completed\n"))
        util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
        if self.skipped:
            skippedlen = len(self.skipped)
            ui.note(_("%d revisions have been skipped\n") % skippedlen)

        if (self.activebookmark and self.activebookmark in repo._bookmarks and
            repo['.'].node() == repo._bookmarks[self.activebookmark]):
            bookmarks.activate(repo, self.activebookmark)

@command('rebase',
    [('s', 'source', '',
     _('rebase the specified changeset and descendants'), _('REV')),
    ('b', 'base', '',
     _('rebase everything from branching point of specified changeset'),
     _('REV')),
    ('r', 'rev', [],
     _('rebase these revisions'),
     _('REV')),
    ('d', 'dest', '',
     _('rebase onto the specified changeset'), _('REV')),
    ('', 'collapse', False, _('collapse the rebased changesets')),
    ('m', 'message', '',
     _('use text as collapse commit message'), _('TEXT')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('l', 'logfile', '',
     _('read collapse commit message from file'), _('FILE')),
    ('k', 'keep', False, _('keep original changesets')),
    ('', 'keepbranches', False, _('keep original branch names')),
    ('D', 'detach', False, _('(DEPRECATED)')),
    ('i', 'interactive', False, _('(DEPRECATED)')),
    ('t', 'tool', '', _('specify merge tool')),
    ('c', 'continue', False, _('continue an interrupted rebase')),
    ('a', 'abort', False, _('abort an interrupted rebase'))] +
    templateopts,
    _('[-s REV | -b REV] [-d REV] [OPTION]'))
def rebase(ui, repo, **opts):
    """move changeset (and descendants) to a different branch

    Rebase uses repeated merging to graft changesets from one part of
    history (the source) onto another (the destination). This can be
    useful for linearizing *local* changes relative to a master
    development tree.

    Published commits cannot be rebased (see :hg:`help phases`).
    To copy commits, see :hg:`help graft`.

    If you don't specify a destination changeset (``-d/--dest``), rebase
    will use the same logic as :hg:`merge` to pick a destination. If
    the current branch contains exactly one other head, the other head
    is merged with by default. Otherwise, an explicit revision with
    which to merge must be provided. (destination changeset is not
    modified by rebasing, but new changesets are added as its
    descendants.)

    Here are the ways to select changesets:

      1. Explicitly select them using ``--rev``.

      2. Use ``--source`` to select a root changeset and include all of its
         descendants.

      3. Use ``--base`` to select a changeset; rebase will find ancestors
         and their descendants which are not also ancestors of the destination.

      4. If you do not specify any of ``--rev``, ``--source``, or ``--base``,
         rebase will use ``--base .`` as above.

    Rebase will destroy original changesets unless you use ``--keep``.
    It will also move your bookmarks (even if you do).

    Some changesets may be dropped if they do not contribute changes
    (e.g. merges from the destination branch).

    Unlike ``merge``, rebase will do nothing if you are at the branch tip of
    a named branch with two heads. You will need to explicitly specify source
    and/or destination.

    If you need to use a tool to automate merge/conflict decisions, you
    can specify one with ``--tool``, see :hg:`help merge-tools`.
    As a caveat: the tool will not be used to mediate when a file was
    deleted; there is no hook presently available for this.

    If a rebase is interrupted to manually resolve a conflict, it can be
    continued with --continue/-c or aborted with --abort/-a.

    .. container:: verbose

      Examples:

      - move "local changes" (current commit back to branching point)
        to the current branch tip after a pull::

          hg rebase

      - move a single changeset to the stable branch::

          hg rebase -r 5f493448 -d stable

      - splice a commit and all its descendants onto another part of history::

          hg rebase --source c0c3 --dest 4cf9

      - rebase everything on a branch marked by a bookmark onto the
        default branch::

          hg rebase --base myfeature --dest default

      - collapse a sequence of changes into a single commit::

          hg rebase --collapse -r 1520:1525 -d .

      - move a named branch while preserving its name::

          hg rebase -r "branch(featureX)" -d 1.3 --keepbranches

      Configuration Options:

      You can make rebase require a destination if you set the following config
      option::

        [commands]
        rebase.requiredest = True

      By default, rebase will close the transaction after each commit. For
      performance purposes, you can configure rebase to use a single transaction
      across the entire rebase. WARNING: This setting introduces a significant
      risk of losing the work you've done in a rebase if the rebase aborts
      unexpectedly::

        [rebase]
        singletransaction = True

      Return Values:

      Returns 0 on success, 1 if nothing to rebase or there are
      unresolved conflicts.

    """
    rbsrt = rebaseruntime(repo, ui, opts)

    with repo.wlock(), repo.lock():
        # Validate input and define rebasing points
        destf = opts.get('dest', None)
        srcf = opts.get('source', None)
        basef = opts.get('base', None)
        revf = opts.get('rev', [])
        # search default destination in this space
        # used in the 'hg pull --rebase' case, see issue 5214.
        destspace = opts.get('_destspace')
        contf = opts.get('continue')
        abortf = opts.get('abort')
        if opts.get('interactive'):
            try:
                if extensions.find('histedit'):
                    enablehistedit = ''
            except KeyError:
                enablehistedit = " --config extensions.histedit="
            help = "hg%s help -e histedit" % enablehistedit
            msg = _("interactive history editing is supported by the "
                    "'histedit' extension (see \"%s\")") % help
            raise error.Abort(msg)

        if rbsrt.collapsemsg and not rbsrt.collapsef:
            raise error.Abort(
                _('message can only be specified with collapse'))

        if contf or abortf:
            if contf and abortf:
                raise error.Abort(_('cannot use both abort and continue'))
            if rbsrt.collapsef:
                raise error.Abort(
                    _('cannot use collapse with continue or abort'))
            if srcf or basef or destf:
                raise error.Abort(
                    _('abort and continue do not allow specifying revisions'))
            if abortf and opts.get('tool', False):
                ui.warn(_('tool option will be ignored\n'))
            if contf:
                ms = mergemod.mergestate.read(repo)
                mergeutil.checkunresolved(ms)

            retcode = rbsrt._prepareabortorcontinue(abortf)
            if retcode is not None:
                return retcode
        else:
            dest, rebaseset = _definesets(ui, repo, destf, srcf, basef, revf,
                                          destspace=destspace)
            retcode = rbsrt._preparenewrebase(dest, rebaseset)
            if retcode is not None:
                return retcode

        tr = None
        dsguard = None

        singletr = ui.configbool('rebase', 'singletransaction')
        if singletr:
            tr = repo.transaction('rebase')
        with util.acceptintervention(tr):
            if singletr:
                dsguard = dirstateguard.dirstateguard(repo, 'rebase')
            with util.acceptintervention(dsguard):
                rbsrt._performrebase(tr)

        rbsrt._finishrebase()

def _definesets(ui, repo, destf=None, srcf=None, basef=None, revf=None,
                destspace=None):
    """use revisions argument to define destination and rebase set
    """
    if revf is None:
        revf = []

    # destspace is here to work around issues with `hg pull --rebase` see
    # issue5214 for details
    if srcf and basef:
        raise error.Abort(_('cannot specify both a source and a base'))
    if revf and basef:
        raise error.Abort(_('cannot specify both a revision and a base'))
    if revf and srcf:
        raise error.Abort(_('cannot specify both a revision and a source'))

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)

    if ui.configbool('commands', 'rebase.requiredest') and not destf:
        raise error.Abort(_('you must specify a destination'),
                          hint=_('use: hg rebase -d REV'))

    if destf:
        dest = scmutil.revsingle(repo, destf)

    if revf:
        rebaseset = scmutil.revrange(repo, revf)
        if not rebaseset:
            ui.status(_('empty "rev" revision set - nothing to rebase\n'))
            return None, None
    elif srcf:
        src = scmutil.revrange(repo, [srcf])
        if not src:
            ui.status(_('empty "source" revision set - nothing to rebase\n'))
            return None, None
        rebaseset = repo.revs('(%ld)::', src)
        assert rebaseset
    else:
        base = scmutil.revrange(repo, [basef or '.'])
        if not base:
            ui.status(_('empty "base" revision set - '
                        "can't compute rebase set\n"))
            return None, None
        if not destf:
            dest = repo[_destrebase(repo, base, destspace=destspace)]
            destf = str(dest)

        roots = []  # selected children of branching points
        bpbase = {}  # {branchingpoint: [origbase]}
        for b in base:  # group bases by branching points
            bp = repo.revs('ancestor(%d, %d)', b, dest).first()
            bpbase[bp] = bpbase.get(bp, []) + [b]
        if None in bpbase:
            # emulate the old behavior, showing "nothing to rebase" (a better
            # behavior may be abort with "cannot find branching point" error)
            bpbase.clear()
        for bp, bs in bpbase.iteritems():  # calculate roots
            roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))

        rebaseset = repo.revs('%ld::', roots)

        if not rebaseset:
            # transform to list because smartsets are not comparable to
            # lists. This should be improved to honor laziness of
            # smartset.
            if list(base) == [dest.rev()]:
                if basef:
                    ui.status(_('nothing to rebase - %s is both "base"'
                                ' and destination\n') % dest)
                else:
                    ui.status(_('nothing to rebase - working directory '
                                'parent is also destination\n'))
            elif not repo.revs('%ld - ::%d', base, dest):
                if basef:
                    ui.status(_('nothing to rebase - "base" %s is '
                                'already an ancestor of destination '
                                '%s\n') %
                              ('+'.join(str(repo[r]) for r in base),
                               dest))
                else:
                    ui.status(_('nothing to rebase - working '
                                'directory parent is already an '
                                'ancestor of destination %s\n') % dest)
            else:  # can it happen?
                ui.status(_('nothing to rebase from %s to %s\n') %
                          ('+'.join(str(repo[r]) for r in base), dest))
            return None, None

    if not destf:
        dest = repo[_destrebase(repo, rebaseset, destspace=destspace)]
        destf = str(dest)

    return dest, rebaseset

def externalparent(repo, state, destancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state are collapsed on top of destancestors.
    Abort if there is more than one parent.
    """
    parents = set()
    source = min(state)
    for rev in state:
        if rev == source:
            continue
        for p in repo[rev].parents():
            if (p.rev() not in state
                    and p.rev() not in destancestors):
                parents.add(p.rev())
    if not parents:
        return nullrev
    if len(parents) == 1:
        return parents.pop()
    raise error.Abort(_('unable to collapse on top of %s, there is more '
                        'than one external parent: %s') %
                      (max(destancestors),
                       ', '.join(str(p) for p in sorted(parents))))

def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None,
                 keepbranches=False, date=None):
    '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
    but also store useful information in extra.
    Return node of committed revision.'''
    dsguard = util.nullcontextmanager()
    if not repo.ui.configbool('rebase', 'singletransaction'):
        dsguard = dirstateguard.dirstateguard(repo, 'rebase')
    with dsguard:
        repo.setparents(repo[p1].node(), repo[p2].node())
        ctx = repo[rev]
        if commitmsg is None:
            commitmsg = ctx.description()
        keepbranch = keepbranches and repo[p1].branch() != ctx.branch()
        extra = {'rebase_source': ctx.hex()}
        if extrafn:
            extrafn(ctx, extra)

        destphase = max(ctx.phase(), phases.draft)
        overrides = {('phases', 'new-commit'): destphase}
        with repo.ui.configoverride(overrides, 'rebase'):
            if keepbranch:
                repo.ui.setconfig('ui', 'allowemptycommit', True)
            # Commit might fail if unresolved files exist
            if date is None:
                date = ctx.date()
            newnode = repo.commit(text=commitmsg, user=ctx.user(),
                                  date=date, extra=extra, editor=editor)

        repo.dirstate.setbranch(repo[newnode].branch())
        return newnode

882 | def rebasenode(repo, rev, p1, base, state, collapse, dest): |
|
882 | def rebasenode(repo, rev, p1, base, state, collapse, dest): | |
883 | 'Rebase a single revision rev on top of p1 using base as merge ancestor' |
|
883 | 'Rebase a single revision rev on top of p1 using base as merge ancestor' | |
884 | # Merge phase |
|
884 | # Merge phase | |
885 | # Update to destination and merge it with local |
|
885 | # Update to destination and merge it with local | |
886 | if repo['.'].rev() != p1: |
|
886 | if repo['.'].rev() != p1: | |
887 | repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1])) |
|
887 | repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1])) | |
888 | mergemod.update(repo, p1, False, True) |
|
888 | mergemod.update(repo, p1, False, True) | |
889 | else: |
|
889 | else: | |
890 | repo.ui.debug(" already in destination\n") |
|
890 | repo.ui.debug(" already in destination\n") | |
891 | repo.dirstate.write(repo.currenttransaction()) |
|
891 | repo.dirstate.write(repo.currenttransaction()) | |
892 | repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev])) |
|
892 | repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev])) | |
893 | if base is not None: |
|
893 | if base is not None: | |
894 | repo.ui.debug(" detach base %d:%s\n" % (base, repo[base])) |
|
894 | repo.ui.debug(" detach base %d:%s\n" % (base, repo[base])) | |
895 | # When collapsing in-place, the parent is the common ancestor, so we |

895 | # When collapsing in-place, the parent is the common ancestor, so we | |
896 | # have to allow merging with it. |
|
896 | # have to allow merging with it. | |
897 | stats = mergemod.update(repo, rev, True, True, base, collapse, |
|
897 | stats = mergemod.update(repo, rev, True, True, base, collapse, | |
898 | labels=['dest', 'source']) |
|
898 | labels=['dest', 'source']) | |
899 | if collapse: |
|
899 | if collapse: | |
900 | copies.duplicatecopies(repo, rev, dest) |
|
900 | copies.duplicatecopies(repo, rev, dest) | |
901 | else: |
|
901 | else: | |
902 | # If we're not using --collapse, we need to |
|
902 | # If we're not using --collapse, we need to | |
903 | # duplicate copies between the revision we're |
|
903 | # duplicate copies between the revision we're | |
904 | # rebasing and its first parent, but *not* |
|
904 | # rebasing and its first parent, but *not* | |
905 | # duplicate any copies that have already been |
|
905 | # duplicate any copies that have already been | |
906 | # performed in the destination. |
|
906 | # performed in the destination. | |
907 | p1rev = repo[rev].p1().rev() |
|
907 | p1rev = repo[rev].p1().rev() | |
908 | copies.duplicatecopies(repo, rev, p1rev, skiprev=dest) |
|
908 | copies.duplicatecopies(repo, rev, p1rev, skiprev=dest) | |
909 | return stats |
|
909 | return stats | |
910 |
|
910 | |||
911 | def adjustdest(repo, rev, dest, state): |
|
911 | def adjustdest(repo, rev, dest, state): | |
912 | """adjust rebase destination given the current rebase state |
|
912 | """adjust rebase destination given the current rebase state | |
913 |
|
913 | |||
914 | rev is what is being rebased. Return a list of two revs, which are the |
|
914 | rev is what is being rebased. Return a list of two revs, which are the | |
915 | adjusted destinations for rev's p1 and p2, respectively. If a parent is |
|
915 | adjusted destinations for rev's p1 and p2, respectively. If a parent is | |
916 | nullrev, return dest without adjustment for it. |
|
916 | nullrev, return dest without adjustment for it. | |
917 |
|
917 | |||
918 | For example, when doing rebase -r B+E -d F, rebase will first move B to B1, |
|
918 | For example, when doing rebase -r B+E -d F, rebase will first move B to B1, | |
919 | and E's destination will be adjusted from F to B1. |
|
919 | and E's destination will be adjusted from F to B1. | |
920 |
|
920 | |||
921 | B1 <- written during rebasing B |
|
921 | B1 <- written during rebasing B | |
922 | | |
|
922 | | | |
923 | F <- original destination of B, E |
|
923 | F <- original destination of B, E | |
924 | | |
|
924 | | | |
925 | | E <- rev, which is being rebased |
|
925 | | E <- rev, which is being rebased | |
926 | | | |
|
926 | | | | |
927 | | D <- prev, one parent of rev being checked |
|
927 | | D <- prev, one parent of rev being checked | |
928 | | | |
|
928 | | | | |
929 | | x <- skipped, ex. no successor or successor in (::dest) |
|
929 | | x <- skipped, ex. no successor or successor in (::dest) | |
930 | | | |
|
930 | | | | |
931 | | C |
|
931 | | C | |
932 | | | |
|
932 | | | | |
933 | | B <- rebased as B1 |
|
933 | | B <- rebased as B1 | |
934 | |/ |
|
934 | |/ | |
935 | A |
|
935 | A | |
936 |
|
936 | |||
937 | Another example involving merge changesets: with rebase -r C+G+H -d K, rebase will |

937 | Another example involving merge changesets: with rebase -r C+G+H -d K, rebase will | |
938 | first move C to C1, G to G1, and when it's checking H, the adjusted |
|
938 | first move C to C1, G to G1, and when it's checking H, the adjusted | |
939 | destinations will be [C1, G1]. |
|
939 | destinations will be [C1, G1]. | |
940 |
|
940 | |||
941 | H C1 G1 |
|
941 | H C1 G1 | |
942 | /| | / |
|
942 | /| | / | |
943 | F G |/ |
|
943 | F G |/ | |
944 | K | | -> K |
|
944 | K | | -> K | |
945 | | C D | |
|
945 | | C D | | |
946 | | |/ | |
|
946 | | |/ | | |
947 | | B | ... |
|
947 | | B | ... | |
948 | |/ |/ |
|
948 | |/ |/ | |
949 | A A |
|
949 | A A | |
950 | """ |
|
950 | """ | |
951 | result = [] |
|
951 | result = [] | |
952 | for prev in repo.changelog.parentrevs(rev): |
|
952 | for prev in repo.changelog.parentrevs(rev): | |
953 | adjusted = dest |
|
953 | adjusted = dest | |
954 | if prev != nullrev: |
|
954 | if prev != nullrev: | |
955 | # pick already rebased revs from state |
|
955 | # pick already rebased revs from state | |
956 | source = [s for s, d in state.items() if d > 0] |
|
956 | source = [s for s, d in state.items() if d > 0] | |
957 | candidate = repo.revs('max(%ld and (::%d))', source, prev).first() |
|
957 | candidate = repo.revs('max(%ld and (::%d))', source, prev).first() | |
958 | if candidate is not None: |
|
958 | if candidate is not None: | |
959 | adjusted = state[candidate] |
|
959 | adjusted = state[candidate] | |
960 | result.append(adjusted) |
|
960 | result.append(adjusted) | |
961 | return result |
|
961 | return result | |
962 |
|
962 | |||
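adjustdest() above rewrites each parent's destination once an ancestor of that parent has already been rebased. A simplified, self-contained sketch of the same idea over a toy DAG mirroring the B+E -> F example from the docstring (the names and the approximation of the revset max() pick are illustrative only):

# Toy parent map: rev -> (p1, p2); -1 plays the role of nullrev.
PARENTS = {
    'A': (-1, -1), 'B': ('A', -1), 'C': ('B', -1), 'D': ('C', -1),
    'E': ('D', -1), 'F': ('A', -1), 'B1': ('F', -1),
}

def ancestors(rev):
    seen, stack = set(), [rev]
    while stack:
        r = stack.pop()
        if r != -1 and r not in seen:
            seen.add(r)
            stack.extend(PARENTS[r])
    return seen

def adjustdest_sketch(rev, dest, state):
    # state maps already-rebased revs to their new location (None = not yet).
    result = []
    for prev in PARENTS[rev]:
        adjusted = dest
        if prev != -1:
            done = [s for s, d in state.items() if d is not None]
            rebased_ancestors = [s for s in done if s in ancestors(prev)]
            if rebased_ancestors:
                # the revset max() pick is approximated by taking any hit
                adjusted = state[rebased_ancestors[0]]
        result.append(adjusted)
    return result

# rebase -r B+E -d F: once B has been rewritten as B1, E's destination
# is adjusted from F to B1, exactly as the docstring's diagram describes.
print(adjustdest_sketch('E', 'F', {'B': 'B1', 'E': None}))   # ['B1', 'F']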
963 | def nearestrebased(repo, rev, state): |
|
963 | def nearestrebased(repo, rev, state): | |
964 | """return the nearest ancestors of rev in the rebase result""" |
|
964 | """return the nearest ancestors of rev in the rebase result""" | |
965 | rebased = [r for r in state if state[r] > nullmerge] |
|
965 | rebased = [r for r in state if state[r] > nullmerge] | |
966 | candidates = repo.revs('max(%ld and (::%d))', rebased, rev) |
|
966 | candidates = repo.revs('max(%ld and (::%d))', rebased, rev) | |
967 | if candidates: |
|
967 | if candidates: | |
968 | return state[candidates.first()] |
|
968 | return state[candidates.first()] | |
969 | else: |
|
969 | else: | |
970 | return None |
|
970 | return None | |
971 |
|
971 | |||
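nearestrebased() above picks, among the already-rebased revisions, the closest ancestor of rev and returns where that ancestor ended up. A toy sketch of that selection with plain dictionaries (the revset query is approximated by max() over revision numbers):

def nearestrebased_sketch(rev, state, ancestors_of):
    # state maps old rev -> new rev, with None for revs not moved yet.
    rebased = [r for r, new in state.items() if new is not None]
    candidates = [r for r in rebased if r in ancestors_of(rev)]
    if candidates:
        return state[max(candidates)]   # highest rev number ~ nearest ancestor
    return None

ancestry = {4: {1, 2, 3, 4}}
print(nearestrebased_sketch(4, {2: 10, 3: 11, 5: None},
                            lambda r: ancestry[r]))   # 11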
972 | def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped): |
|
972 | def _checkobsrebase(repo, ui, rebaseobsrevs, rebasesetrevs, rebaseobsskipped): | |
973 | """ |
|
973 | """ | |
974 | Abort if the rebase will create divergence or is a no-op because of markers |

974 | Abort if the rebase will create divergence or is a no-op because of markers | |
975 |
|
975 | |||
976 | `rebaseobsrevs`: set of obsolete revisions in the source |

976 | `rebaseobsrevs`: set of obsolete revisions in the source | |
977 | `rebasesetrevs`: set of revisions to be rebased from source |
|
977 | `rebasesetrevs`: set of revisions to be rebased from source | |
978 | `rebaseobsskipped`: set of revisions from source skipped because they have |
|
978 | `rebaseobsskipped`: set of revisions from source skipped because they have | |
979 | successors in destination |
|
979 | successors in destination | |
980 | """ |
|
980 | """ | |
981 | # Obsolete node with successors not in dest leads to divergence |
|
981 | # Obsolete node with successors not in dest leads to divergence | |
982 | divergenceok = ui.configbool('experimental', |
|
982 | divergenceok = ui.configbool('experimental', | |
983 | 'allowdivergence') |
|
983 | 'allowdivergence') | |
984 | divergencebasecandidates = rebaseobsrevs - rebaseobsskipped |
|
984 | divergencebasecandidates = rebaseobsrevs - rebaseobsskipped | |
985 |
|
985 | |||
986 | if divergencebasecandidates and not divergenceok: |
|
986 | if divergencebasecandidates and not divergenceok: | |
987 | divhashes = (str(repo[r]) |
|
987 | divhashes = (str(repo[r]) | |
988 | for r in divergencebasecandidates) |
|
988 | for r in divergencebasecandidates) | |
989 | msg = _("this rebase will cause " |
|
989 | msg = _("this rebase will cause " | |
990 | "divergences from: %s") |
|
990 | "divergences from: %s") | |
991 | h = _("to force the rebase please set " |
|
991 | h = _("to force the rebase please set " | |
992 | "experimental.allowdivergence=True") |
|
992 | "experimental.allowdivergence=True") | |
993 | raise error.Abort(msg % (",".join(divhashes),), hint=h) |
|
993 | raise error.Abort(msg % (",".join(divhashes),), hint=h) | |
994 |
|
994 | |||
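_checkobsrebase() above refuses to proceed when obsolete revisions without a successor in the destination would become divergent. A tiny standalone sketch of that guard, using sets and a generic exception in place of error.Abort:

def check_divergence(obsolete_in_source, skipped, allow_divergence):
    # Obsolete revs that are not skipped (i.e. have no successor already in
    # the destination) would become divergent once rebased.
    candidates = obsolete_in_source - skipped
    if candidates and not allow_divergence:
        raise RuntimeError('this rebase will cause divergences from: %s'
                           % ', '.join(sorted(candidates)))

check_divergence({'c1', 'c2'}, {'c2'}, allow_divergence=True)    # no error
try:
    check_divergence({'c1', 'c2'}, {'c2'}, allow_divergence=False)
except RuntimeError as exc:
    print(exc)   # this rebase will cause divergences from: c1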
995 | def defineparents(repo, rev, dest, state, destancestors, |
|
995 | def defineparents(repo, rev, dest, state, destancestors, | |
996 | obsoletenotrebased): |
|
996 | obsoletenotrebased): | |
997 | 'Return the new parent relationship of the revision that will be rebased' |
|
997 | 'Return the new parent relationship of the revision that will be rebased' | |
998 | parents = repo[rev].parents() |
|
998 | parents = repo[rev].parents() | |
999 | p1 = p2 = nullrev |
|
999 | p1 = p2 = nullrev | |
1000 | rp1 = None |
|
1000 | rp1 = None | |
1001 |
|
1001 | |||
1002 | p1n = parents[0].rev() |
|
1002 | p1n = parents[0].rev() | |
1003 | if p1n in destancestors: |
|
1003 | if p1n in destancestors: | |
1004 | p1 = dest |
|
1004 | p1 = dest | |
1005 | elif p1n in state: |
|
1005 | elif p1n in state: | |
1006 | if state[p1n] == nullmerge: |
|
1006 | if state[p1n] == nullmerge: | |
1007 | p1 = dest |
|
1007 | p1 = dest | |
1008 | elif state[p1n] in revskipped: |
|
1008 | elif state[p1n] in revskipped: | |
1009 | p1 = nearestrebased(repo, p1n, state) |
|
1009 | p1 = nearestrebased(repo, p1n, state) | |
1010 | if p1 is None: |
|
1010 | if p1 is None: | |
1011 | p1 = dest |
|
1011 | p1 = dest | |
1012 | else: |
|
1012 | else: | |
1013 | p1 = state[p1n] |
|
1013 | p1 = state[p1n] | |
1014 | else: # p1n external |
|
1014 | else: # p1n external | |
1015 | p1 = dest |
|
1015 | p1 = dest | |
1016 | p2 = p1n |
|
1016 | p2 = p1n | |
1017 |
|
1017 | |||
1018 | if len(parents) == 2 and parents[1].rev() not in destancestors: |
|
1018 | if len(parents) == 2 and parents[1].rev() not in destancestors: | |
1019 | p2n = parents[1].rev() |
|
1019 | p2n = parents[1].rev() | |
1020 | # interesting second parent |
|
1020 | # interesting second parent | |
1021 | if p2n in state: |
|
1021 | if p2n in state: | |
1022 | if p1 == dest: # p1n in destancestors or external |
|
1022 | if p1 == dest: # p1n in destancestors or external | |
1023 | p1 = state[p2n] |
|
1023 | p1 = state[p2n] | |
1024 | if p1 == revprecursor: |
|
1024 | if p1 == revprecursor: | |
1025 | rp1 = obsoletenotrebased[p2n] |
|
1025 | rp1 = obsoletenotrebased[p2n] | |
1026 | elif state[p2n] in revskipped: |
|
1026 | elif state[p2n] in revskipped: | |
1027 | p2 = nearestrebased(repo, p2n, state) |
|
1027 | p2 = nearestrebased(repo, p2n, state) | |
1028 | if p2 is None: |
|
1028 | if p2 is None: | |
1029 | # no ancestors rebased yet, detach |
|
1029 | # no ancestors rebased yet, detach | |
1030 | p2 = dest |
|
1030 | p2 = dest | |
1031 | else: |
|
1031 | else: | |
1032 | p2 = state[p2n] |
|
1032 | p2 = state[p2n] | |
1033 | else: # p2n external |
|
1033 | else: # p2n external | |
1034 | if p2 != nullrev: # p1n external too => rev is a merged revision |
|
1034 | if p2 != nullrev: # p1n external too => rev is a merged revision | |
1035 | raise error.Abort(_('cannot use revision %d as base, result ' |
|
1035 | raise error.Abort(_('cannot use revision %d as base, result ' | |
1036 | 'would have 3 parents') % rev) |
|
1036 | 'would have 3 parents') % rev) | |
1037 | p2 = p2n |
|
1037 | p2 = p2n | |
1038 | repo.ui.debug(" future parents are %d and %d\n" % |
|
1038 | repo.ui.debug(" future parents are %d and %d\n" % | |
1039 | (repo[rp1 or p1].rev(), repo[p2].rev())) |
|
1039 | (repo[rp1 or p1].rev(), repo[p2].rev())) | |
1040 |
|
1040 | |||
1041 | if not any(p.rev() in state for p in parents): |
|
1041 | if not any(p.rev() in state for p in parents): | |
1042 | # Case (1) root changeset of a non-detaching rebase set. |
|
1042 | # Case (1) root changeset of a non-detaching rebase set. | |
1043 | # Let the merge mechanism find the base itself. |
|
1043 | # Let the merge mechanism find the base itself. | |
1044 | base = None |
|
1044 | base = None | |
1045 | elif not repo[rev].p2(): |
|
1045 | elif not repo[rev].p2(): | |
1046 | # Case (2) detaching the node with a single parent, use this parent |
|
1046 | # Case (2) detaching the node with a single parent, use this parent | |
1047 | base = repo[rev].p1().rev() |
|
1047 | base = repo[rev].p1().rev() | |
1048 | else: |
|
1048 | else: | |
1049 | # Assuming there is a p1, this is the case where there also is a p2. |
|
1049 | # Assuming there is a p1, this is the case where there also is a p2. | |
1050 | # We are thus rebasing a merge and need to pick the right merge base. |
|
1050 | # We are thus rebasing a merge and need to pick the right merge base. | |
1051 | # |
|
1051 | # | |
1052 | # Imagine we have: |
|
1052 | # Imagine we have: | |
1053 | # - M: current rebase revision in this step |
|
1053 | # - M: current rebase revision in this step | |
1054 | # - A: one parent of M |
|
1054 | # - A: one parent of M | |
1055 | # - B: other parent of M |
|
1055 | # - B: other parent of M | |
1056 | # - D: destination of this merge step (p1 var) |
|
1056 | # - D: destination of this merge step (p1 var) | |
1057 | # |
|
1057 | # | |
1058 | # Consider the case where D is a descendant of A or B and the other is |
|
1058 | # Consider the case where D is a descendant of A or B and the other is | |
1059 | # 'outside'. In this case, the right merge base is the D ancestor. |
|
1059 | # 'outside'. In this case, the right merge base is the D ancestor. | |
1060 | # |
|
1060 | # | |
1061 | # An informal proof, assuming A is 'outside' and B is the D ancestor: |
|
1061 | # An informal proof, assuming A is 'outside' and B is the D ancestor: | |
1062 | # |
|
1062 | # | |
1063 | # If we pick B as the base, the merge involves: |
|
1063 | # If we pick B as the base, the merge involves: | |
1064 | # - changes from B to M (actual changeset payload) |
|
1064 | # - changes from B to M (actual changeset payload) | |
1065 | # - changes from B to D (induced by rebase) as D is a rebased |
|
1065 | # - changes from B to D (induced by rebase) as D is a rebased | |
1066 | # version of B |

1066 | # version of B | |
1067 | # Which exactly represents the rebase operation. |

1067 | # Which exactly represents the rebase operation. | |
1068 | # |
|
1068 | # | |
1069 | # If we pick A as the base, the merge involves: |
|
1069 | # If we pick A as the base, the merge involves: | |
1070 | # - changes from A to M (actual changeset payload) |
|
1070 | # - changes from A to M (actual changeset payload) | |
1071 | # - changes from A to D (which include changes between unrelated A and B |

1071 | # - changes from A to D (which include changes between unrelated A and B | |
1072 | # plus changes induced by rebase) |
|
1072 | # plus changes induced by rebase) | |
1073 | # Which does not represent anything sensible and creates a lot of |
|
1073 | # Which does not represent anything sensible and creates a lot of | |
1074 | # conflicts. A is thus not the right choice - B is. |
|
1074 | # conflicts. A is thus not the right choice - B is. | |
1075 | # |
|
1075 | # | |
1076 | # Note: The base found in this 'proof' is only correct in the specified |
|
1076 | # Note: The base found in this 'proof' is only correct in the specified | |
1077 | # case. This base does not make sense if D is not a descendant of A or B |

1077 | # case. This base does not make sense if D is not a descendant of A or B | |
1078 | # or if the other parent is not 'outside' (especially not if the other |

1078 | # or if the other parent is not 'outside' (especially not if the other | |
1079 | # parent has been rebased). The current implementation does not |
|
1079 | # parent has been rebased). The current implementation does not | |
1080 | # make it feasible to consider different cases separately. In these |
|
1080 | # make it feasible to consider different cases separately. In these | |
1081 | # other cases we currently just leave it to the user to correctly |
|
1081 | # other cases we currently just leave it to the user to correctly | |
1082 | # resolve an impossible merge using a wrong ancestor. |
|
1082 | # resolve an impossible merge using a wrong ancestor. | |
1083 | # |
|
1083 | # | |
1084 | # xx, p1 could be -4, and both parents could probably be -4... |
|
1084 | # xx, p1 could be -4, and both parents could probably be -4... | |
1085 | for p in repo[rev].parents(): |
|
1085 | for p in repo[rev].parents(): | |
1086 | if state.get(p.rev()) == p1: |
|
1086 | if state.get(p.rev()) == p1: | |
1087 | base = p.rev() |
|
1087 | base = p.rev() | |
1088 | break |
|
1088 | break | |
1089 | else: # fallback when base not found |
|
1089 | else: # fallback when base not found | |
1090 | base = None |
|
1090 | base = None | |
1091 |
|
1091 | |||
1092 | # Raise because this function is called wrong (see issue 4106) |
|
1092 | # Raise because this function is called wrong (see issue 4106) | |
1093 | raise AssertionError('no base found to rebase on ' |
|
1093 | raise AssertionError('no base found to rebase on ' | |
1094 | '(defineparents called wrong)') |
|
1094 | '(defineparents called wrong)') | |
1095 | return rp1 or p1, p2, base |
|
1095 | return rp1 or p1, p2, base | |
1096 |
|
1096 | |||
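The long comment in defineparents() above argues that, when rebasing a merge, the right merge base is the parent whose rebased counterpart is the new p1 (the "B, not A" case). A toy sketch of that final selection loop, with plain strings standing in for revisions:

def pick_merge_base(parents_of_rev, state, new_p1):
    # Use the parent whose rebased counterpart is the revision we are
    # merging onto; otherwise the function was called incorrectly.
    for p in parents_of_rev:
        if state.get(p) == new_p1:
            return p
    raise AssertionError('no base found to rebase on '
                         '(defineparents called wrong)')

# M has parents A (outside the rebase set) and B (already rebased to D).
print(pick_merge_base(['A', 'B'], {'B': 'D'}, new_p1='D'))   # B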
1097 | def isagitpatch(repo, patchname): |
|
1097 | def isagitpatch(repo, patchname): | |
1098 | 'Return true if the given patch is in git format' |
|
1098 | 'Return true if the given patch is in git format' | |
1099 | mqpatch = os.path.join(repo.mq.path, patchname) |
|
1099 | mqpatch = os.path.join(repo.mq.path, patchname) | |
1100 | for line in patch.linereader(file(mqpatch, 'rb')): |
|
1100 | for line in patch.linereader(file(mqpatch, 'rb')): | |
1101 | if line.startswith('diff --git'): |
|
1101 | if line.startswith('diff --git'): | |
1102 | return True |
|
1102 | return True | |
1103 | return False |
|
1103 | return False | |
1104 |
|
1104 | |||
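isagitpatch() above simply scans the patch for a 'diff --git' header. The same test on an in-memory string, as a self-contained sketch (avoiding the Python 2 file() builtin used above):

def looks_like_git_patch(patch_text):
    # True when any line carries the git extended-diff header.
    return any(line.startswith('diff --git')
               for line in patch_text.splitlines())

plain = '--- a/foo\n+++ b/foo\n@@ -1 +1 @@\n-old\n+new\n'
git = 'diff --git a/foo b/bar\nrename from foo\nrename to bar\n'
print(looks_like_git_patch(plain), looks_like_git_patch(git))   # False True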
1105 | def updatemq(repo, state, skipped, **opts): |
|
1105 | def updatemq(repo, state, skipped, **opts): | |
1106 | 'Update rebased mq patches - finalize and then import them' |
|
1106 | 'Update rebased mq patches - finalize and then import them' | |
1107 | mqrebase = {} |
|
1107 | mqrebase = {} | |
1108 | mq = repo.mq |
|
1108 | mq = repo.mq | |
1109 | original_series = mq.fullseries[:] |
|
1109 | original_series = mq.fullseries[:] | |
1110 | skippedpatches = set() |
|
1110 | skippedpatches = set() | |
1111 |
|
1111 | |||
1112 | for p in mq.applied: |
|
1112 | for p in mq.applied: | |
1113 | rev = repo[p.node].rev() |
|
1113 | rev = repo[p.node].rev() | |
1114 | if rev in state: |
|
1114 | if rev in state: | |
1115 | repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' % |
|
1115 | repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' % | |
1116 | (rev, p.name)) |
|
1116 | (rev, p.name)) | |
1117 | mqrebase[rev] = (p.name, isagitpatch(repo, p.name)) |
|
1117 | mqrebase[rev] = (p.name, isagitpatch(repo, p.name)) | |
1118 | else: |
|
1118 | else: | |
1119 | # Applied but not rebased, not sure this should happen |
|
1119 | # Applied but not rebased, not sure this should happen | |
1120 | skippedpatches.add(p.name) |
|
1120 | skippedpatches.add(p.name) | |
1121 |
|
1121 | |||
1122 | if mqrebase: |
|
1122 | if mqrebase: | |
1123 | mq.finish(repo, mqrebase.keys()) |
|
1123 | mq.finish(repo, mqrebase.keys()) | |
1124 |
|
1124 | |||
1125 | # We must start import from the newest revision |
|
1125 | # We must start import from the newest revision | |
1126 | for rev in sorted(mqrebase, reverse=True): |
|
1126 | for rev in sorted(mqrebase, reverse=True): | |
1127 | if rev not in skipped: |
|
1127 | if rev not in skipped: | |
1128 | name, isgit = mqrebase[rev] |
|
1128 | name, isgit = mqrebase[rev] | |
1129 | repo.ui.note(_('updating mq patch %s to %s:%s\n') % |
|
1129 | repo.ui.note(_('updating mq patch %s to %s:%s\n') % | |
1130 | (name, state[rev], repo[state[rev]])) |
|
1130 | (name, state[rev], repo[state[rev]])) | |
1131 | mq.qimport(repo, (), patchname=name, git=isgit, |
|
1131 | mq.qimport(repo, (), patchname=name, git=isgit, | |
1132 | rev=[str(state[rev])]) |
|
1132 | rev=[str(state[rev])]) | |
1133 | else: |
|
1133 | else: | |
1134 | # Rebased and skipped |
|
1134 | # Rebased and skipped | |
1135 | skippedpatches.add(mqrebase[rev][0]) |
|
1135 | skippedpatches.add(mqrebase[rev][0]) | |
1136 |
|
1136 | |||
1137 | # Patches were either applied and rebased and imported in |
|
1137 | # Patches were either applied and rebased and imported in | |
1138 | # order, applied and removed or unapplied. Discard the removed |
|
1138 | # order, applied and removed or unapplied. Discard the removed | |
1139 | # ones while preserving the original series order and guards. |
|
1139 | # ones while preserving the original series order and guards. | |
1140 | newseries = [s for s in original_series |
|
1140 | newseries = [s for s in original_series | |
1141 | if mq.guard_re.split(s, 1)[0] not in skippedpatches] |
|
1141 | if mq.guard_re.split(s, 1)[0] not in skippedpatches] | |
1142 | mq.fullseries[:] = newseries |
|
1142 | mq.fullseries[:] = newseries | |
1143 | mq.seriesdirty = True |
|
1143 | mq.seriesdirty = True | |
1144 | mq.savedirty() |
|
1144 | mq.savedirty() | |
1145 |
|
1145 | |||
1146 | def storecollapsemsg(repo, collapsemsg): |
|
1146 | def storecollapsemsg(repo, collapsemsg): | |
1147 | 'Store the collapse message to allow recovery' |
|
1147 | 'Store the collapse message to allow recovery' | |
1148 | collapsemsg = collapsemsg or '' |
|
1148 | collapsemsg = collapsemsg or '' | |
1149 | f = repo.vfs("last-message.txt", "w") |
|
1149 | f = repo.vfs("last-message.txt", "w") | |
1150 | f.write("%s\n" % collapsemsg) |
|
1150 | f.write("%s\n" % collapsemsg) | |
1151 | f.close() |
|
1151 | f.close() | |
1152 |
|
1152 | |||
1153 | def clearcollapsemsg(repo): |
|
1153 | def clearcollapsemsg(repo): | |
1154 | 'Remove collapse message file' |
|
1154 | 'Remove collapse message file' | |
1155 | repo.vfs.unlinkpath("last-message.txt", ignoremissing=True) |
|
1155 | repo.vfs.unlinkpath("last-message.txt", ignoremissing=True) | |
1156 |
|
1156 | |||
1157 | def restorecollapsemsg(repo, isabort): |
|
1157 | def restorecollapsemsg(repo, isabort): | |
1158 | 'Restore previously stored collapse message' |
|
1158 | 'Restore previously stored collapse message' | |
1159 | try: |
|
1159 | try: | |
1160 | f = repo.vfs("last-message.txt") |
|
1160 | f = repo.vfs("last-message.txt") | |
1161 | collapsemsg = f.readline().strip() |
|
1161 | collapsemsg = f.readline().strip() | |
1162 | f.close() |
|
1162 | f.close() | |
1163 | except IOError as err: |
|
1163 | except IOError as err: | |
1164 | if err.errno != errno.ENOENT: |
|
1164 | if err.errno != errno.ENOENT: | |
1165 | raise |
|
1165 | raise | |
1166 | if isabort: |
|
1166 | if isabort: | |
1167 | # Oh well, just abort like normal |
|
1167 | # Oh well, just abort like normal | |
1168 | collapsemsg = '' |
|
1168 | collapsemsg = '' | |
1169 | else: |
|
1169 | else: | |
1170 | raise error.Abort(_('missing .hg/last-message.txt for rebase')) |
|
1170 | raise error.Abort(_('missing .hg/last-message.txt for rebase')) | |
1171 | return collapsemsg |
|
1171 | return collapsemsg | |
1172 |
|
1172 | |||
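restorecollapsemsg() above tolerates a missing last-message.txt only during --abort. A generic sketch of that errno.ENOENT pattern, using open() and RuntimeError in place of the repo vfs and error.Abort:

import errno

def read_last_message(path, isabort):
    try:
        with open(path) as f:
            return f.readline().strip()
    except IOError as err:             # FileNotFoundError under Python 3
        if err.errno != errno.ENOENT:
            raise
        if isabort:
            return ''                  # aborting: fall back to an empty message
        raise RuntimeError('missing %s for rebase' % path)

print(repr(read_last_message('/nonexistent/last-message.txt', isabort=True)))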
1173 | def clearstatus(repo): |
|
1173 | def clearstatus(repo): | |
1174 | 'Remove the status files' |
|
1174 | 'Remove the status files' | |
1175 | _clearrebasesetvisibiliy(repo) |
|
1175 | _clearrebasesetvisibiliy(repo) | |
1176 | # Make sure the active transaction won't write the state file |
|
1176 | # Make sure the active transaction won't write the state file | |
1177 | tr = repo.currenttransaction() |
|
1177 | tr = repo.currenttransaction() | |
1178 | if tr: |
|
1178 | if tr: | |
1179 | tr.removefilegenerator('rebasestate') |
|
1179 | tr.removefilegenerator('rebasestate') | |
1180 | repo.vfs.unlinkpath("rebasestate", ignoremissing=True) |
|
1180 | repo.vfs.unlinkpath("rebasestate", ignoremissing=True) | |
1181 |
|
1181 | |||
1182 | def needupdate(repo, state): |
|
1182 | def needupdate(repo, state): | |
1183 | '''check whether we should `update --clean` away from a merge, or if |
|
1183 | '''check whether we should `update --clean` away from a merge, or if | |
1184 | somehow the working dir got forcibly updated, e.g. by older hg''' |
|
1184 | somehow the working dir got forcibly updated, e.g. by older hg''' | |
1185 | parents = [p.rev() for p in repo[None].parents()] |
|
1185 | parents = [p.rev() for p in repo[None].parents()] | |
1186 |
|
1186 | |||
1187 | # Are we in a merge state at all? |
|
1187 | # Are we in a merge state at all? | |
1188 | if len(parents) < 2: |
|
1188 | if len(parents) < 2: | |
1189 | return False |
|
1189 | return False | |
1190 |
|
1190 | |||
1191 | # We should be standing on the first as-of-yet unrebased commit. |
|
1191 | # We should be standing on the first as-of-yet unrebased commit. | |
1192 | firstunrebased = min([old for old, new in state.iteritems() |
|
1192 | firstunrebased = min([old for old, new in state.iteritems() | |
1193 | if new == nullrev]) |
|
1193 | if new == nullrev]) | |
1194 | if firstunrebased in parents: |
|
1194 | if firstunrebased in parents: | |
1195 | return True |
|
1195 | return True | |
1196 |
|
1196 | |||
1197 | return False |
|
1197 | return False | |
1198 |
|
1198 | |||
1199 | def abort(repo, originalwd, dest, state, activebookmark=None): |
|
1199 | def abort(repo, originalwd, dest, state, activebookmark=None): | |
1200 | '''Restore the repository to its original state. Additional args: |
|
1200 | '''Restore the repository to its original state. Additional args: | |
1201 |
|
1201 | |||
1202 | activebookmark: the name of the bookmark that should be active after the |
|
1202 | activebookmark: the name of the bookmark that should be active after the | |
1203 | restore''' |
|
1203 | restore''' | |
1204 |
|
1204 | |||
1205 | try: |
|
1205 | try: | |
1206 | # If the first commits in the rebased set get skipped during the rebase, |
|
1206 | # If the first commits in the rebased set get skipped during the rebase, | |
1207 | # their values within the state mapping will be the dest rev id. The |
|
1207 | # their values within the state mapping will be the dest rev id. The | |
1208 | # dstates list must not contain the dest rev (issue4896) |

1208 | # dstates list must not contain the dest rev (issue4896) | |
1209 | dstates = [s for s in state.values() if s >= 0 and s != dest] |
|
1209 | dstates = [s for s in state.values() if s >= 0 and s != dest] | |
1210 | immutable = [d for d in dstates if not repo[d].mutable()] |
|
1210 | immutable = [d for d in dstates if not repo[d].mutable()] | |
1211 | cleanup = True |
|
1211 | cleanup = True | |
1212 | if immutable: |
|
1212 | if immutable: | |
1213 | repo.ui.warn(_("warning: can't clean up public changesets %s\n") |
|
1213 | repo.ui.warn(_("warning: can't clean up public changesets %s\n") | |
1214 | % ', '.join(str(repo[r]) for r in immutable), |
|
1214 | % ', '.join(str(repo[r]) for r in immutable), | |
1215 | hint=_("see 'hg help phases' for details")) |
|
1215 | hint=_("see 'hg help phases' for details")) | |
1216 | cleanup = False |
|
1216 | cleanup = False | |
1217 |
|
1217 | |||
1218 | descendants = set() |
|
1218 | descendants = set() | |
1219 | if dstates: |
|
1219 | if dstates: | |
1220 | descendants = set(repo.changelog.descendants(dstates)) |
|
1220 | descendants = set(repo.changelog.descendants(dstates)) | |
1221 | if descendants - set(dstates): |
|
1221 | if descendants - set(dstates): | |
1222 | repo.ui.warn(_("warning: new changesets detected on destination " |
|
1222 | repo.ui.warn(_("warning: new changesets detected on destination " | |
1223 | "branch, can't strip\n")) |
|
1223 | "branch, can't strip\n")) | |
1224 | cleanup = False |
|
1224 | cleanup = False | |
1225 |
|
1225 | |||
1226 | if cleanup: |
|
1226 | if cleanup: | |
1227 | shouldupdate = False |
|
1227 | shouldupdate = False | |
1228 | rebased = filter(lambda x: x >= 0 and x != dest, state.values()) |
|
1228 | rebased = filter(lambda x: x >= 0 and x != dest, state.values()) | |
1229 | if rebased: |
|
1229 | if rebased: | |
1230 | strippoints = [ |
|
1230 | strippoints = [ | |
1231 | c.node() for c in repo.set('roots(%ld)', rebased)] |
|
1231 | c.node() for c in repo.set('roots(%ld)', rebased)] | |
1232 |
|
1232 | |||
1233 | updateifonnodes = set(rebased) |
|
1233 | updateifonnodes = set(rebased) | |
1234 | updateifonnodes.add(dest) |
|
1234 | updateifonnodes.add(dest) | |
1235 | updateifonnodes.add(originalwd) |
|
1235 | updateifonnodes.add(originalwd) | |
1236 | shouldupdate = repo['.'].rev() in updateifonnodes |
|
1236 | shouldupdate = repo['.'].rev() in updateifonnodes | |
1237 |
|
1237 | |||
1238 | # Update away from the rebase if necessary |
|
1238 | # Update away from the rebase if necessary | |
1239 | if shouldupdate or needupdate(repo, state): |
|
1239 | if shouldupdate or needupdate(repo, state): | |
1240 | mergemod.update(repo, originalwd, False, True) |
|
1240 | mergemod.update(repo, originalwd, False, True) | |
1241 |
|
1241 | |||
1242 | # Strip from the first rebased revision |
|
1242 | # Strip from the first rebased revision | |
1243 | if rebased: |
|
1243 | if rebased: | |
1244 | # no backup of rebased cset versions needed |
|
1244 | # no backup of rebased cset versions needed | |
1245 | repair.strip(repo.ui, repo, strippoints) |
|
1245 | repair.strip(repo.ui, repo, strippoints) | |
1246 |
|
1246 | |||
1247 | if activebookmark and activebookmark in repo._bookmarks: |
|
1247 | if activebookmark and activebookmark in repo._bookmarks: | |
1248 | bookmarks.activate(repo, activebookmark) |
|
1248 | bookmarks.activate(repo, activebookmark) | |
1249 |
|
1249 | |||
1250 | finally: |
|
1250 | finally: | |
1251 | clearstatus(repo) |
|
1251 | clearstatus(repo) | |
1252 | clearcollapsemsg(repo) |
|
1252 | clearcollapsemsg(repo) | |
1253 | repo.ui.warn(_('rebase aborted\n')) |
|
1253 | repo.ui.warn(_('rebase aborted\n')) | |
1254 | return 0 |
|
1254 | return 0 | |
1255 |
|
1255 | |||
1256 | def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased): |
|
1256 | def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased): | |
1257 | '''Define which revisions are going to be rebased and where |
|
1257 | '''Define which revisions are going to be rebased and where | |
1258 |
|
1258 | |||
1259 | repo: repo |
|
1259 | repo: repo | |
1260 | dest: context |
|
1260 | dest: context | |
1261 | rebaseset: set of rev |
|
1261 | rebaseset: set of rev | |
1262 | ''' |
|
1262 | ''' | |
1263 | originalwd = repo['.'].rev() |
|
1263 | originalwd = repo['.'].rev() | |
1264 | _setrebasesetvisibility(repo, set(rebaseset) | {originalwd}) |
|
1264 | _setrebasesetvisibility(repo, set(rebaseset) | {originalwd}) | |
1265 |
|
1265 | |||
1266 | # This check isn't strictly necessary, since mq detects commits over an |
|
1266 | # This check isn't strictly necessary, since mq detects commits over an | |
1267 | # applied patch. But it prevents messing up the working directory when |
|
1267 | # applied patch. But it prevents messing up the working directory when | |
1268 | # a partially completed rebase is blocked by mq. |
|
1268 | # a partially completed rebase is blocked by mq. | |
1269 | if 'qtip' in repo.tags() and (dest.node() in |
|
1269 | if 'qtip' in repo.tags() and (dest.node() in | |
1270 | [s.node for s in repo.mq.applied]): |
|
1270 | [s.node for s in repo.mq.applied]): | |
1271 | raise error.Abort(_('cannot rebase onto an applied mq patch')) |
|
1271 | raise error.Abort(_('cannot rebase onto an applied mq patch')) | |
1272 |
|
1272 | |||
1273 | roots = list(repo.set('roots(%ld)', rebaseset)) |
|
1273 | roots = list(repo.set('roots(%ld)', rebaseset)) | |
1274 | if not roots: |
|
1274 | if not roots: | |
1275 | raise error.Abort(_('no matching revisions')) |
|
1275 | raise error.Abort(_('no matching revisions')) | |
1276 | roots.sort() |
|
1276 | roots.sort() | |
1277 | state = dict.fromkeys(rebaseset, revtodo) |
|
1277 | state = dict.fromkeys(rebaseset, revtodo) | |
1278 | detachset = set() |
|
1278 | detachset = set() | |
1279 | emptyrebase = True |
|
1279 | emptyrebase = True | |
1280 | for root in roots: |
|
1280 | for root in roots: | |
1281 | commonbase = root.ancestor(dest) |
|
1281 | commonbase = root.ancestor(dest) | |
1282 | if commonbase == root: |
|
1282 | if commonbase == root: | |
1283 | raise error.Abort(_('source is ancestor of destination')) |
|
1283 | raise error.Abort(_('source is ancestor of destination')) | |
1284 | if commonbase == dest: |
|
1284 | if commonbase == dest: | |
1285 | wctx = repo[None] |
|
1285 | wctx = repo[None] | |
1286 | if dest == wctx.p1(): |
|
1286 | if dest == wctx.p1(): | |
1287 | # when rebasing to '.', it will use the current wd branch name |
|
1287 | # when rebasing to '.', it will use the current wd branch name | |
1288 | samebranch = root.branch() == wctx.branch() |
|
1288 | samebranch = root.branch() == wctx.branch() | |
1289 | else: |
|
1289 | else: | |
1290 | samebranch = root.branch() == dest.branch() |
|
1290 | samebranch = root.branch() == dest.branch() | |
1291 | if not collapse and samebranch and dest in root.parents(): |
|
1291 | if not collapse and samebranch and dest in root.parents(): | |
1292 | # mark the revision as done by setting its new revision |
|
1292 | # mark the revision as done by setting its new revision | |
1293 | # equal to its old (current) revision |

1293 | # equal to its old (current) revision | |
1294 | state[root.rev()] = root.rev() |
|
1294 | state[root.rev()] = root.rev() | |
1295 | repo.ui.debug('source is a child of destination\n') |
|
1295 | repo.ui.debug('source is a child of destination\n') | |
1296 | continue |
|
1296 | continue | |
1297 |
|
1297 | |||
1298 | emptyrebase = False |
|
1298 | emptyrebase = False | |
1299 | repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root)) |
|
1299 | repo.ui.debug('rebase onto %s starting from %s\n' % (dest, root)) | |
1300 | # Rebase tries to turn <dest> into a parent of <root> while |
|
1300 | # Rebase tries to turn <dest> into a parent of <root> while | |
1301 | # preserving the number of parents of rebased changesets: |
|
1301 | # preserving the number of parents of rebased changesets: | |
1302 | # |
|
1302 | # | |
1303 | # - A changeset with a single parent will always be rebased as a |
|
1303 | # - A changeset with a single parent will always be rebased as a | |
1304 | # changeset with a single parent. |
|
1304 | # changeset with a single parent. | |
1305 | # |
|
1305 | # | |
1306 | # - A merge will be rebased as a merge unless its parents are both |

1306 | # - A merge will be rebased as a merge unless its parents are both | |
1307 | # ancestors of <dest> or are themselves in the rebased set and |
|
1307 | # ancestors of <dest> or are themselves in the rebased set and | |
1308 | # pruned while rebased. |
|
1308 | # pruned while rebased. | |
1309 | # |
|
1309 | # | |
1310 | # If one parent of <root> is an ancestor of <dest>, the rebased |
|
1310 | # If one parent of <root> is an ancestor of <dest>, the rebased | |
1311 | # version of this parent will be <dest>. This is always true with |
|
1311 | # version of this parent will be <dest>. This is always true with | |
1312 | # --base option. |
|
1312 | # --base option. | |
1313 | # |
|
1313 | # | |
1314 | # Otherwise, we need to *replace* the original parents with |
|
1314 | # Otherwise, we need to *replace* the original parents with | |
1315 | # <dest>. This "detaches" the rebased set from its former location |
|
1315 | # <dest>. This "detaches" the rebased set from its former location | |
1316 | # and rebases it onto <dest>. Changes introduced by ancestors of |
|
1316 | # and rebases it onto <dest>. Changes introduced by ancestors of | |
1317 | # <root> not common with <dest> (the detachset, marked as |
|
1317 | # <root> not common with <dest> (the detachset, marked as | |
1318 | # nullmerge) are "removed" from the rebased changesets. |
|
1318 | # nullmerge) are "removed" from the rebased changesets. | |
1319 | # |
|
1319 | # | |
1320 | # - If <root> has a single parent, set it to <dest>. |
|
1320 | # - If <root> has a single parent, set it to <dest>. | |
1321 | # |
|
1321 | # | |
1322 | # - If <root> is a merge, we cannot decide which parent to |
|
1322 | # - If <root> is a merge, we cannot decide which parent to | |
1323 | # replace, the rebase operation is not clearly defined. |
|
1323 | # replace, the rebase operation is not clearly defined. | |
1324 | # |
|
1324 | # | |
1325 | # The table below sums up this behavior: |
|
1325 | # The table below sums up this behavior: | |
1326 | # |
|
1326 | # | |
1327 | # +------------------+----------------------+-------------------------+ |
|
1327 | # +------------------+----------------------+-------------------------+ | |
1328 | # | | one parent | merge | |
|
1328 | # | | one parent | merge | | |
1329 | # +------------------+----------------------+-------------------------+ |
|
1329 | # +------------------+----------------------+-------------------------+ | |
1330 | # | parent in | new parent is <dest> | parents in ::<dest> are | |
|
1330 | # | parent in | new parent is <dest> | parents in ::<dest> are | | |
1331 | # | ::<dest> | | remapped to <dest> | |
|
1331 | # | ::<dest> | | remapped to <dest> | | |
1332 | # +------------------+----------------------+-------------------------+ |
|
1332 | # +------------------+----------------------+-------------------------+ | |
1333 | # | unrelated source | new parent is <dest> | ambiguous, abort | |
|
1333 | # | unrelated source | new parent is <dest> | ambiguous, abort | | |
1334 | # +------------------+----------------------+-------------------------+ |
|
1334 | # +------------------+----------------------+-------------------------+ | |
1335 | # |
|
1335 | # | |
1336 | # The actual abort is handled by `defineparents` |
|
1336 | # The actual abort is handled by `defineparents` | |
1337 | if len(root.parents()) <= 1: |
|
1337 | if len(root.parents()) <= 1: | |
1338 | # ancestors of <root> not ancestors of <dest> |
|
1338 | # ancestors of <root> not ancestors of <dest> | |
1339 | detachset.update(repo.changelog.findmissingrevs([commonbase.rev()], |
|
1339 | detachset.update(repo.changelog.findmissingrevs([commonbase.rev()], | |
1340 | [root.rev()])) |
|
1340 | [root.rev()])) | |
1341 | if emptyrebase: |
|
1341 | if emptyrebase: | |
1342 | return None |
|
1342 | return None | |
1343 | for rev in sorted(state): |
|
1343 | for rev in sorted(state): | |
1344 | parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev] |
|
1344 | parents = [p for p in repo.changelog.parentrevs(rev) if p != nullrev] | |
1345 | # if all parents of this revision are done, then so is this revision |
|
1345 | # if all parents of this revision are done, then so is this revision | |
1346 | if parents and all((state.get(p) == p for p in parents)): |
|
1346 | if parents and all((state.get(p) == p for p in parents)): | |
1347 | state[rev] = rev |
|
1347 | state[rev] = rev | |
1348 | for r in detachset: |
|
1348 | for r in detachset: | |
1349 | if r not in state: |
|
1349 | if r not in state: | |
1350 | state[r] = nullmerge |
|
1350 | state[r] = nullmerge | |
1351 | if len(roots) > 1: |
|
1351 | if len(roots) > 1: | |
1352 | # If we have multiple roots, we may have "holes" in the rebase set. |

1352 | # If we have multiple roots, we may have "holes" in the rebase set. | |
1353 | # Rebase roots that descend from those "holes" should not be detached as |

1353 | # Rebase roots that descend from those "holes" should not be detached as | |
1354 | # other roots are. We use the special `revignored` to inform rebase that |

1354 | # other roots are. We use the special `revignored` to inform rebase that | |
1355 | # the revision should be ignored but that `defineparents` should search for |

1355 | # the revision should be ignored but that `defineparents` should search for | |
1356 | # a rebase destination that makes sense regarding the rebased topology. |

1356 | # a rebase destination that makes sense regarding the rebased topology. | |
1357 | rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset)) |
|
1357 | rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset)) | |
1358 | for ignored in set(rebasedomain) - set(rebaseset): |
|
1358 | for ignored in set(rebasedomain) - set(rebaseset): | |
1359 | state[ignored] = revignored |
|
1359 | state[ignored] = revignored | |
1360 | for r in obsoletenotrebased: |
|
1360 | for r in obsoletenotrebased: | |
1361 | if obsoletenotrebased[r] is None: |
|
1361 | if obsoletenotrebased[r] is None: | |
1362 | state[r] = revpruned |
|
1362 | state[r] = revpruned | |
1363 | else: |
|
1363 | else: | |
1364 | state[r] = revprecursor |
|
1364 | state[r] = revprecursor | |
1365 | return originalwd, dest.rev(), state |
|
1365 | return originalwd, dest.rev(), state | |
1366 |
|
1366 | |||
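buildstate() above returns a mapping from each affected revision to either its future state or a sentinel (still to do, detached ancestor, ignored hole between roots). A toy sketch of how that mapping is assembled, with strings standing in for the numeric sentinels used by the extension:

# The real sentinels (revtodo, nullmerge, revignored, revprecursor, revpruned)
# are numeric module-level constants; strings stand in for them here.
REVTODO, NULLMERGE, REVIGNORED = 'todo', 'nullmerge', 'ignored'

rebaseset = ['C', 'D', 'F']     # revisions selected for rebasing
detachset = ['B']               # ancestors dropped when detaching the set
holes = ['E']                   # revisions between multiple roots, not selected

state = {r: REVTODO for r in rebaseset}
state.update({r: NULLMERGE for r in detachset if r not in state})
state.update({r: REVIGNORED for r in holes if r not in state})
print(sorted(state.items()))
# [('B', 'nullmerge'), ('C', 'todo'), ('D', 'todo'), ('E', 'ignored'), ('F', 'todo')]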
1367 | def clearrebased(ui, repo, dest, state, skipped, collapsedas=None): |
|
1367 | def clearrebased(ui, repo, dest, state, skipped, collapsedas=None): | |
1368 | """dispose of rebased revision at the end of the rebase |
|
1368 | """dispose of rebased revision at the end of the rebase | |
1369 |
|
1369 | |||
1370 | If `collapsedas` is not None, the rebase was a collapse whose result is the |

1370 | If `collapsedas` is not None, the rebase was a collapse whose result is the | |
1371 | `collapsedas` node.""" |
|
1371 | `collapsedas` node.""" | |
1372 | tonode = repo.changelog.node |
|
1372 | tonode = repo.changelog.node | |
1373 | # Move bookmark of skipped nodes to destination. This cannot be handled |
|
1373 | # Move bookmark of skipped nodes to destination. This cannot be handled | |
1374 | # by scmutil.cleanupnodes since it will treat rev as removed (no successor) |
|
1374 | # by scmutil.cleanupnodes since it will treat rev as removed (no successor) | |
1375 | # and move bookmark backwards. |
|
1375 | # and move bookmark backwards. | |
1376 | bmchanges = [(name, tonode(max(adjustdest(repo, rev, dest, state)))) |
|
1376 | bmchanges = [(name, tonode(max(adjustdest(repo, rev, dest, state)))) | |
1377 | for rev in skipped |
|
1377 | for rev in skipped | |
1378 | for name in repo.nodebookmarks(tonode(rev))] |
|
1378 | for name in repo.nodebookmarks(tonode(rev))] | |
1379 | if bmchanges: |
|
1379 | if bmchanges: | |
1380 | with repo.transaction('rebase') as tr: |
|
1380 | with repo.transaction('rebase') as tr: | |
1381 | repo._bookmarks.applychanges(repo, tr, bmchanges) |
|
1381 | repo._bookmarks.applychanges(repo, tr, bmchanges) | |
1382 | mapping = {} |
|
1382 | mapping = {} | |
1383 | for rev, newrev in sorted(state.items()): |
|
1383 | for rev, newrev in sorted(state.items()): | |
1384 | if newrev >= 0 and newrev != rev: |
|
1384 | if newrev >= 0 and newrev != rev: | |
1385 | if rev in skipped: |
|
1385 | if rev in skipped: | |
1386 | succs = () |
|
1386 | succs = () | |
1387 | elif collapsedas is not None: |
|
1387 | elif collapsedas is not None: | |
1388 | succs = (collapsedas,) |
|
1388 | succs = (collapsedas,) | |
1389 | else: |
|
1389 | else: | |
1390 | succs = (tonode(newrev),) |
|
1390 | succs = (tonode(newrev),) | |
1391 | mapping[tonode(rev)] = succs |
|
1391 | mapping[tonode(rev)] = succs | |
1392 | scmutil.cleanupnodes(repo, mapping, 'rebase') |
|
1392 | scmutil.cleanupnodes(repo, mapping, 'rebase') | |
1393 |
|
1393 | |||
1394 | def pullrebase(orig, ui, repo, *args, **opts): |
|
1394 | def pullrebase(orig, ui, repo, *args, **opts): | |
1395 | 'Call rebase after pull if the latter has been invoked with --rebase' |
|
1395 | 'Call rebase after pull if the latter has been invoked with --rebase' | |
1396 | ret = None |
|
1396 | ret = None | |
1397 | if opts.get('rebase'): |
|
1397 | if opts.get('rebase'): | |
1398 | if ui.configbool('commands', 'rebase.requiredest'): |
|
1398 | if ui.configbool('commands', 'rebase.requiredest'): | |
1399 | msg = _('rebase destination required by configuration') |
|
1399 | msg = _('rebase destination required by configuration') | |
1400 | hint = _('use hg pull followed by hg rebase -d DEST') |
|
1400 | hint = _('use hg pull followed by hg rebase -d DEST') | |
1401 | raise error.Abort(msg, hint=hint) |
|
1401 | raise error.Abort(msg, hint=hint) | |
1402 |
|
1402 | |||
1403 | with repo.wlock(), repo.lock(): |
|
1403 | with repo.wlock(), repo.lock(): | |
1404 | if opts.get('update'): |
|
1404 | if opts.get('update'): | |
1405 | del opts['update'] |
|
1405 | del opts['update'] | |
1406 | ui.debug('--update and --rebase are not compatible, ignoring ' |
|
1406 | ui.debug('--update and --rebase are not compatible, ignoring ' | |
1407 | 'the update flag\n') |
|
1407 | 'the update flag\n') | |
1408 |
|
1408 | |||
1409 | cmdutil.checkunfinished(repo) |
|
1409 | cmdutil.checkunfinished(repo) | |
1410 | cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: ' |
|
1410 | cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: ' | |
1411 | 'please commit or shelve your changes first')) |
|
1411 | 'please commit or shelve your changes first')) | |
1412 |
|
1412 | |||
1413 | revsprepull = len(repo) |
|
1413 | revsprepull = len(repo) | |
1414 | origpostincoming = commands.postincoming |
|
1414 | origpostincoming = commands.postincoming | |
1415 | def _dummy(*args, **kwargs): |
|
1415 | def _dummy(*args, **kwargs): | |
1416 | pass |
|
1416 | pass | |
1417 | commands.postincoming = _dummy |
|
1417 | commands.postincoming = _dummy | |
1418 | try: |
|
1418 | try: | |
1419 | ret = orig(ui, repo, *args, **opts) |
|
1419 | ret = orig(ui, repo, *args, **opts) | |
1420 | finally: |
|
1420 | finally: | |
1421 | commands.postincoming = origpostincoming |
|
1421 | commands.postincoming = origpostincoming | |
1422 | revspostpull = len(repo) |
|
1422 | revspostpull = len(repo) | |
1423 | if revspostpull > revsprepull: |
|
1423 | if revspostpull > revsprepull: | |
1424 | # --rev option from pull conflicts with rebase's own --rev |

1424 | # --rev option from pull conflicts with rebase's own --rev | |
1425 | # dropping it |
|
1425 | # dropping it | |
1426 | if 'rev' in opts: |
|
1426 | if 'rev' in opts: | |
1427 | del opts['rev'] |
|
1427 | del opts['rev'] | |
1428 | # positional argument from pull conflicts with rebase's own |
|
1428 | # positional argument from pull conflicts with rebase's own | |
1429 | # --source. |
|
1429 | # --source. | |
1430 | if 'source' in opts: |
|
1430 | if 'source' in opts: | |
1431 | del opts['source'] |
|
1431 | del opts['source'] | |
1432 | # revsprepull is the len of the repo, not revnum of tip. |
|
1432 | # revsprepull is the len of the repo, not revnum of tip. | |
1433 | destspace = list(repo.changelog.revs(start=revsprepull)) |
|
1433 | destspace = list(repo.changelog.revs(start=revsprepull)) | |
1434 | opts['_destspace'] = destspace |
|
1434 | opts['_destspace'] = destspace | |
1435 | try: |
|
1435 | try: | |
1436 | rebase(ui, repo, **opts) |
|
1436 | rebase(ui, repo, **opts) | |
1437 | except error.NoMergeDestAbort: |
|
1437 | except error.NoMergeDestAbort: | |
1438 | # we can maybe update instead |
|
1438 | # we can maybe update instead | |
1439 | rev, _a, _b = destutil.destupdate(repo) |
|
1439 | rev, _a, _b = destutil.destupdate(repo) | |
1440 | if rev == repo['.'].rev(): |
|
1440 | if rev == repo['.'].rev(): | |
1441 | ui.status(_('nothing to rebase\n')) |
|
1441 | ui.status(_('nothing to rebase\n')) | |
1442 | else: |
|
1442 | else: | |
1443 | ui.status(_('nothing to rebase - updating instead\n')) |
|
1443 | ui.status(_('nothing to rebase - updating instead\n')) | |
1444 | # not passing argument to get the bare update behavior |
|
1444 | # not passing argument to get the bare update behavior | |
1445 | # with warning and trumpets |
|
1445 | # with warning and trumpets | |
1446 | commands.update(ui, repo) |
|
1446 | commands.update(ui, repo) | |
1447 | else: |
|
1447 | else: | |
1448 | if opts.get('tool'): |
|
1448 | if opts.get('tool'): | |
1449 | raise error.Abort(_('--tool can only be used with --rebase')) |
|
1449 | raise error.Abort(_('--tool can only be used with --rebase')) | |
1450 | ret = orig(ui, repo, *args, **opts) |
|
1450 | ret = orig(ui, repo, *args, **opts) | |
1451 |
|
1451 | |||
1452 | return ret |
|
1452 | return ret | |
1453 |
|
1453 | |||
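pullrebase() above temporarily silences commands.postincoming while the pull runs and restores it in a finally block. A generic sketch of that save/replace/restore pattern (the commands namespace here is a stand-in, not Mercurial's module):

import types

commands = types.SimpleNamespace(
    postincoming=lambda: print('update hint printed'))

def pull_then_rebase():
    saved = commands.postincoming
    commands.postincoming = lambda: None    # silenced while the pull runs
    try:
        commands.postincoming()             # pull would call this internally
    finally:
        commands.postincoming = saved       # always restored, even on error
    commands.postincoming()                 # later callers see the original

pull_then_rebase()                          # prints once, after restoration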
1454 | def _setrebasesetvisibility(repo, revs): |
|
1454 | def _setrebasesetvisibility(repo, revs): | |
1455 | """store the currently rebased set on the repo object |
|
1455 | """store the currently rebased set on the repo object | |
1456 |
|
1456 | |||
1457 | This is used by another function to prevent rebased revisions from becoming |

1457 | This is used by another function to prevent rebased revisions from becoming | |
1458 | hidden (see issue4504)""" |

1458 | hidden (see issue4504)""" | |
1459 | repo = repo.unfiltered() |
|
1459 | repo = repo.unfiltered() | |
1460 | repo._rebaseset = revs |
|
1460 | repo._rebaseset = revs | |
1461 | # invalidate cache if visibility changes |
|
1461 | # invalidate cache if visibility changes | |
1462 | hiddens = repo.filteredrevcache.get('visible', set()) |
|
1462 | hiddens = repo.filteredrevcache.get('visible', set()) | |
1463 | if revs & hiddens: |
|
1463 | if revs & hiddens: | |
1464 | repo.invalidatevolatilesets() |
|
1464 | repo.invalidatevolatilesets() | |
1465 |
|
1465 | |||
1466 | def _clearrebasesetvisibiliy(repo): |
|
1466 | def _clearrebasesetvisibiliy(repo): | |
1467 | """remove rebaseset data from the repo""" |
|
1467 | """remove rebaseset data from the repo""" | |
1468 | repo = repo.unfiltered() |
|
1468 | repo = repo.unfiltered() | |
1469 | if '_rebaseset' in vars(repo): |
|
1469 | if '_rebaseset' in vars(repo): | |
1470 | del repo._rebaseset |
|
1470 | del repo._rebaseset | |
1471 |
|
1471 | |||
1472 | def _rebasedvisible(orig, repo): |
|
1472 | def _rebasedvisible(orig, repo): | |
1473 | """ensure rebased revs stay visible (see issue4504)""" |
|
1473 | """ensure rebased revs stay visible (see issue4504)""" | |
1474 | blockers = orig(repo) |
|
1474 | blockers = orig(repo) | |
1475 | blockers.update(getattr(repo, '_rebaseset', ())) |
|
1475 | blockers.update(getattr(repo, '_rebaseset', ())) | |
1476 | return blockers |
|
1476 | return blockers | |
1477 |
|
1477 | |||
1478 | def _filterobsoleterevs(repo, revs): |
|
1478 | def _filterobsoleterevs(repo, revs): | |
1479 | """returns a set of the obsolete revisions in revs""" |
|
1479 | """returns a set of the obsolete revisions in revs""" | |
1480 | return set(r for r in revs if repo[r].obsolete()) |
|
1480 | return set(r for r in revs if repo[r].obsolete()) | |
1481 |
|
1481 | |||
1482 | def _computeobsoletenotrebased(repo, rebaseobsrevs, dest): |
|
1482 | def _computeobsoletenotrebased(repo, rebaseobsrevs, dest): | |
1483 | """return a mapping obsolete => successor for all obsolete nodes to be |
|
1483 | """return a mapping obsolete => successor for all obsolete nodes to be | |
1484 | rebased that have a successor in the destination |

1484 | rebased that have a successor in the destination | |
1485 |
|
1485 | |||
1486 | obsolete => None entries in the mapping indicate nodes with no successor""" |
|
1486 | obsolete => None entries in the mapping indicate nodes with no successor""" | |
1487 | obsoletenotrebased = {} |
|
1487 | obsoletenotrebased = {} | |
1488 |
|
1488 | |||
1489 | # Build a mapping successor => obsolete nodes for the obsolete |
|
1489 | # Build a mapping successor => obsolete nodes for the obsolete | |
1490 | # nodes to be rebased |
|
1490 | # nodes to be rebased | |
1491 | allsuccessors = {} |
|
1491 | allsuccessors = {} | |
1492 | cl = repo.changelog |
|
1492 | cl = repo.changelog | |
1493 | for r in rebaseobsrevs: |
|
1493 | for r in rebaseobsrevs: | |
1494 | node = cl.node(r) |
|
1494 | node = cl.node(r) | |
1495 | for s in obsutil.allsuccessors(repo.obsstore, [node]): |
|
1495 | for s in obsutil.allsuccessors(repo.obsstore, [node]): | |
1496 | try: |
|
1496 | try: | |
1497 | allsuccessors[cl.rev(s)] = cl.rev(node) |
|
1497 | allsuccessors[cl.rev(s)] = cl.rev(node) | |
1498 | except LookupError: |
|
1498 | except LookupError: | |
1499 | pass |
|
1499 | pass | |
1500 |
|
1500 | |||
1501 | if allsuccessors: |
|
1501 | if allsuccessors: | |
1502 | # Look for successors of obsolete nodes to be rebased among |
|
1502 | # Look for successors of obsolete nodes to be rebased among | |
1503 | # the ancestors of dest |
|
1503 | # the ancestors of dest | |
1504 | ancs = cl.ancestors([dest], |
|
1504 | ancs = cl.ancestors([dest], | |
1505 | stoprev=min(allsuccessors), |
|
1505 | stoprev=min(allsuccessors), | |
1506 | inclusive=True) |
|
1506 | inclusive=True) | |
1507 | for s in allsuccessors: |
|
1507 | for s in allsuccessors: | |
1508 | if s in ancs: |
|
1508 | if s in ancs: | |
1509 | obsoletenotrebased[allsuccessors[s]] = s |
|
1509 | obsoletenotrebased[allsuccessors[s]] = s | |
1510 | elif (s == allsuccessors[s] and |
|
1510 | elif (s == allsuccessors[s] and | |
1511 | allsuccessors.values().count(s) == 1): |
|
1511 | allsuccessors.values().count(s) == 1): | |
1512 | # plain prune |
|
1512 | # plain prune | |
1513 | obsoletenotrebased[s] = None |
|
1513 | obsoletenotrebased[s] = None | |
1514 |
|
1514 | |||
1515 | return obsoletenotrebased |
|
1515 | return obsoletenotrebased | |
1516 |
|
1516 | |||
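_computeobsoletenotrebased() above maps each obsolete revision either to a successor that already sits in the destination's ancestors, or to None for a plain prune. A simplified sketch of that mapping with plain dictionaries and lists:

def obsolete_not_rebased_sketch(obsolete_revs, successors, dest_ancestors):
    # Keep only revs whose successor is already in the destination, or
    # revs with no successor at all (pruned).
    mapping = {}
    for rev in obsolete_revs:
        succs = successors.get(rev, [])
        in_dest = [s for s in succs if s in dest_ancestors]
        if in_dest:
            mapping[rev] = in_dest[0]
        elif not succs:
            mapping[rev] = None          # pruned: no successor at all
    return mapping

print(obsolete_not_rebased_sketch(['X', 'Y'],
                                  {'X': ['X2'], 'Y': []},
                                  {'X2', 'D'}))
# {'X': 'X2', 'Y': None}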
1517 | def summaryhook(ui, repo): |
|
1517 | def summaryhook(ui, repo): | |
1518 | if not repo.vfs.exists('rebasestate'): |
|
1518 | if not repo.vfs.exists('rebasestate'): | |
1519 | return |
|
1519 | return | |
1520 | try: |
|
1520 | try: | |
1521 | rbsrt = rebaseruntime(repo, ui, {}) |
|
1521 | rbsrt = rebaseruntime(repo, ui, {}) | |
1522 | rbsrt.restorestatus() |
|
1522 | rbsrt.restorestatus() | |
1523 | state = rbsrt.state |
|
1523 | state = rbsrt.state | |
1524 | except error.RepoLookupError: |
|
1524 | except error.RepoLookupError: | |
1525 | # i18n: column positioning for "hg summary" |
|
1525 | # i18n: column positioning for "hg summary" | |
1526 | msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n') |
|
1526 | msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n') | |
1527 | ui.write(msg) |
|
1527 | ui.write(msg) | |
1528 | return |
|
1528 | return | |
1529 | numrebased = len([i for i in state.itervalues() if i >= 0]) |
|
1529 | numrebased = len([i for i in state.itervalues() if i >= 0]) | |
1530 | # i18n: column positioning for "hg summary" |
|
1530 | # i18n: column positioning for "hg summary" | |
1531 | ui.write(_('rebase: %s, %s (rebase --continue)\n') % |
|
1531 | ui.write(_('rebase: %s, %s (rebase --continue)\n') % | |
1532 | (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased, |
|
1532 | (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased, | |
1533 | ui.label(_('%d remaining'), 'rebase.remaining') % |
|
1533 | ui.label(_('%d remaining'), 'rebase.remaining') % | |
1534 | (len(state) - numrebased))) |
|
1534 | (len(state) - numrebased))) | |
1535 |
|
1535 | |||
1536 | def uisetup(ui): |
|
1536 | def uisetup(ui): | |
1537 | # Replace pull with a decorator to provide --rebase option |

1537 | # Replace pull with a decorator to provide --rebase option | |
1538 | entry = extensions.wrapcommand(commands.table, 'pull', pullrebase) |
|
1538 | entry = extensions.wrapcommand(commands.table, 'pull', pullrebase) | |
1539 | entry[1].append(('', 'rebase', None, |
|
1539 | entry[1].append(('', 'rebase', None, | |
1540 | _("rebase working directory to branch head"))) |
|
1540 | _("rebase working directory to branch head"))) | |
1541 | entry[1].append(('t', 'tool', '', |
|
1541 | entry[1].append(('t', 'tool', '', | |
1542 | _("specify merge tool for rebase"))) |
|
1542 | _("specify merge tool for rebase"))) | |
1543 | cmdutil.summaryhooks.add('rebase', summaryhook) |
|
1543 | cmdutil.summaryhooks.add('rebase', summaryhook) | |
1544 | cmdutil.unfinishedstates.append( |
|
1544 | cmdutil.unfinishedstates.append( | |
1545 | ['rebasestate', False, False, _('rebase in progress'), |
|
1545 | ['rebasestate', False, False, _('rebase in progress'), | |
1546 | _("use 'hg rebase --continue' or 'hg rebase --abort'")]) |
|
1546 | _("use 'hg rebase --continue' or 'hg rebase --abort'")]) | |
1547 | cmdutil.afterresolvedstates.append( |
|
1547 | cmdutil.afterresolvedstates.append( | |
1548 | ['rebasestate', _('hg rebase --continue')]) |
|
1548 | ['rebasestate', _('hg rebase --continue')]) | |
1549 | # ensure rebased revs are not hidden |

1549 | # ensure rebased revs are not hidden | |
1550 | extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible) |
|
1550 | extensions.wrapfunction(repoview, 'pinnedrevs', _rebasedvisible) |
@@ -1,141 +1,147 @@
 #!/usr/bin/env python
 #
 # hggettext - carefully extract docstrings for Mercurial
 #
 # Copyright 2009 Matt Mackall <mpm@selenic.com> and others
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 # The normalize function is taken from pygettext which is distributed
 # with Python under the Python License, which is GPL compatible.
 
 """Extract docstrings from Mercurial commands.
 
 Compared to pygettext, this script knows about the cmdtable and table
 dictionaries used by Mercurial, and will only extract docstrings from
 functions mentioned therein.
 
 Use xgettext like normal to extract strings marked as translatable and
 join the message cataloges to get the final catalog.
 """
 
 from __future__ import absolute_import, print_function
 
 import inspect
 import os
 import sys
 
 
 def escape(s):
     # The order is important, the backslash must be escaped first
     # since the other replacements introduce new backslashes
     # themselves.
     s = s.replace('\\', '\\\\')
     s = s.replace('\n', '\\n')
     s = s.replace('\r', '\\r')
     s = s.replace('\t', '\\t')
     s = s.replace('"', '\\"')
     return s
 
 
 def normalize(s):
     # This converts the various Python string types into a format that
     # is appropriate for .po files, namely much closer to C style.
     lines = s.split('\n')
     if len(lines) == 1:
         s = '"' + escape(s) + '"'
     else:
         if not lines[-1]:
             del lines[-1]
             lines[-1] = lines[-1] + '\n'
         lines = map(escape, lines)
         lineterm = '\\n"\n"'
         s = '""\n"' + lineterm.join(lines) + '"'
     return s
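
To make the .po formatting concrete, here is what normalize() produces for a
one-line and a multi-line docstring (a small sketch, shown as an interactive
session against the code above):

    >>> print(normalize('add the specified files'))
    "add the specified files"
    >>> print(normalize('first line\nsecond line\n'))
    ""
    "first line\n"
    "second line\n"
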
 
 
 def poentry(path, lineno, s):
     return ('#: %s:%d\n' % (path, lineno) +
             'msgid %s\n' % normalize(s) +
             'msgstr ""\n')
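
A poentry() result is a standard gettext stanza: a source reference comment,
the msgid, and an empty msgstr for translators to fill in. For example (the
path and line number here are illustrative):

    >>> print(poentry('mercurial/commands.py', 42, 'add the specified files'))
    #: mercurial/commands.py:42
    msgid "add the specified files"
    msgstr ""
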
 
 
 def offset(src, doc, name, default):
     """Compute offset or issue a warning on stdout."""
     # Backslashes in doc appear doubled in src.
     end = src.find(doc.replace('\\', '\\\\'))
     if end == -1:
         # This can happen if the docstring contains unnecessary escape
         # sequences such as \" in a triple-quoted string. The problem
         # is that \" is turned into " and so doc wont appear in src.
         sys.stderr.write("warning: unknown offset in %s, assuming %d lines\n"
                          % (name, default))
         return default
     else:
         return src.count('\n', 0, end)
 
 
 def importpath(path):
     """Import a path like foo/bar/baz.py and return the baz module."""
     if path.endswith('.py'):
         path = path[:-3]
     if path.endswith('/__init__'):
         path = path[:-9]
     path = path.replace('/', '.')
     mod = __import__(path)
     for comp in path.split('.')[1:]:
         mod = getattr(mod, comp)
     return mod
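
offset() locates the docstring inside the raw source text so the line number
written into the '#:' comment points at the docstring itself rather than at
the 'def' line. A small self-contained sketch:

    src = 'def add(ui, repo, *pats, **opts):\n    """add the specified files"""\n'
    doc = 'add the specified files'
    print(offset(src, doc, 'add', 1))   # -> 1: one newline precedes the docstring
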
 
 
 def docstrings(path):
     """Extract docstrings from path.
 
     This respects the Mercurial cmdtable/table convention and will
     only extract docstrings from functions mentioned in these tables.
     """
     mod = importpath(path)
     if mod.__doc__:
         src = open(path).read()
         lineno = 1 + offset(src, mod.__doc__, path, 7)
         print(poentry(path, lineno, mod.__doc__))
 
     functions = list(getattr(mod, 'i18nfunctions', []))
     functions = [(f, True) for f in functions]
 
     cmdtable = getattr(mod, 'cmdtable', {})
     if not cmdtable:
         # Maybe we are processing mercurial.commands?
         cmdtable = getattr(mod, 'table', {})
     functions.extend((c[0], False) for c in cmdtable.itervalues())
 
     for func, rstrip in functions:
         if func.__doc__:
+            funcmod = inspect.getmodule(func)
+            extra = ''
+            if funcmod.__package__ == funcmod.__name__:
+                extra = '/__init__'
+            actualpath = '%s%s.py' % (funcmod.__name__.replace('.', '/'), extra)
+
             src = inspect.getsource(func)
-            name = "%s.%s" % (path, func.__name__)
+            name = "%s.%s" % (actualpath, func.__name__)
             lineno = inspect.getsourcelines(func)[1]
             doc = func.__doc__
             if rstrip:
                 doc = doc.rstrip()
             lineno += offset(src, doc, name, 1)
-            print(poentry(path, lineno, doc))
+            print(poentry(actualpath, lineno, doc))
 
 
 def rawtext(path):
     src = open(path).read()
     print(poentry(path, 1, src))
 
 
 if __name__ == "__main__":
     # It is very important that we import the Mercurial modules from
     # the source tree where hggettext is executed. Otherwise we might
     # accidentally import and extract strings from a Mercurial
     # installation mentioned in PYTHONPATH.
     sys.path.insert(0, os.getcwd())
     from mercurial import demandimport; demandimport.enable()
     for path in sys.argv[1:]:
         if path.endswith('.txt'):
             rawtext(path)
         else:
             docstrings(path)
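
The lines added in this hunk make the '#:' references point at the module that
actually defines each command function, rather than at whatever path was given
on the command line. A small sketch of what the new actualpath computation
yields (the module names below are only examples):

    import inspect

    def actualpath_for(func):
        funcmod = inspect.getmodule(func)
        extra = ''
        if funcmod.__package__ == funcmod.__name__:
            # the function lives in a package's __init__.py
            extra = '/__init__'
        return '%s%s.py' % (funcmod.__name__.replace('.', '/'), extra)

    # A function defined in the package module 'hgext.largefiles' maps to
    # 'hgext/largefiles/__init__.py'; one defined in 'mercurial.commands'
    # maps to 'mercurial/commands.py'.
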
@@ -1,3752 +1,3762 @@
 # cmdutil.py - help for command processing in mercurial
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import errno
 import itertools
 import os
 import re
 import tempfile
 
 from .i18n import _
 from .node import (
     hex,
     nullid,
     nullrev,
     short,
 )
 
 from . import (
     bookmarks,
     changelog,
     copies,
     crecord as crecordmod,
+    dirstateguard,
     encoding,
     error,
     formatter,
     graphmod,
     match as matchmod,
     obsolete,
     patch,
     pathutil,
     phases,
     pycompat,
     registrar,
     revlog,
     revset,
     scmutil,
     smartset,
     templatekw,
     templater,
     util,
     vfs as vfsmod,
 )
 stringio = util.stringio
 
 # templates of common command options
 
 dryrunopts = [
     ('n', 'dry-run', None,
      _('do not perform actions, just print output')),
 ]
 
 remoteopts = [
     ('e', 'ssh', '',
      _('specify ssh command to use'), _('CMD')),
     ('', 'remotecmd', '',
      _('specify hg command to run on the remote side'), _('CMD')),
     ('', 'insecure', None,
      _('do not verify server certificate (ignoring web.cacerts config)')),
 ]
 
 walkopts = [
     ('I', 'include', [],
      _('include names matching the given patterns'), _('PATTERN')),
     ('X', 'exclude', [],
      _('exclude names matching the given patterns'), _('PATTERN')),
 ]
 
 commitopts = [
     ('m', 'message', '',
      _('use text as commit message'), _('TEXT')),
     ('l', 'logfile', '',
      _('read commit message from file'), _('FILE')),
 ]
 
 commitopts2 = [
     ('d', 'date', '',
      _('record the specified date as commit date'), _('DATE')),
     ('u', 'user', '',
      _('record the specified user as committer'), _('USER')),
 ]
 
 # hidden for now
 formatteropts = [
     ('T', 'template', '',
      _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
 ]
 
 templateopts = [
     ('', 'style', '',
      _('display using template map file (DEPRECATED)'), _('STYLE')),
     ('T', 'template', '',
      _('display with template'), _('TEMPLATE')),
 ]
 
 logopts = [
     ('p', 'patch', None, _('show patch')),
     ('g', 'git', None, _('use git extended diff format')),
     ('l', 'limit', '',
      _('limit number of changes displayed'), _('NUM')),
     ('M', 'no-merges', None, _('do not show merges')),
     ('', 'stat', None, _('output diffstat-style summary of changes')),
     ('G', 'graph', None, _("show the revision DAG")),
 ] + templateopts
 
 diffopts = [
     ('a', 'text', None, _('treat all files as text')),
     ('g', 'git', None, _('use git extended diff format')),
     ('', 'binary', None, _('generate binary diffs in git mode (default)')),
     ('', 'nodates', None, _('omit dates from diff headers'))
 ]
 
 diffwsopts = [
     ('w', 'ignore-all-space', None,
      _('ignore white space when comparing lines')),
     ('b', 'ignore-space-change', None,
      _('ignore changes in the amount of white space')),
     ('B', 'ignore-blank-lines', None,
      _('ignore changes whose lines are all blank')),
 ]
 
 diffopts2 = [
     ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
     ('p', 'show-function', None, _('show which function each change is in')),
     ('', 'reverse', None, _('produce a diff that undoes the changes')),
 ] + diffwsopts + [
     ('U', 'unified', '',
      _('number of lines of context to show'), _('NUM')),
     ('', 'stat', None, _('output diffstat-style summary of changes')),
     ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
 ]
 
 mergetoolopts = [
     ('t', 'tool', '', _('specify merge tool')),
 ]
 
 similarityopts = [
     ('s', 'similarity', '',
      _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
 ]
 
 subrepoopts = [
     ('S', 'subrepos', None,
      _('recurse into subrepositories'))
 ]
 
 debugrevlogopts = [
     ('c', 'changelog', False, _('open changelog')),
     ('m', 'manifest', False, _('open manifest')),
     ('', 'dir', '', _('open directory manifest')),
 ]
 
 # special string such that everything below this line will be ingored in the
 # editor text
 _linebelow = "^HG: ------------------------ >8 ------------------------$"
 
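
These shared option lists exist so that every command exposing, say, -I/-X or
--dry-run declares them identically. A minimal sketch of how an extension
command would reuse them (the command name and extra flag are hypothetical,
and this assumes a Mercurial version where registrar.command is available;
older code used cmdutil.command instead):

    from mercurial import cmdutil, registrar

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('touchcheck',
             [] + cmdutil.walkopts + cmdutil.dryrunopts,
             'hg touchcheck [OPTION]... [FILE]...')
    def touchcheck(ui, repo, *pats, **opts):
        """toy command showing reuse of the shared option lists"""
        if opts.get('dry_run'):
            ui.status('(dry run)\n')
        ui.write('include patterns: %r\n' % (opts.get('include'),))
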
 def ishunk(x):
     hunkclasses = (crecordmod.uihunk, patch.recordhunk)
     return isinstance(x, hunkclasses)
 
 def newandmodified(chunks, originalchunks):
     newlyaddedandmodifiedfiles = set()
     for chunk in chunks:
         if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
             originalchunks:
             newlyaddedandmodifiedfiles.add(chunk.header.filename())
     return newlyaddedandmodifiedfiles
 
 def parsealiases(cmd):
     return cmd.lstrip("^").split("|")
 
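
Command-table keys encode aliases ("name|alias|...") and a leading '^' marks a
command for the short help listing; parsealiases() strips that marker and
splits out the aliases. For example:

    from mercurial import cmdutil
    print(cmdutil.parsealiases('^log|history'))   # ['log', 'history']
    print(cmdutil.parsealiases('debugdata'))      # ['debugdata']
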
 def setupwrapcolorwrite(ui):
     # wrap ui.write so diff output can be labeled/colorized
     def wrapwrite(orig, *args, **kw):
         label = kw.pop('label', '')
         for chunk, l in patch.difflabel(lambda: args):
             orig(chunk, label=label + l)
 
     oldwrite = ui.write
     def wrap(*args, **kwargs):
         return wrapwrite(oldwrite, *args, **kwargs)
     setattr(ui, 'write', wrap)
     return oldwrite
 
 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
     if usecurses:
         if testfile:
             recordfn = crecordmod.testdecorator(testfile,
                                                 crecordmod.testchunkselector)
         else:
             recordfn = crecordmod.chunkselector
 
         return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
 
     else:
         return patch.filterpatch(ui, originalhunks, operation)
 
 def recordfilter(ui, originalhunks, operation=None):
     """ Prompts the user to filter the originalhunks and return a list of
     selected hunks.
     *operation* is used for to build ui messages to indicate the user what
     kind of filtering they are doing: reverting, committing, shelving, etc.
     (see patch.filterpatch).
     """
     usecurses = crecordmod.checkcurses(ui)
     testfile = ui.config('experimental', 'crecordtest')
     oldwrite = setupwrapcolorwrite(ui)
     try:
         newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                           testfile, operation)
     finally:
         ui.write = oldwrite
     return newchunks, newopts
 
 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
              filterfn, *pats, **opts):
     from . import merge as mergemod
     opts = pycompat.byteskwargs(opts)
     if not ui.interactive():
         if cmdsuggest:
             msg = _('running non-interactively, use %s instead') % cmdsuggest
         else:
             msg = _('running non-interactively')
         raise error.Abort(msg)
 
     # make sure username is set before going interactive
     if not opts.get('user'):
         ui.username() # raise exception, username not provided
 
     def recordfunc(ui, repo, message, match, opts):
         """This is generic record driver.
 
         Its job is to interactively filter local changes, and
         accordingly prepare working directory into a state in which the
         job can be delegated to a non-interactive commit command such as
         'commit' or 'qrefresh'.
 
         After the actual job is done by non-interactive command, the
         working directory is restored to its original state.
 
         In the end we'll record interesting changes, and everything else
         will be left in place, so the user can continue working.
         """
 
         checkunfinished(repo, commit=True)
         wctx = repo[None]
         merge = len(wctx.parents()) > 1
         if merge:
             raise error.Abort(_('cannot partially commit a merge '
                                 '(use "hg commit" instead)'))
 
         def fail(f, msg):
             raise error.Abort('%s: %s' % (f, msg))
 
         force = opts.get('force')
         if not force:
             vdirs = []
             match.explicitdir = vdirs.append
             match.bad = fail
 
         status = repo.status(match=match)
         if not force:
             repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
         diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
         diffopts.nodates = True
         diffopts.git = True
         diffopts.showfunc = True
         originaldiff = patch.diff(repo, changes=status, opts=diffopts)
         originalchunks = patch.parsepatch(originaldiff)
 
         # 1. filter patch, since we are intending to apply subset of it
         try:
             chunks, newopts = filterfn(ui, originalchunks)
         except patch.PatchError as err:
             raise error.Abort(_('error parsing patch: %s') % err)
         opts.update(newopts)
 
         # We need to keep a backup of files that have been newly added and
         # modified during the recording process because there is a previous
         # version without the edit in the workdir
         newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
         contenders = set()
         for h in chunks:
             try:
                 contenders.update(set(h.files()))
             except AttributeError:
                 pass
 
         changed = status.modified + status.added + status.removed
         newfiles = [f for f in changed if f in contenders]
         if not newfiles:
             ui.status(_('no changes to record\n'))
             return 0
 
         modified = set(status.modified)
 
         # 2. backup changed files, so we can restore them in the end
 
         if backupall:
             tobackup = changed
         else:
             tobackup = [f for f in newfiles if f in modified or f in \
                         newlyaddedandmodifiedfiles]
         backups = {}
         if tobackup:
             backupdir = repo.vfs.join('record-backups')
             try:
                 os.mkdir(backupdir)
             except OSError as err:
                 if err.errno != errno.EEXIST:
                     raise
         try:
             # backup continues
             for f in tobackup:
                 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                                dir=backupdir)
                 os.close(fd)
                 ui.debug('backup %r as %r\n' % (f, tmpname))
                 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                 backups[f] = tmpname
 
             fp = stringio()
             for c in chunks:
                 fname = c.filename()
                 if fname in backups:
                     c.write(fp)
             dopatch = fp.tell()
             fp.seek(0)
 
             # 2.5 optionally review / modify patch in text editor
             if opts.get('review', False):
                 patchtext = (crecordmod.diffhelptext
                              + crecordmod.patchhelptext
                              + fp.read())
                 reviewedpatch = ui.edit(patchtext, "",
                                         extra={"suffix": ".diff"},
                                         repopath=repo.path)
                 fp.truncate(0)
                 fp.write(reviewedpatch)
                 fp.seek(0)
 
             [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
             # 3a. apply filtered patch to clean repo  (clean)
             if backups:
                 # Equivalent to hg.revert
                 m = scmutil.matchfiles(repo, backups.keys())
                 mergemod.update(repo, repo.dirstate.p1(),
                                 False, True, matcher=m)
 
             # 3b. (apply)
             if dopatch:
                 try:
                     ui.debug('applying patch\n')
                     ui.debug(fp.getvalue())
                     patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                 except patch.PatchError as err:
                     raise error.Abort(str(err))
             del fp
 
             # 4. We prepared working directory according to filtered
             # patch. Now is the time to delegate the job to
             # commit/qrefresh or the like!
 
             # Make all of the pathnames absolute.
             newfiles = [repo.wjoin(nf) for nf in newfiles]
             return commitfunc(ui, repo, *newfiles, **opts)
         finally:
             # 5. finally restore backed-up files
             try:
                 dirstate = repo.dirstate
                 for realname, tmpname in backups.iteritems():
                     ui.debug('restoring %r to %r\n' % (tmpname, realname))
 
                     if dirstate[realname] == 'n':
                         # without normallookup, restoring timestamp
                         # may cause partially committed files
                         # to be treated as unmodified
                         dirstate.normallookup(realname)
 
                     # copystat=True here and above are a hack to trick any
                     # editors that have f open that we haven't modified them.
                     #
                     # Also note that this racy as an editor could notice the
                     # file's mtime before we've finished writing it.
                     util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                     os.unlink(tmpname)
                 if tobackup:
                     os.rmdir(backupdir)
             except OSError:
                 pass
 
     def recordinwlock(ui, repo, message, match, opts):
         with repo.wlock():
             return recordfunc(ui, repo, message, match, opts)
 
     return commit(ui, repo, recordinwlock, pats, opts)
 
 def tersestatus(root, statlist, status, ignorefn, ignore):
     """
     Returns a list of statuses with directory collapsed if all the files in the
     directory has the same status.
     """
 
     def numfiles(dirname):
         """
         Calculates the number of tracked files in a given directory which also
         includes files which were removed or deleted. Considers ignored files
         if ignore argument is True or 'i' is present in status argument.
         """
         if lencache.get(dirname):
             return lencache[dirname]
         if 'i' in status or ignore:
             def match(localpath):
                 absolutepath = os.path.join(root, localpath)
                 if os.path.isdir(absolutepath) and isemptydir(absolutepath):
                     return True
                 return False
         else:
             def match(localpath):
                 # there can be directory whose all the files are ignored and
                 # hence the drectory should also be ignored while counting
                 # number of files or subdirs in it's parent directory. This
                 # checks the same.
                 # XXX: We need a better logic here.
                 if os.path.isdir(os.path.join(root, localpath)):
                     return isignoreddir(localpath)
                 else:
                     # XXX: there can be files which have the ignored pattern but
                     # are not ignored. That leads to bug in counting number of
                     # tracked files in the directory.
                     return ignorefn(localpath)
         lendir = 0
         abspath = os.path.join(root, dirname)
         # There might be cases when a directory does not exists as the whole
         # directory can be removed and/or deleted.
         try:
             for f in os.listdir(abspath):
                 localpath = os.path.join(dirname, f)
                 if not match(localpath):
                     lendir += 1
         except OSError:
             pass
         lendir += len(absentdir.get(dirname, []))
         lencache[dirname] = lendir
         return lendir
 
     def isemptydir(abspath):
         """
         Check whether a directory is empty or not, i.e. there is no files in the
         directory and all its subdirectories.
         """
         for f in os.listdir(abspath):
             fullpath = os.path.join(abspath, f)
             if os.path.isdir(fullpath):
                 # recursion here
                 ret = isemptydir(fullpath)
                 if not ret:
                     return False
             else:
                 return False
         return True
 
     def isignoreddir(localpath):
         """Return True if `localpath` directory is ignored or contains only
         ignored files and should hence be considered ignored.
         """
         dirpath = os.path.join(root, localpath)
         if ignorefn(dirpath):
             return True
         for f in os.listdir(dirpath):
             filepath = os.path.join(dirpath, f)
             if os.path.isdir(filepath):
                 # recursion here
                 ret = isignoreddir(os.path.join(localpath, f))
                 if not ret:
                     return False
             else:
                 if not ignorefn(os.path.join(localpath, f)):
                     return False
         return True
 
     def absentones(removedfiles, missingfiles):
         """
         Returns a dictionary of directories with files in it which are either
         removed or missing (deleted) in them.
         """
         absentdir = {}
         absentfiles = removedfiles + missingfiles
         while absentfiles:
             f = absentfiles.pop()
             par = os.path.dirname(f)
             if par == '':
                 continue
             # we need to store files rather than number of files as some files
             # or subdirectories in a directory can be counted twice. This is
             # also we have used sets here.
             try:
                 absentdir[par].add(f)
             except KeyError:
                 absentdir[par] = set([f])
             absentfiles.append(par)
         return absentdir
 
     indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
     # get a dictonary of directories and files which are missing as os.listdir()
     # won't be able to list them.
     absentdir = absentones(statlist[2], statlist[3])
     finalrs = [[]] * len(indexes)
     didsomethingchanged = False
     # dictionary to store number of files and subdir in a directory so that we
     # don't compute that again.
     lencache = {}
 
     for st in pycompat.bytestr(status):
 
         try:
             ind = indexes[st]
         except KeyError:
             # TODO: Need a better error message here
             raise error.Abort("'%s' not recognized" % st)
 
         sfiles = statlist[ind]
         if not sfiles:
             continue
         pardict = {}
         for a in sfiles:
             par = os.path.dirname(a)
             pardict.setdefault(par, []).append(a)
 
         rs = []
         newls = []
         for par, files in pardict.iteritems():
             lenpar = numfiles(par)
             if lenpar == len(files):
                 newls.append(par)
 
         if not newls:
             continue
 
         while newls:
             newel = newls.pop()
             if newel == '':
                 continue
             parn = os.path.dirname(newel)
             pardict[newel] = []
             # Adding pycompat.ossep as newel is a directory.
             pardict.setdefault(parn, []).append(newel + pycompat.ossep)
             lenpar = numfiles(parn)
             if lenpar == len(pardict[parn]):
                 newls.append(parn)
 
         # dict.values() for Py3 compatibility
         for files in pardict.values():
             rs.extend(files)
 
         rs.sort()
         finalrs[ind] = rs
         didsomethingchanged = True
 
     # If nothing is changed, make sure the order of files is preserved.
     if not didsomethingchanged:
         return statlist
 
     for x in xrange(len(indexes)):
         if not finalrs[x]:
             finalrs[x] = statlist[x]
 
     return finalrs
 
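
The collapsing rule above boils down to: once every entry a directory can hold
(per numfiles) carries the same status, the directory replaces its individual
entries. A deliberately simplified, self-contained sketch of that rule (not
the real implementation, which also accounts for ignored and absent files):

    import os

    def collapse(paths, entries_per_dir):
        bydir = {}
        for p in paths:
            bydir.setdefault(os.path.dirname(p), []).append(p)
        out = []
        for d, fs in sorted(bydir.items()):
            if d and len(fs) == entries_per_dir.get(d):
                out.append(d + '/')       # whole directory shares the status
            else:
                out.extend(sorted(fs))
        return out

    print(collapse(['docs/a.txt', 'docs/b.txt', 'README'], {'docs': 2}))
    # -> ['README', 'docs/']
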
 def findpossible(cmd, table, strict=False):
     """
     Return cmd -> (aliases, command table entry)
     for each matching command.
     Return debug commands (or their aliases) only if no normal command matches.
     """
     choice = {}
     debugchoice = {}
 
     if cmd in table:
         # short-circuit exact matches, "log" alias beats "^log|history"
         keys = [cmd]
     else:
         keys = table.keys()
 
     allcmds = []
     for e in keys:
         aliases = parsealiases(e)
         allcmds.extend(aliases)
         found = None
         if cmd in aliases:
             found = cmd
         elif not strict:
             for a in aliases:
                 if a.startswith(cmd):
                     found = a
                     break
         if found is not None:
             if aliases[0].startswith("debug") or found.startswith("debug"):
                 debugchoice[found] = (aliases, table[e])
             else:
                 choice[found] = (aliases, table[e])
 
     if not choice and debugchoice:
         choice = debugchoice
 
     return choice, allcmds
 
 def findcmd(cmd, table, strict=True):
     """Return (aliases, command table entry) for command string."""
     choice, allcmds = findpossible(cmd, table, strict)
 
     if cmd in choice:
         return choice[cmd]
 
     if len(choice) > 1:
         clist = sorted(choice)
         raise error.AmbiguousCommand(cmd, clist)
 
     if choice:
         return list(choice.values())[0]
 
     raise error.UnknownCommand(cmd, allcmds)
 
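
This pair of helpers is what gives hg its abbreviated command matching. A
small self-contained example with a toy table (the entry values are
placeholders; only the keys matter here):

    from mercurial import cmdutil

    table = {'^log|history': (None, [], ''), 'debugdata': (None, [], '')}

    choice, allcmds = cmdutil.findpossible('hist', table)
    print(sorted(choice))     # ['history']   -- unique prefix of an alias

    choice, allcmds = cmdutil.findpossible('debugd', table)
    print(sorted(choice))     # ['debugdata'] -- debug commands only match
                              # when nothing else does
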
629 | def findrepo(p): |
|
630 | def findrepo(p): | |
630 | while not os.path.isdir(os.path.join(p, ".hg")): |
|
631 | while not os.path.isdir(os.path.join(p, ".hg")): | |
631 | oldp, p = p, os.path.dirname(p) |
|
632 | oldp, p = p, os.path.dirname(p) | |
632 | if p == oldp: |
|
633 | if p == oldp: | |
633 | return None |
|
634 | return None | |
634 |
|
635 | |||
635 | return p |
|
636 | return p | |
636 |
|
637 | |||
637 | def bailifchanged(repo, merge=True, hint=None): |
|
638 | def bailifchanged(repo, merge=True, hint=None): | |
638 | """ enforce the precondition that working directory must be clean. |
|
639 | """ enforce the precondition that working directory must be clean. | |
639 |
|
640 | |||
640 | 'merge' can be set to false if a pending uncommitted merge should be |
|
641 | 'merge' can be set to false if a pending uncommitted merge should be | |
641 | ignored (such as when 'update --check' runs). |
|
642 | ignored (such as when 'update --check' runs). | |
642 |
|
643 | |||
643 | 'hint' is the usual hint given to Abort exception. |
|
644 | 'hint' is the usual hint given to Abort exception. | |
644 | """ |
|
645 | """ | |
645 |
|
646 | |||
646 | if merge and repo.dirstate.p2() != nullid: |
|
647 | if merge and repo.dirstate.p2() != nullid: | |
647 | raise error.Abort(_('outstanding uncommitted merge'), hint=hint) |
|
648 | raise error.Abort(_('outstanding uncommitted merge'), hint=hint) | |
648 | modified, added, removed, deleted = repo.status()[:4] |
|
649 | modified, added, removed, deleted = repo.status()[:4] | |
649 | if modified or added or removed or deleted: |
|
650 | if modified or added or removed or deleted: | |
650 | raise error.Abort(_('uncommitted changes'), hint=hint) |
|
651 | raise error.Abort(_('uncommitted changes'), hint=hint) | |
651 | ctx = repo[None] |
|
652 | ctx = repo[None] | |
652 | for s in sorted(ctx.substate): |
|
653 | for s in sorted(ctx.substate): | |
653 | ctx.sub(s).bailifchanged(hint=hint) |
|
654 | ctx.sub(s).bailifchanged(hint=hint) | |
654 |
|
655 | |||
def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message

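# Illustrative sketch (assumed option dicts, not from the original source):
# logmessage() mirrors the -m/-l flags of commit-like commands.
#
#     logmessage(ui, {'message': 'fix bug', 'logfile': None})  # -> 'fix bug'
#     logmessage(ui, {'message': '', 'logfile': 'msg.txt'})    # reads msg.txt
#     logmessage(ui, {'message': '', 'logfile': '-'})          # reads stdin
#     # passing both --message and --logfile raises error.Abort.
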
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"

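# Hedged example of the resulting editform names (derived directly from the
# code above, with an assumed base name):
#
#     mergeeditform(True, 'import.normal')   # -> 'import.normal.merge'
#     mergeeditform(False, 'import.normal')  # -> 'import.normal.normal'
#     # a changectx with two parents likewise yields the '.merge' suffix.
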
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return the actual text to be
    stored into history. This allows the description to be changed before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of
    the 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor

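# Illustrative sketch of how commands typically obtain an editor callback
# (hypothetical call site; the real ones live in commands.py and extensions):
#
#     editor = getcommiteditor(edit=opts.get('edit'), editform='commit.normal')
#     node = repo.commit(message, user, date, match=m, editor=editor)
#     # with edit=False and no finishdesc/extramsg this wraps commiteditor;
#     # otherwise commitforceeditor is used so an editor is always launched.
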
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

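# Hedged example (assumed opts dicts): loglimit() turns the raw --limit value
# into an int or None.
#
#     loglimit({'limit': '10'})  # -> 10
#     loglimit({'limit': ''})    # -> None (no limit requested)
#     loglimit({'limit': '0'})   # raises error.Abort('limit must be positive')
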
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])

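# Illustrative sketch of the format specifiers expanded above (the pattern is
# hypothetical):
#
#     makefilename(repo, 'hg-%h.patch', ctx.node())
#     # '%h' -> short hex node, '%H' -> full hex node, '%R' -> rev number,
#     # '%b' -> basename of the repo root, '%m' -> sanitized desc,
#     # '%n'/'%N' -> zero-padded seqno / total, '%s'/'%d'/'%p' -> basename /
#     # dirname / full path when pathname is given.  An unknown specifier
#     # raises error.Abort("invalid format spec ...").
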
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'

class _unclosablefile(object):
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        pass

def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)

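# Hedged usage sketch: '-' (or an empty pattern) maps to the ui's stdin or
# stdout wrapped in _unclosablefile so callers can close() unconditionally;
# any other pattern goes through makefilename() and a plain open().
#
#     fp = makefileobj(repo, '-', mode='wb')              # repo.ui.fout
#     fp = makefileobj(repo, 'hg-%h.patch', ctx.node())   # real file on disk
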
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r

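# Illustrative sketch (hypothetical debug-command opts): openrevlog() picks
# exactly one revlog based on the mutually exclusive flags checked above.
#
#     opts = {'changelog': False, 'manifest': False, 'dir': ''}
#     r = openrevlog(repo, 'debugdata', 'foo.py', opts)  # filelog for foo.py
#     # with 'changelog': True it returns the unfiltered changelog instead,
#     # and without a repo it can fall back to reading a bare '.i' file.
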
def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0

## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}

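# Hedged sketch of how an extension might hook into import (the identifier
# and function below are hypothetical, following the calling convention
# described in the comments above):
#
#     def _recordsource(repo, patchdata, extra, opts):
#         extra['source'] = patchdata.get('nodeid', '')
#
#     extrapreimport.append('recordsource')
#     extrapreimportmap['recordsource'] = _recordsource
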
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parents of the created commit
    :opts: the full dict of options passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)

# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}

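# Hedged sketch of registering an extra export header (hypothetical names,
# following the (sequencenumber, changectx) convention noted above):
#
#     def _topicheader(seqno, ctx):
#         topic = ctx.extra().get('topic')
#         return ('Topic %s' % topic) if topic else None
#
#     extraexport.append('topic')
#     extraexportmap['topic'] = _topicheader
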
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)

def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] # Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()

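# Illustrative call sketch (hypothetical caller): exporting two revisions to
# per-revision files versus a single stream.
#
#     export(repo, [10, 11], fntemplate='hg-%h.patch', opts=diffopts)
#     # writes hg-<shorthash>.patch for each revision via makefileobj()
#     export(repo, [10, 11], fp=somefileobj, opts=diffopts)
#     # writes both patches to the given file-like object instead
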
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.'''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)

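# Editor's note: a minimal usage sketch, not part of the original module.
# It assumes an existing `ui` and `repo` and shows one way diffordiffstat()
# above could be driven to print a diffstat of a revision against its first
# parent; the helper name is hypothetical.
def _sketch_diffstat(ui, repo, rev):
    ctx = repo[rev]
    diffopts = patch.diffallopts(ui)
    diffordiffstat(ui, repo, diffopts, ctx.p1().node(), ctx.node(),
                   match=scmutil.matchall(repo), stat=True)
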
def _changesetlabels(ctx):
    labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
    if ctx.obsolete():
        labels.append('changeset.obsolete')
    if ctx.isunstable():
        labels.append('changeset.troubled')
        for instability in ctx.instabilities():
            labels.append('trouble.%s' % instability)
    return ' '.join(labels)

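# Editor's note: illustrative sketch only, not in the original source. The
# string returned by _changesetlabels() is what changeset_printer passes as
# the ui.write() label below; for a plain public changeset it is just
# 'log.changeset changeset.public'. The helper name is hypothetical.
def _sketch_changeset_label(repo, rev):
    return _changesetlabels(repo[rev])
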
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if ctx.isunstable():
            # i18n: column positioning for "hg log"
            instabilities = ctx.instabilities()
            self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
                          label='log.trouble')

        self._exthook(ctx)

        if self.ui.debugflag:
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''
        pass

    def showpatch(self, ctx, matchfn):
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

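# Editor's note: a small sketch, not part of the original module, showing the
# buffered protocol of changeset_printer: show() renders each changeset into
# self.hunk via ui.pushbuffer()/popbuffer(), and flush() emits and clears it.
# The helper name is hypothetical; `ui`, `repo` and `revs` are assumed.
def _sketch_buffered_log(ui, repo, revs):
    displayer = changeset_printer(ui, repo, None, {}, buffered=True)
    for rev in revs:
        ctx = repo[rev]
        displayer.show(ctx)
        displayer.flush(ctx)
    displayer.close()
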
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        self._first = True

    def close(self):
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")

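# Editor's note: a minimal sketch, not part of the original module. It drives
# the JSON displayer above directly; in normal use `hg log -Tjson` reaches it
# through show_changeset() further down. The helper name is hypothetical.
def _sketch_json_log(ui, repo, revs):
    displayer = jsonchangeset(ui, repo, None, {}, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])
    displayer.close()
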
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self.cache = {}

        self._tref = tmplspec.ref
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))

def logtemplatespec(tmpl, mapfile):
    if mapfile:
        return formatter.templatespec('changeset', tmpl, mapfile)
    else:
        return formatter.templatespec('', tmpl, None)

def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        else:
            style = util.expandpath(ui.config('ui', 'style'))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    if not tmpl:
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)

def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'"""
    spec = logtemplatespec(tmpl, None)
    return changeset_templater(ui, repo, spec, buffered=buffered)

def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if not spec.ref and not spec.tmpl and not spec.mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    return changeset_templater(ui, repo, spec, matchfn, opts, buffered)

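# Editor's note: a minimal usage sketch, not part of the original module.
# show_changeset() resolves the display format in the order documented above
# and returns one of the three displayer classes; the helper name and the
# option dict are hypothetical.
def _sketch_show_tip(ui, repo):
    displayer = show_changeset(ui, repo, {'patch': False, 'stat': False})
    displayer.show(repo['tip'])
    displayer.close()
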
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')

def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))

def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2

class FileWalkError(Exception):
    pass

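# Editor's note: illustrative sketch only, not part of the original module.
# increasingwindows() doubles the window size until it reaches the limit and
# then keeps yielding the limit, so with the defaults the first ten values
# are 8, 16, 32, 64, 128, 256, 512, 512, 512, 512.
def _sketch_windows(n=10):
    gen = increasingwindows()
    return [next(gen) for _unused in xrange(n)]
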
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted

class _followfilter(object):
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False

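# Editor's note: a minimal sketch, not part of the original module. When
# revisions are fed to it in order, _followfilter keeps the first revision it
# sees and then only revisions connected to it through real parents (its
# descendants walking forward, its ancestors walking backward). The helper
# name is hypothetical.
def _sketch_follow(repo, revs):
    ff = _followfilter(repo)
    return [rev for rev in revs if ff.match(rev)]
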
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
2201 | else: |
|
2202 | else: | |
2202 | def want(rev): |
|
2203 | def want(rev): | |
2203 | return rev in wanted |
|
2204 | return rev in wanted | |
2204 |
|
2205 | |||
2205 | it = iter(revs) |
|
2206 | it = iter(revs) | |
2206 | stopiteration = False |
|
2207 | stopiteration = False | |
2207 | for windowsize in increasingwindows(): |
|
2208 | for windowsize in increasingwindows(): | |
2208 | nrevs = [] |
|
2209 | nrevs = [] | |
2209 | for i in xrange(windowsize): |
|
2210 | for i in xrange(windowsize): | |
2210 | rev = next(it, None) |
|
2211 | rev = next(it, None) | |
2211 | if rev is None: |
|
2212 | if rev is None: | |
2212 | stopiteration = True |
|
2213 | stopiteration = True | |
2213 | break |
|
2214 | break | |
2214 | elif want(rev): |
|
2215 | elif want(rev): | |
2215 | nrevs.append(rev) |
|
2216 | nrevs.append(rev) | |
2216 | for rev in sorted(nrevs): |
|
2217 | for rev in sorted(nrevs): | |
2217 | fns = fncache.get(rev) |
|
2218 | fns = fncache.get(rev) | |
2218 | ctx = change(rev) |
|
2219 | ctx = change(rev) | |
2219 | if not fns: |
|
2220 | if not fns: | |
2220 | def fns_generator(): |
|
2221 | def fns_generator(): | |
2221 | for f in ctx.files(): |
|
2222 | for f in ctx.files(): | |
2222 | if match(f): |
|
2223 | if match(f): | |
2223 | yield f |
|
2224 | yield f | |
2224 | fns = fns_generator() |
|
2225 | fns = fns_generator() | |
2225 | prepare(ctx, fns) |
|
2226 | prepare(ctx, fns) | |
2226 | for rev in nrevs: |
|
2227 | for rev in nrevs: | |
2227 | yield change(rev) |
|
2228 | yield change(rev) | |
2228 |
|
2229 | |||
2229 | if stopiteration: |
|
2230 | if stopiteration: | |
2230 | break |
|
2231 | break | |
2231 |
|
2232 | |||
2232 | return iterate() |
|
2233 | return iterate() | |
2233 |
|
2234 | |||
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

def _logrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs

def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templ.render(props)
    return formatnode

def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

def graphlog(ui, repo, pats, opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

2837 | def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts): |
|
2838 | def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts): | |
2838 | err = 1 |
|
2839 | err = 1 | |
2839 |
|
2840 | |||
2840 | def write(path): |
|
2841 | def write(path): | |
2841 | filename = None |
|
2842 | filename = None | |
2842 | if fntemplate: |
|
2843 | if fntemplate: | |
2843 | filename = makefilename(repo, fntemplate, ctx.node(), |
|
2844 | filename = makefilename(repo, fntemplate, ctx.node(), | |
2844 | pathname=os.path.join(prefix, path)) |
|
2845 | pathname=os.path.join(prefix, path)) | |
2845 | with formatter.maybereopen(basefm, filename, opts) as fm: |
|
2846 | with formatter.maybereopen(basefm, filename, opts) as fm: | |
2846 | data = ctx[path].data() |
|
2847 | data = ctx[path].data() | |
2847 | if opts.get('decode'): |
|
2848 | if opts.get('decode'): | |
2848 | data = repo.wwritedata(path, data) |
|
2849 | data = repo.wwritedata(path, data) | |
2849 | fm.startitem() |
|
2850 | fm.startitem() | |
2850 | fm.write('data', '%s', data) |
|
2851 | fm.write('data', '%s', data) | |
2851 | fm.data(abspath=path, path=matcher.rel(path)) |
|
2852 | fm.data(abspath=path, path=matcher.rel(path)) | |
2852 |
|
2853 | |||
2853 | # Automation often uses hg cat on single files, so special case it |
|
2854 | # Automation often uses hg cat on single files, so special case it | |
2854 | # for performance to avoid the cost of parsing the manifest. |
|
2855 | # for performance to avoid the cost of parsing the manifest. | |
2855 | if len(matcher.files()) == 1 and not matcher.anypats(): |
|
2856 | if len(matcher.files()) == 1 and not matcher.anypats(): | |
2856 | file = matcher.files()[0] |
|
2857 | file = matcher.files()[0] | |
2857 | mfl = repo.manifestlog |
|
2858 | mfl = repo.manifestlog | |
2858 | mfnode = ctx.manifestnode() |
|
2859 | mfnode = ctx.manifestnode() | |
2859 | try: |
|
2860 | try: | |
2860 | if mfnode and mfl[mfnode].find(file)[0]: |
|
2861 | if mfnode and mfl[mfnode].find(file)[0]: | |
2861 | write(file) |
|
2862 | write(file) | |
2862 | return 0 |
|
2863 | return 0 | |
2863 | except KeyError: |
|
2864 | except KeyError: | |
2864 | pass |
|
2865 | pass | |
2865 |
|
2866 | |||
2866 | for abs in ctx.walk(matcher): |
|
2867 | for abs in ctx.walk(matcher): | |
2867 | write(abs) |
|
2868 | write(abs) | |
2868 | err = 0 |
|
2869 | err = 0 | |
2869 |
|
2870 | |||
2870 | for subpath in sorted(ctx.substate): |
|
2871 | for subpath in sorted(ctx.substate): | |
2871 | sub = ctx.sub(subpath) |
|
2872 | sub = ctx.sub(subpath) | |
2872 | try: |
|
2873 | try: | |
2873 | submatch = matchmod.subdirmatcher(subpath, matcher) |
|
2874 | submatch = matchmod.subdirmatcher(subpath, matcher) | |
2874 |
|
2875 | |||
2875 | if not sub.cat(submatch, basefm, fntemplate, |
|
2876 | if not sub.cat(submatch, basefm, fntemplate, | |
2876 | os.path.join(prefix, sub._path), **opts): |
|
2877 | os.path.join(prefix, sub._path), **opts): | |
2877 | err = 0 |
|
2878 | err = 0 | |
2878 | except error.RepoLookupError: |
|
2879 | except error.RepoLookupError: | |
2879 | ui.status(_("skipping missing subrepository: %s\n") |
|
2880 | ui.status(_("skipping missing subrepository: %s\n") | |
2880 | % os.path.join(prefix, subpath)) |
|
2881 | % os.path.join(prefix, subpath)) | |
2881 |
|
2882 | |||
2882 | return err |
|
2883 | return err | |
2883 |
|
2884 | |||
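The comment inside cat() above describes a fast path: when the matcher names exactly one plain file and uses no patterns, that file is looked up directly instead of walking the whole manifest. A small sketch of the same idea, assuming a plain dict as a stand-in for the manifest (cat_fast_path and its arguments are hypothetical names, not Mercurial API):

    def cat_fast_path(manifest, matcher_files, has_patterns, write):
        # Serve a single, explicitly named file without a full walk; mirrors
        # the len(matcher.files()) == 1 special case in cat() above.
        if len(matcher_files) == 1 and not has_patterns:
            path = matcher_files[0]
            if path in manifest:
                write(path)
                return True
        return False

    manifest = {"README.txt": b"hello\n"}
    handled = cat_fast_path(manifest, ["README.txt"], False,
                            lambda p: print(manifest[p].decode()))
    print(handled)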
2884 | def commit(ui, repo, commitfunc, pats, opts): |
|
2885 | def commit(ui, repo, commitfunc, pats, opts): | |
2885 | '''commit the specified files or all outstanding changes''' |
|
2886 | '''commit the specified files or all outstanding changes''' | |
2886 | date = opts.get('date') |
|
2887 | date = opts.get('date') | |
2887 | if date: |
|
2888 | if date: | |
2888 | opts['date'] = util.parsedate(date) |
|
2889 | opts['date'] = util.parsedate(date) | |
2889 | message = logmessage(ui, opts) |
|
2890 | message = logmessage(ui, opts) | |
2890 | matcher = scmutil.match(repo[None], pats, opts) |
|
2891 | matcher = scmutil.match(repo[None], pats, opts) | |
2891 |
|
removed (old lines 2892-2899):
2892 |     # extract addremove carefully -- this function can be called from a command |
2893 |     # that doesn't support addremove |
2894 |     if opts.get('addremove'): |
2895 |         if scmutil.addremove(repo, matcher, "", opts) != 0: |
2896 |             raise error.Abort( |
2897 |                 _("failed to mark all new/missing files as added/removed")) |
2898 | |
2899 |     return commitfunc(ui, repo, message, matcher, opts) |

added (new lines 2893-2909):
2893 |     dsguard = None |
2894 |     # extract addremove carefully -- this function can be called from a command |
2895 |     # that doesn't support addremove |
2896 |     try: |
2897 |         if opts.get('addremove'): |
2898 |             dsguard = dirstateguard.dirstateguard(repo, 'commit') |
2899 |             if scmutil.addremove(repo, matcher, "", opts) != 0: |
2900 |                 raise error.Abort( |
2901 |                     _("failed to mark all new/missing files as added/removed")) |
2902 | |
2903 |         r = commitfunc(ui, repo, message, matcher, opts) |
2904 |         if dsguard: |
2905 |             dsguard.close() |
2906 |         return r |
2907 |     finally: |
2908 |         if dsguard: |
2909 |             dsguard.release() |

2910 | |
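The added lines follow a create-lazily / close-on-success / release-in-finally guard pattern: the dirstateguard is created only when addremove will touch the dirstate, close() keeps its effects once commitfunc has succeeded, and the finally block's release() rolls back anything still open. A self-contained sketch of that pattern over a dict-backed state (the Guard class is an illustrative stand-in, not Mercurial's dirstateguard):

    class Guard:
        # Snapshot on creation, keep changes on close(), restore the snapshot
        # on release() if close() was never reached.
        def __init__(self, state):
            self.state = state
            self.backup = dict(state)
            self.closed = False

        def close(self):
            self.closed = True

        def release(self):
            if not self.closed:
                self.state.clear()
                self.state.update(self.backup)

    def update_with_guard(state, mutate):
        guard = None
        try:
            guard = Guard(state)
            mutate(state)      # may raise; release() then restores the snapshot
            guard.close()      # success: keep the mutated state
            return state
        finally:
            if guard:
                guard.release()

    print(update_with_guard({'a': 'clean'}, lambda s: s.update(a='added')))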
2901 | def samefile(f, ctx1, ctx2): |
|
2911 | def samefile(f, ctx1, ctx2): | |
2902 | if f in ctx1.manifest(): |
|
2912 | if f in ctx1.manifest(): | |
2903 | a = ctx1.filectx(f) |
|
2913 | a = ctx1.filectx(f) | |
2904 | if f in ctx2.manifest(): |
|
2914 | if f in ctx2.manifest(): | |
2905 | b = ctx2.filectx(f) |
|
2915 | b = ctx2.filectx(f) | |
2906 | return (not a.cmp(b) |
|
2916 | return (not a.cmp(b) | |
2907 | and a.flags() == b.flags()) |
|
2917 | and a.flags() == b.flags()) | |
2908 | else: |
|
2918 | else: | |
2909 | return False |
|
2919 | return False | |
2910 | else: |
|
2920 | else: | |
2911 | return f not in ctx2.manifest() |
|
2921 | return f not in ctx2.manifest() | |
2912 |
|
2922 | |||
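samefile() above reports whether a file is identical (content and flags) in two changesets, or absent from both; amend() later uses it to prune files that ended up unchanged (files = [f for f in files if not samefile(f, ctx, base)]). A dict-based sketch of the same comparison, with plain snapshots standing in for changectx objects:

    def samefile(path, snap1, snap2):
        # Snapshots map path -> (data, flags); identical entries, or absence
        # from both sides, count as "same".
        if path in snap1:
            if path in snap2:
                return snap1[path] == snap2[path]
            return False
        return path not in snap2

    old = {'a.py': (b'x\n', '')}
    new = {'a.py': (b'x\n', ''), 'b.py': (b'y\n', 'x')}
    print(samefile('a.py', new, old), samefile('b.py', new, old))  # True False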
2913 | def amend(ui, repo, commitfunc, old, extra, pats, opts): |
|
2923 | def amend(ui, repo, commitfunc, old, extra, pats, opts): | |
2914 | # avoid cycle context -> subrepo -> cmdutil |
|
2924 | # avoid cycle context -> subrepo -> cmdutil | |
2915 | from . import context |
|
2925 | from . import context | |
2916 |
|
2926 | |||
2917 | # amend will reuse the existing user if not specified, but the obsolete |
|
2927 | # amend will reuse the existing user if not specified, but the obsolete | |
2918 | # marker creation requires that the current user's name is specified. |
|
2928 | # marker creation requires that the current user's name is specified. | |
2919 | if obsolete.isenabled(repo, obsolete.createmarkersopt): |
|
2929 | if obsolete.isenabled(repo, obsolete.createmarkersopt): | |
2920 | ui.username() # raise exception if username not set |
|
2930 | ui.username() # raise exception if username not set | |
2921 |
|
2931 | |||
2922 | ui.note(_('amending changeset %s\n') % old) |
|
2932 | ui.note(_('amending changeset %s\n') % old) | |
2923 | base = old.p1() |
|
2933 | base = old.p1() | |
2924 |
|
2934 | |||
2925 | newid = None |
|
2935 | newid = None | |
2926 | with repo.wlock(), repo.lock(), repo.transaction('amend'): |
|
2936 | with repo.wlock(), repo.lock(), repo.transaction('amend'): | |
2927 | # See if we got a message from -m or -l, if not, open the editor |
|
2937 | # See if we got a message from -m or -l, if not, open the editor | |
2928 | # with the message of the changeset to amend |
|
2938 | # with the message of the changeset to amend | |
2929 | message = logmessage(ui, opts) |
|
2939 | message = logmessage(ui, opts) | |
2930 | # ensure logfile does not conflict with later enforcement of the |
|
2940 | # ensure logfile does not conflict with later enforcement of the | |
2931 | # message. potential logfile content has been processed by |
|
2941 | # message. potential logfile content has been processed by | |
2932 | # `logmessage` anyway. |
|
2942 | # `logmessage` anyway. | |
2933 | opts.pop('logfile') |
|
2943 | opts.pop('logfile') | |
2934 | # First, do a regular commit to record all changes in the working |
|
2944 | # First, do a regular commit to record all changes in the working | |
2935 | # directory (if there are any) |
|
2945 | # directory (if there are any) | |
2936 | ui.callhooks = False |
|
2946 | ui.callhooks = False | |
2937 | activebookmark = repo._bookmarks.active |
|
2947 | activebookmark = repo._bookmarks.active | |
2938 | try: |
|
2948 | try: | |
2939 | repo._bookmarks.active = None |
|
2949 | repo._bookmarks.active = None | |
2940 | opts['message'] = 'temporary amend commit for %s' % old |
|
2950 | opts['message'] = 'temporary amend commit for %s' % old | |
2941 | node = commit(ui, repo, commitfunc, pats, opts) |
|
2951 | node = commit(ui, repo, commitfunc, pats, opts) | |
2942 | finally: |
|
2952 | finally: | |
2943 | repo._bookmarks.active = activebookmark |
|
2953 | repo._bookmarks.active = activebookmark | |
2944 | ui.callhooks = True |
|
2954 | ui.callhooks = True | |
2945 | ctx = repo[node] |
|
2955 | ctx = repo[node] | |
2946 |
|
2956 | |||
2947 | # Participating changesets: |
|
2957 | # Participating changesets: | |
2948 | # |
|
2958 | # | |
2949 | # node/ctx o - new (intermediate) commit that contains changes |
|
2959 | # node/ctx o - new (intermediate) commit that contains changes | |
2950 | # | from working dir to go into amending commit |
|
2960 | # | from working dir to go into amending commit | |
2951 | # | (or a workingctx if there were no changes) |
|
2961 | # | (or a workingctx if there were no changes) | |
2952 | # | |
|
2962 | # | | |
2953 | # old o - changeset to amend |
|
2963 | # old o - changeset to amend | |
2954 | # | |
|
2964 | # | | |
2955 | # base o - parent of amending changeset |
|
2965 | # base o - parent of amending changeset | |
2956 |
|
2966 | |||
2957 | # Update extra dict from amended commit (e.g. to preserve graft |
|
2967 | # Update extra dict from amended commit (e.g. to preserve graft | |
2958 | # source) |
|
2968 | # source) | |
2959 | extra.update(old.extra()) |
|
2969 | extra.update(old.extra()) | |
2960 |
|
2970 | |||
2961 | # Also update it from the intermediate commit or from the wctx |
|
2971 | # Also update it from the intermediate commit or from the wctx | |
2962 | extra.update(ctx.extra()) |
|
2972 | extra.update(ctx.extra()) | |
2963 |
|
2973 | |||
2964 | if len(old.parents()) > 1: |
|
2974 | if len(old.parents()) > 1: | |
2965 | # ctx.files() isn't reliable for merges, so fall back to the |
|
2975 | # ctx.files() isn't reliable for merges, so fall back to the | |
2966 | # slower repo.status() method |
|
2976 | # slower repo.status() method | |
2967 | files = set([fn for st in repo.status(base, old)[:3] |
|
2977 | files = set([fn for st in repo.status(base, old)[:3] | |
2968 | for fn in st]) |
|
2978 | for fn in st]) | |
2969 | else: |
|
2979 | else: | |
2970 | files = set(old.files()) |
|
2980 | files = set(old.files()) | |
2971 |
|
2981 | |||
2972 | # Second, we use either the commit we just did, or if there were no |
|
2982 | # Second, we use either the commit we just did, or if there were no | |
2973 | # changes the parent of the working directory as the version of the |
|
2983 | # changes the parent of the working directory as the version of the | |
2974 | # files in the final amend commit |
|
2984 | # files in the final amend commit | |
2975 | if node: |
|
2985 | if node: | |
2976 | ui.note(_('copying changeset %s to %s\n') % (ctx, base)) |
|
2986 | ui.note(_('copying changeset %s to %s\n') % (ctx, base)) | |
2977 |
|
2987 | |||
2978 | user = ctx.user() |
|
2988 | user = ctx.user() | |
2979 | date = ctx.date() |
|
2989 | date = ctx.date() | |
2980 | # Recompute copies (avoid recording a -> b -> a) |
|
2990 | # Recompute copies (avoid recording a -> b -> a) | |
2981 | copied = copies.pathcopies(base, ctx) |
|
2991 | copied = copies.pathcopies(base, ctx) | |
2982 | if old.p2: |
|
2992 | if old.p2: | |
2983 | copied.update(copies.pathcopies(old.p2(), ctx)) |
|
2993 | copied.update(copies.pathcopies(old.p2(), ctx)) | |
2984 |
|
2994 | |||
2985 | # Prune files which were reverted by the updates: if old |
|
2995 | # Prune files which were reverted by the updates: if old | |
2986 | # introduced file X and our intermediate commit, node, |
|
2996 | # introduced file X and our intermediate commit, node, | |
2987 | # renamed that file, then those two files are the same and |
|
2997 | # renamed that file, then those two files are the same and | |
2988 | # we can discard X from our list of files. Likewise if X |
|
2998 | # we can discard X from our list of files. Likewise if X | |
2989 | # was deleted, it's no longer relevant |
|
2999 | # was deleted, it's no longer relevant | |
2990 | files.update(ctx.files()) |
|
3000 | files.update(ctx.files()) | |
2991 | files = [f for f in files if not samefile(f, ctx, base)] |
|
3001 | files = [f for f in files if not samefile(f, ctx, base)] | |
2992 |
|
3002 | |||
2993 | def filectxfn(repo, ctx_, path): |
|
3003 | def filectxfn(repo, ctx_, path): | |
2994 | try: |
|
3004 | try: | |
2995 | fctx = ctx[path] |
|
3005 | fctx = ctx[path] | |
2996 | flags = fctx.flags() |
|
3006 | flags = fctx.flags() | |
2997 | mctx = context.memfilectx(repo, |
|
3007 | mctx = context.memfilectx(repo, | |
2998 | fctx.path(), fctx.data(), |
|
3008 | fctx.path(), fctx.data(), | |
2999 | islink='l' in flags, |
|
3009 | islink='l' in flags, | |
3000 | isexec='x' in flags, |
|
3010 | isexec='x' in flags, | |
3001 | copied=copied.get(path)) |
|
3011 | copied=copied.get(path)) | |
3002 | return mctx |
|
3012 | return mctx | |
3003 | except KeyError: |
|
3013 | except KeyError: | |
3004 | return None |
|
3014 | return None | |
3005 | else: |
|
3015 | else: | |
3006 | ui.note(_('copying changeset %s to %s\n') % (old, base)) |
|
3016 | ui.note(_('copying changeset %s to %s\n') % (old, base)) | |
3007 |
|
3017 | |||
3008 | # Use version of files as in the old cset |
|
3018 | # Use version of files as in the old cset | |
3009 | def filectxfn(repo, ctx_, path): |
|
3019 | def filectxfn(repo, ctx_, path): | |
3010 | try: |
|
3020 | try: | |
3011 | return old.filectx(path) |
|
3021 | return old.filectx(path) | |
3012 | except KeyError: |
|
3022 | except KeyError: | |
3013 | return None |
|
3023 | return None | |
3014 |
|
3024 | |||
3015 | user = opts.get('user') or old.user() |
|
3025 | user = opts.get('user') or old.user() | |
3016 | date = opts.get('date') or old.date() |
|
3026 | date = opts.get('date') or old.date() | |
3017 | editform = mergeeditform(old, 'commit.amend') |
|
3027 | editform = mergeeditform(old, 'commit.amend') | |
3018 | editor = getcommiteditor(editform=editform, |
|
3028 | editor = getcommiteditor(editform=editform, | |
3019 | **pycompat.strkwargs(opts)) |
|
3029 | **pycompat.strkwargs(opts)) | |
3020 | if not message: |
|
3030 | if not message: | |
3021 | editor = getcommiteditor(edit=True, editform=editform) |
|
3031 | editor = getcommiteditor(edit=True, editform=editform) | |
3022 | message = old.description() |
|
3032 | message = old.description() | |
3023 |
|
3033 | |||
3024 | pureextra = extra.copy() |
|
3034 | pureextra = extra.copy() | |
3025 | extra['amend_source'] = old.hex() |
|
3035 | extra['amend_source'] = old.hex() | |
3026 |
|
3036 | |||
3027 | new = context.memctx(repo, |
|
3037 | new = context.memctx(repo, | |
3028 | parents=[base.node(), old.p2().node()], |
|
3038 | parents=[base.node(), old.p2().node()], | |
3029 | text=message, |
|
3039 | text=message, | |
3030 | files=files, |
|
3040 | files=files, | |
3031 | filectxfn=filectxfn, |
|
3041 | filectxfn=filectxfn, | |
3032 | user=user, |
|
3042 | user=user, | |
3033 | date=date, |
|
3043 | date=date, | |
3034 | extra=extra, |
|
3044 | extra=extra, | |
3035 | editor=editor) |
|
3045 | editor=editor) | |
3036 |
|
3046 | |||
3037 | newdesc = changelog.stripdesc(new.description()) |
|
3047 | newdesc = changelog.stripdesc(new.description()) | |
3038 | if ((not node) |
|
3048 | if ((not node) | |
3039 | and newdesc == old.description() |
|
3049 | and newdesc == old.description() | |
3040 | and user == old.user() |
|
3050 | and user == old.user() | |
3041 | and date == old.date() |
|
3051 | and date == old.date() | |
3042 | and pureextra == old.extra()): |
|
3052 | and pureextra == old.extra()): | |
3043 | # nothing changed. continuing here would create a new node |
|
3053 | # nothing changed. continuing here would create a new node | |
3044 | # anyway because of the amend_source noise. |
|
3054 | # anyway because of the amend_source noise. | |
3045 | # |
|
3055 | # | |
3046 | # This is not what we expect from amend. |
|
3056 | # This is not what we expect from amend. | |
3047 | return old.node() |
|
3057 | return old.node() | |
3048 |
|
3058 | |||
3049 | ph = repo.ui.config('phases', 'new-commit', phases.draft) |
|
3059 | ph = repo.ui.config('phases', 'new-commit', phases.draft) | |
3050 | try: |
|
3060 | try: | |
3051 | if opts.get('secret'): |
|
3061 | if opts.get('secret'): | |
3052 | commitphase = 'secret' |
|
3062 | commitphase = 'secret' | |
3053 | else: |
|
3063 | else: | |
3054 | commitphase = old.phase() |
|
3064 | commitphase = old.phase() | |
3055 | repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend') |
|
3065 | repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend') | |
3056 | newid = repo.commitctx(new) |
|
3066 | newid = repo.commitctx(new) | |
3057 | finally: |
|
3067 | finally: | |
3058 | repo.ui.setconfig('phases', 'new-commit', ph, 'amend') |
|
3068 | repo.ui.setconfig('phases', 'new-commit', ph, 'amend') | |
3059 | if newid != old.node(): |
|
3069 | if newid != old.node(): | |
3060 | # Reroute the working copy parent to the new changeset |
|
3070 | # Reroute the working copy parent to the new changeset | |
3061 | repo.setparents(newid, nullid) |
|
3071 | repo.setparents(newid, nullid) | |
3062 | mapping = {old.node(): (newid,)} |
|
3072 | mapping = {old.node(): (newid,)} | |
3063 | if node: |
|
3073 | if node: | |
3064 | mapping[node] = () |
|
3074 | mapping[node] = () | |
3065 | scmutil.cleanupnodes(repo, mapping, 'amend') |
|
3075 | scmutil.cleanupnodes(repo, mapping, 'amend') | |
3066 | return newid |
|
3076 | return newid | |
3067 |
|
3077 | |||
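Inside amend(), the phase of the new commit is forced by saving 'phases.new-commit', overriding it, and restoring it in a finally block. A hedged sketch of that save/override/restore shape as a context manager, using a plain dict in place of repo.ui configuration:

    import contextlib

    @contextlib.contextmanager
    def overridden(config, key, value):
        # Temporarily override one setting and always restore the old value,
        # mirroring the setconfig/finally dance around 'phases.new-commit'.
        saved = config.get(key)
        config[key] = value
        try:
            yield config
        finally:
            config[key] = saved

    config = {'phases.new-commit': 'draft'}
    with overridden(config, 'phases.new-commit', 'secret'):
        pass  # the amended commit would be created while the override is active
    print(config['phases.new-commit'])  # back to 'draft'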
3068 | def commiteditor(repo, ctx, subs, editform=''): |
|
3078 | def commiteditor(repo, ctx, subs, editform=''): | |
3069 | if ctx.description(): |
|
3079 | if ctx.description(): | |
3070 | return ctx.description() |
|
3080 | return ctx.description() | |
3071 | return commitforceeditor(repo, ctx, subs, editform=editform, |
|
3081 | return commitforceeditor(repo, ctx, subs, editform=editform, | |
3072 | unchangedmessagedetection=True) |
|
3082 | unchangedmessagedetection=True) | |
3073 |
|
3083 | |||
3074 | def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None, |
|
3084 | def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None, | |
3075 | editform='', unchangedmessagedetection=False): |
|
3085 | editform='', unchangedmessagedetection=False): | |
3076 | if not extramsg: |
|
3086 | if not extramsg: | |
3077 | extramsg = _("Leave message empty to abort commit.") |
|
3087 | extramsg = _("Leave message empty to abort commit.") | |
3078 |
|
3088 | |||
3079 | forms = [e for e in editform.split('.') if e] |
|
3089 | forms = [e for e in editform.split('.') if e] | |
3080 | forms.insert(0, 'changeset') |
|
3090 | forms.insert(0, 'changeset') | |
3081 | templatetext = None |
|
3091 | templatetext = None | |
3082 | while forms: |
|
3092 | while forms: | |
3083 | ref = '.'.join(forms) |
|
3093 | ref = '.'.join(forms) | |
3084 | if repo.ui.config('committemplate', ref): |
|
3094 | if repo.ui.config('committemplate', ref): | |
3085 | templatetext = committext = buildcommittemplate( |
|
3095 | templatetext = committext = buildcommittemplate( | |
3086 | repo, ctx, subs, extramsg, ref) |
|
3096 | repo, ctx, subs, extramsg, ref) | |
3087 | break |
|
3097 | break | |
3088 | forms.pop() |
|
3098 | forms.pop() | |
3089 | else: |
|
3099 | else: | |
3090 | committext = buildcommittext(repo, ctx, subs, extramsg) |
|
3100 | committext = buildcommittext(repo, ctx, subs, extramsg) | |
3091 |
|
3101 | |||
3092 | # run editor in the repository root |
|
3102 | # run editor in the repository root | |
3093 | olddir = pycompat.getcwd() |
|
3103 | olddir = pycompat.getcwd() | |
3094 | os.chdir(repo.root) |
|
3104 | os.chdir(repo.root) | |
3095 |
|
3105 | |||
3096 | # make in-memory changes visible to external process |
|
3106 | # make in-memory changes visible to external process | |
3097 | tr = repo.currenttransaction() |
|
3107 | tr = repo.currenttransaction() | |
3098 | repo.dirstate.write(tr) |
|
3108 | repo.dirstate.write(tr) | |
3099 | pending = tr and tr.writepending() and repo.root |
|
3109 | pending = tr and tr.writepending() and repo.root | |
3100 |
|
3110 | |||
3101 | editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(), |
|
3111 | editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(), | |
3102 | editform=editform, pending=pending, |
|
3112 | editform=editform, pending=pending, | |
3103 | repopath=repo.path) |
|
3113 | repopath=repo.path) | |
3104 | text = editortext |
|
3114 | text = editortext | |
3105 |
|
3115 | |||
3106 | # strip away anything below this special string (used for editors that want |
|
3116 | # strip away anything below this special string (used for editors that want | |
3107 | # to display the diff) |
|
3117 | # to display the diff) | |
3108 | stripbelow = re.search(_linebelow, text, flags=re.MULTILINE) |
|
3118 | stripbelow = re.search(_linebelow, text, flags=re.MULTILINE) | |
3109 | if stripbelow: |
|
3119 | if stripbelow: | |
3110 | text = text[:stripbelow.start()] |
|
3120 | text = text[:stripbelow.start()] | |
3111 |
|
3121 | |||
3112 | text = re.sub("(?m)^HG:.*(\n|$)", "", text) |
|
3122 | text = re.sub("(?m)^HG:.*(\n|$)", "", text) | |
3113 | os.chdir(olddir) |
|
3123 | os.chdir(olddir) | |
3114 |
|
3124 | |||
3115 | if finishdesc: |
|
3125 | if finishdesc: | |
3116 | text = finishdesc(text) |
|
3126 | text = finishdesc(text) | |
3117 | if not text.strip(): |
|
3127 | if not text.strip(): | |
3118 | raise error.Abort(_("empty commit message")) |
|
3128 | raise error.Abort(_("empty commit message")) | |
3119 | if unchangedmessagedetection and editortext == templatetext: |
|
3129 | if unchangedmessagedetection and editortext == templatetext: | |
3120 | raise error.Abort(_("commit message unchanged")) |
|
3130 | raise error.Abort(_("commit message unchanged")) | |
3121 |
|
3131 | |||
3122 | return text |
|
3132 | return text | |
3123 |
|
3133 | |||
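commitforceeditor() post-processes whatever the editor returns: cut everything below the scissors marker, drop 'HG:' helper lines, then reject an empty or unchanged message. A self-contained sketch of that cleanup, assuming an illustrative marker regex (the real pattern lives in _linebelow, and the real code raises error.Abort rather than ValueError):

    import re

    LINE_BELOW = r"^HG: ------------------------ >8 ------------------------$"

    def clean_editor_text(text):
        # Cut the diff preview below the scissors line, strip HG: lines,
        # and refuse an empty result.
        cut = re.search(LINE_BELOW, text, flags=re.MULTILINE)
        if cut:
            text = text[:cut.start()]
        text = re.sub(r"(?m)^HG:.*(\n|$)", "", text)
        if not text.strip():
            raise ValueError("empty commit message")
        return text

    print(clean_editor_text("fix bug\n\nHG: Enter commit message.\nHG: changed a.py\n"))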
3124 | def buildcommittemplate(repo, ctx, subs, extramsg, ref): |
|
3134 | def buildcommittemplate(repo, ctx, subs, extramsg, ref): | |
3125 | ui = repo.ui |
|
3135 | ui = repo.ui | |
3126 | spec = formatter.templatespec(ref, None, None) |
|
3136 | spec = formatter.templatespec(ref, None, None) | |
3127 | t = changeset_templater(ui, repo, spec, None, {}, False) |
|
3137 | t = changeset_templater(ui, repo, spec, None, {}, False) | |
3128 | t.t.cache.update((k, templater.unquotestring(v)) |
|
3138 | t.t.cache.update((k, templater.unquotestring(v)) | |
3129 | for k, v in repo.ui.configitems('committemplate')) |
|
3139 | for k, v in repo.ui.configitems('committemplate')) | |
3130 |
|
3140 | |||
3131 | if not extramsg: |
|
3141 | if not extramsg: | |
3132 | extramsg = '' # ensure that extramsg is string |
|
3142 | extramsg = '' # ensure that extramsg is string | |
3133 |
|
3143 | |||
3134 | ui.pushbuffer() |
|
3144 | ui.pushbuffer() | |
3135 | t.show(ctx, extramsg=extramsg) |
|
3145 | t.show(ctx, extramsg=extramsg) | |
3136 | return ui.popbuffer() |
|
3146 | return ui.popbuffer() | |
3137 |
|
3147 | |||
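The template rendered above is picked by peeling the dotted editform ('changeset.commit.amend', then 'changeset.commit', then 'changeset') until a configured key is found. A small sketch of that most-specific-key lookup, with a plain dict standing in for ui.config('committemplate', ...):

    def most_specific_template(config, editform):
        # Try the longest dotted key first, dropping one component at a time.
        forms = [part for part in editform.split('.') if part]
        forms.insert(0, 'changeset')
        while forms:
            ref = '.'.join(forms)
            if config.get(ref):
                return ref, config[ref]
            forms.pop()
        return None, None

    print(most_specific_template({'changeset.commit': '{desc}\n'}, 'commit.amend'))
    # -> ('changeset.commit', '{desc}\n')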
3138 | def hgprefix(msg): |
|
3148 | def hgprefix(msg): | |
3139 | return "\n".join(["HG: %s" % a for a in msg.split("\n") if a]) |
|
3149 | return "\n".join(["HG: %s" % a for a in msg.split("\n") if a]) | |
3140 |
|
3150 | |||
3141 | def buildcommittext(repo, ctx, subs, extramsg): |
|
3151 | def buildcommittext(repo, ctx, subs, extramsg): | |
3142 | edittext = [] |
|
3152 | edittext = [] | |
3143 | modified, added, removed = ctx.modified(), ctx.added(), ctx.removed() |
|
3153 | modified, added, removed = ctx.modified(), ctx.added(), ctx.removed() | |
3144 | if ctx.description(): |
|
3154 | if ctx.description(): | |
3145 | edittext.append(ctx.description()) |
|
3155 | edittext.append(ctx.description()) | |
3146 | edittext.append("") |
|
3156 | edittext.append("") | |
3147 | edittext.append("") # Empty line between message and comments. |
|
3157 | edittext.append("") # Empty line between message and comments. | |
3148 | edittext.append(hgprefix(_("Enter commit message." |
|
3158 | edittext.append(hgprefix(_("Enter commit message." | |
3149 | " Lines beginning with 'HG:' are removed."))) |
|
3159 | " Lines beginning with 'HG:' are removed."))) | |
3150 | edittext.append(hgprefix(extramsg)) |
|
3160 | edittext.append(hgprefix(extramsg)) | |
3151 | edittext.append("HG: --") |
|
3161 | edittext.append("HG: --") | |
3152 | edittext.append(hgprefix(_("user: %s") % ctx.user())) |
|
3162 | edittext.append(hgprefix(_("user: %s") % ctx.user())) | |
3153 | if ctx.p2(): |
|
3163 | if ctx.p2(): | |
3154 | edittext.append(hgprefix(_("branch merge"))) |
|
3164 | edittext.append(hgprefix(_("branch merge"))) | |
3155 | if ctx.branch(): |
|
3165 | if ctx.branch(): | |
3156 | edittext.append(hgprefix(_("branch '%s'") % ctx.branch())) |
|
3166 | edittext.append(hgprefix(_("branch '%s'") % ctx.branch())) | |
3157 | if bookmarks.isactivewdirparent(repo): |
|
3167 | if bookmarks.isactivewdirparent(repo): | |
3158 | edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark)) |
|
3168 | edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark)) | |
3159 | edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs]) |
|
3169 | edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs]) | |
3160 | edittext.extend([hgprefix(_("added %s") % f) for f in added]) |
|
3170 | edittext.extend([hgprefix(_("added %s") % f) for f in added]) | |
3161 | edittext.extend([hgprefix(_("changed %s") % f) for f in modified]) |
|
3171 | edittext.extend([hgprefix(_("changed %s") % f) for f in modified]) | |
3162 | edittext.extend([hgprefix(_("removed %s") % f) for f in removed]) |
|
3172 | edittext.extend([hgprefix(_("removed %s") % f) for f in removed]) | |
3163 | if not added and not modified and not removed: |
|
3173 | if not added and not modified and not removed: | |
3164 | edittext.append(hgprefix(_("no files changed"))) |
|
3174 | edittext.append(hgprefix(_("no files changed"))) | |
3165 | edittext.append("") |
|
3175 | edittext.append("") | |
3166 |
|
3176 | |||
3167 | return "\n".join(edittext) |
|
3177 | return "\n".join(edittext) | |
3168 |
|
3178 | |||
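buildcommittext() assembles the editor scaffold from hgprefix()-ed lines. A quick usage sketch of that helper (the user and file names are made up):

    def hgprefix(msg):
        # Same helper as defined above: prefix every non-empty line with 'HG: '.
        return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

    print(hgprefix("user: alice"))             # HG: user: alice
    print(hgprefix("added a.py\nadded b.py"))  # HG: added a.py / HG: added b.py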
3169 | def commitstatus(repo, node, branch, bheads=None, opts=None): |
|
3179 | def commitstatus(repo, node, branch, bheads=None, opts=None): | |
3170 | if opts is None: |
|
3180 | if opts is None: | |
3171 | opts = {} |
|
3181 | opts = {} | |
3172 | ctx = repo[node] |
|
3182 | ctx = repo[node] | |
3173 | parents = ctx.parents() |
|
3183 | parents = ctx.parents() | |
3174 |
|
3184 | |||
3175 | if (not opts.get('amend') and bheads and node not in bheads and not |
|
3185 | if (not opts.get('amend') and bheads and node not in bheads and not | |
3176 | [x for x in parents if x.node() in bheads and x.branch() == branch]): |
|
3186 | [x for x in parents if x.node() in bheads and x.branch() == branch]): | |
3177 | repo.ui.status(_('created new head\n')) |
|
3187 | repo.ui.status(_('created new head\n')) | |
3178 | # The message is not printed for initial roots. For the other |
|
3188 | # The message is not printed for initial roots. For the other | |
3179 | # changesets, it is printed in the following situations: |
|
3189 | # changesets, it is printed in the following situations: | |
3180 | # |
|
3190 | # | |
3181 | # Par column: for the 2 parents with ... |
|
3191 | # Par column: for the 2 parents with ... | |
3182 | # N: null or no parent |
|
3192 | # N: null or no parent | |
3183 | # B: parent is on another named branch |
|
3193 | # B: parent is on another named branch | |
3184 | # C: parent is a regular non head changeset |
|
3194 | # C: parent is a regular non head changeset | |
3185 | # H: parent was a branch head of the current branch |
|
3195 | # H: parent was a branch head of the current branch | |
3186 | # Msg column: whether we print "created new head" message |
|
3196 | # Msg column: whether we print "created new head" message | |
3187 | # In the following, it is assumed that there already exists some |
|
3197 | # In the following, it is assumed that there already exists some | |
3188 | # initial branch heads of the current branch, otherwise nothing is |
|
3198 | # initial branch heads of the current branch, otherwise nothing is | |
3189 | # printed anyway. |
|
3199 | # printed anyway. | |
3190 | # |
|
3200 | # | |
3191 | # Par Msg Comment |
|
3201 | # Par Msg Comment | |
3192 | # N N y additional topo root |
|
3202 | # N N y additional topo root | |
3193 | # |
|
3203 | # | |
3194 | # B N y additional branch root |
|
3204 | # B N y additional branch root | |
3195 | # C N y additional topo head |
|
3205 | # C N y additional topo head | |
3196 | # H N n usual case |
|
3206 | # H N n usual case | |
3197 | # |
|
3207 | # | |
3198 | # B B y weird additional branch root |
|
3208 | # B B y weird additional branch root | |
3199 | # C B y branch merge |
|
3209 | # C B y branch merge | |
3200 | # H B n merge with named branch |
|
3210 | # H B n merge with named branch | |
3201 | # |
|
3211 | # | |
3202 | # C C y additional head from merge |
|
3212 | # C C y additional head from merge | |
3203 | # C H n merge with a head |
|
3213 | # C H n merge with a head | |
3204 | # |
|
3214 | # | |
3205 | # H H n head merge: head count decreases |
|
3215 | # H H n head merge: head count decreases | |
3206 |
|
3216 | |||
3207 | if not opts.get('close_branch'): |
|
3217 | if not opts.get('close_branch'): | |
3208 | for r in parents: |
|
3218 | for r in parents: | |
3209 | if r.closesbranch() and r.branch() == branch: |
|
3219 | if r.closesbranch() and r.branch() == branch: | |
3210 | repo.ui.status(_('reopening closed branch head %d\n') % r) |
|
3220 | repo.ui.status(_('reopening closed branch head %d\n') % r) | |
3211 |
|
3221 | |||
3212 | if repo.ui.debugflag: |
|
3222 | if repo.ui.debugflag: | |
3213 | repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex())) |
|
3223 | repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex())) | |
3214 | elif repo.ui.verbose: |
|
3224 | elif repo.ui.verbose: | |
3215 | repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx)) |
|
3225 | repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx)) | |
3216 |
|
3226 | |||
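commitstatus() prints 'created new head' only when the new node is not already a branch head and none of its parents was a head of the same branch. A sketch mirroring that condition with plain ids and sets in place of changectx objects (creates_new_head is a hypothetical helper name):

    def creates_new_head(node, parents, bheads, branch, amend=False):
        # parents is a list of (id, branch) pairs; bheads the current heads.
        if amend or not bheads or node in bheads:
            return False
        # no parent was a head of the same branch -> the commit adds a head
        return not [p for p, pbranch in parents
                    if p in bheads and pbranch == branch]

    print(creates_new_head('n2', [('n1', 'default')], {'n0'}, 'default'))  # True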
3217 | def postcommitstatus(repo, pats, opts): |
|
3227 | def postcommitstatus(repo, pats, opts): | |
3218 | return repo.status(match=scmutil.match(repo[None], pats, opts)) |
|
3228 | return repo.status(match=scmutil.match(repo[None], pats, opts)) | |
3219 |
|
3229 | |||
3220 | def revert(ui, repo, ctx, parents, *pats, **opts): |
|
3230 | def revert(ui, repo, ctx, parents, *pats, **opts): | |
3221 | parent, p2 = parents |
|
3231 | parent, p2 = parents | |
3222 | node = ctx.node() |
|
3232 | node = ctx.node() | |
3223 |
|
3233 | |||
3224 | mf = ctx.manifest() |
|
3234 | mf = ctx.manifest() | |
3225 | if node == p2: |
|
3235 | if node == p2: | |
3226 | parent = p2 |
|
3236 | parent = p2 | |
3227 |
|
3237 | |||
3228 | # need all matching names in dirstate and manifest of target rev, |
|
3238 | # need all matching names in dirstate and manifest of target rev, | |
3229 | # so have to walk both. do not print errors if files exist in one |
|
3239 | # so have to walk both. do not print errors if files exist in one | |
3230 | # but not other. in both cases, filesets should be evaluated against |
|
3240 | # but not other. in both cases, filesets should be evaluated against | |
3231 | # workingctx to get consistent result (issue4497). this means 'set:**' |
|
3241 | # workingctx to get consistent result (issue4497). this means 'set:**' | |
3232 | # cannot be used to select missing files from target rev. |
|
3242 | # cannot be used to select missing files from target rev. | |
3233 |
|
3243 | |||
3234 | # `names` is a mapping for all elements in working copy and target revision |
|
3244 | # `names` is a mapping for all elements in working copy and target revision | |
3235 | # The mapping is in the form: |
|
3245 | # The mapping is in the form: | |
3236 | # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>) |
|
3246 | # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>) | |
3237 | names = {} |
|
3247 | names = {} | |
3238 |
|
3248 | |||
3239 | with repo.wlock(): |
|
3249 | with repo.wlock(): | |
3240 | ## filling of the `names` mapping |
|
3250 | ## filling of the `names` mapping | |
3241 | # walk dirstate to fill `names` |
|
3251 | # walk dirstate to fill `names` | |
3242 |
|
3252 | |||
3243 | interactive = opts.get('interactive', False) |
|
3253 | interactive = opts.get('interactive', False) | |
3244 | wctx = repo[None] |
|
3254 | wctx = repo[None] | |
3245 | m = scmutil.match(wctx, pats, opts) |
|
3255 | m = scmutil.match(wctx, pats, opts) | |
3246 |
|
3256 | |||
3247 | # we'll need this later |
|
3257 | # we'll need this later | |
3248 | targetsubs = sorted(s for s in wctx.substate if m(s)) |
|
3258 | targetsubs = sorted(s for s in wctx.substate if m(s)) | |
3249 |
|
3259 | |||
3250 | if not m.always(): |
|
3260 | if not m.always(): | |
3251 | matcher = matchmod.badmatch(m, lambda x, y: False) |
|
3261 | matcher = matchmod.badmatch(m, lambda x, y: False) | |
3252 | for abs in wctx.walk(matcher): |
|
3262 | for abs in wctx.walk(matcher): | |
3253 | names[abs] = m.rel(abs), m.exact(abs) |
|
3263 | names[abs] = m.rel(abs), m.exact(abs) | |
3254 |
|
3264 | |||
3255 | # walk target manifest to fill `names` |
|
3265 | # walk target manifest to fill `names` | |
3256 |
|
3266 | |||
3257 | def badfn(path, msg): |
|
3267 | def badfn(path, msg): | |
3258 | if path in names: |
|
3268 | if path in names: | |
3259 | return |
|
3269 | return | |
3260 | if path in ctx.substate: |
|
3270 | if path in ctx.substate: | |
3261 | return |
|
3271 | return | |
3262 | path_ = path + '/' |
|
3272 | path_ = path + '/' | |
3263 | for f in names: |
|
3273 | for f in names: | |
3264 | if f.startswith(path_): |
|
3274 | if f.startswith(path_): | |
3265 | return |
|
3275 | return | |
3266 | ui.warn("%s: %s\n" % (m.rel(path), msg)) |
|
3276 | ui.warn("%s: %s\n" % (m.rel(path), msg)) | |
3267 |
|
3277 | |||
3268 | for abs in ctx.walk(matchmod.badmatch(m, badfn)): |
|
3278 | for abs in ctx.walk(matchmod.badmatch(m, badfn)): | |
3269 | if abs not in names: |
|
3279 | if abs not in names: | |
3270 | names[abs] = m.rel(abs), m.exact(abs) |
|
3280 | names[abs] = m.rel(abs), m.exact(abs) | |
3271 |
|
3281 | |||
3272 | # Find status of all files in `names`. |
|
3282 | # Find status of all files in `names`. | |
3273 | m = scmutil.matchfiles(repo, names) |
|
3283 | m = scmutil.matchfiles(repo, names) | |
3274 |
|
3284 | |||
3275 | changes = repo.status(node1=node, match=m, |
|
3285 | changes = repo.status(node1=node, match=m, | |
3276 | unknown=True, ignored=True, clean=True) |
|
3286 | unknown=True, ignored=True, clean=True) | |
3277 | else: |
|
3287 | else: | |
3278 | changes = repo.status(node1=node, match=m) |
|
3288 | changes = repo.status(node1=node, match=m) | |
3279 | for kind in changes: |
|
3289 | for kind in changes: | |
3280 | for abs in kind: |
|
3290 | for abs in kind: | |
3281 | names[abs] = m.rel(abs), m.exact(abs) |
|
3291 | names[abs] = m.rel(abs), m.exact(abs) | |
3282 |
|
3292 | |||
3283 | m = scmutil.matchfiles(repo, names) |
|
3293 | m = scmutil.matchfiles(repo, names) | |
3284 |
|
3294 | |||
3285 | modified = set(changes.modified) |
|
3295 | modified = set(changes.modified) | |
3286 | added = set(changes.added) |
|
3296 | added = set(changes.added) | |
3287 | removed = set(changes.removed) |
|
3297 | removed = set(changes.removed) | |
3288 | _deleted = set(changes.deleted) |
|
3298 | _deleted = set(changes.deleted) | |
3289 | unknown = set(changes.unknown) |
|
3299 | unknown = set(changes.unknown) | |
3290 | unknown.update(changes.ignored) |
|
3300 | unknown.update(changes.ignored) | |
3291 | clean = set(changes.clean) |
|
3301 | clean = set(changes.clean) | |
3292 | modadded = set() |
|
3302 | modadded = set() | |
3293 |
|
3303 | |||
3294 | # We need to account for the state of the file in the dirstate, |
|
3304 | # We need to account for the state of the file in the dirstate, | |
3295 | # even when we revert against something other than the parent. This will |
|
3305 | # even when we revert against something other than the parent. This will | |
3296 | # slightly alter the behavior of revert (doing back up or not, delete |
|
3306 | # slightly alter the behavior of revert (doing back up or not, delete | |
3297 | # or just forget etc). |
|
3307 | # or just forget etc). | |
3298 | if parent == node: |
|
3308 | if parent == node: | |
3299 | dsmodified = modified |
|
3309 | dsmodified = modified | |
3300 | dsadded = added |
|
3310 | dsadded = added | |
3301 | dsremoved = removed |
|
3311 | dsremoved = removed | |
3302 | # store all local modifications, useful later for rename detection |
|
3312 | # store all local modifications, useful later for rename detection | |
3303 | localchanges = dsmodified | dsadded |
|
3313 | localchanges = dsmodified | dsadded | |
3304 | modified, added, removed = set(), set(), set() |
|
3314 | modified, added, removed = set(), set(), set() | |
3305 | else: |
|
3315 | else: | |
3306 | changes = repo.status(node1=parent, match=m) |
|
3316 | changes = repo.status(node1=parent, match=m) | |
3307 | dsmodified = set(changes.modified) |
|
3317 | dsmodified = set(changes.modified) | |
3308 | dsadded = set(changes.added) |
|
3318 | dsadded = set(changes.added) | |
3309 | dsremoved = set(changes.removed) |
|
3319 | dsremoved = set(changes.removed) | |
3310 | # store all local modifications, useful later for rename detection |
|
3320 | # store all local modifications, useful later for rename detection | |
3311 | localchanges = dsmodified | dsadded |
|
3321 | localchanges = dsmodified | dsadded | |
3312 |
|
3322 | |||
3313 | # only take into account removes between wc and target |
|
3323 | # only take into account removes between wc and target | |
3314 | clean |= dsremoved - removed |
|
3324 | clean |= dsremoved - removed | |
3315 | dsremoved &= removed |
|
3325 | dsremoved &= removed | |
3316 | # distinguish between dirstate removes and others |
|
3326 | # distinguish between dirstate removes and others | |
3317 | removed -= dsremoved |
|
3327 | removed -= dsremoved | |
3318 |
|
3328 | |||
3319 | modadded = added & dsmodified |
|
3329 | modadded = added & dsmodified | |
3320 | added -= modadded |
|
3330 | added -= modadded | |
3321 |
|
3331 | |||
3322 | # tell newly modified apart. |
|
3332 | # tell newly modified apart. | |
3323 | dsmodified &= modified |
|
3333 | dsmodified &= modified | |
3324 | dsmodified |= modified & dsadded # dirstate added may need backup |
|
3334 | dsmodified |= modified & dsadded # dirstate added may need backup | |
3325 | modified -= dsmodified |
|
3335 | modified -= dsmodified | |
3326 |
|
3336 | |||
3327 | # We need to wait for some post-processing to update this set |
|
3337 | # We need to wait for some post-processing to update this set | |
3328 | # before making the distinction. The dirstate will be used for |
|
3338 | # before making the distinction. The dirstate will be used for | |
3329 | # that purpose. |
|
3339 | # that purpose. | |
3330 | dsadded = added |
|
3340 | dsadded = added | |
3331 |
|
3341 | |||
3332 | # in case of merge, files that are actually added can be reported as |
|
3342 | # in case of merge, files that are actually added can be reported as | |
3333 | # modified, we need to post process the result |
|
3343 | # modified, we need to post process the result | |
3334 | if p2 != nullid: |
|
3344 | if p2 != nullid: | |
3335 | mergeadd = set(dsmodified) |
|
3345 | mergeadd = set(dsmodified) | |
3336 | for path in dsmodified: |
|
3346 | for path in dsmodified: | |
3337 | if path in mf: |
|
3347 | if path in mf: | |
3338 | mergeadd.remove(path) |
|
3348 | mergeadd.remove(path) | |
3339 | dsadded |= mergeadd |
|
3349 | dsadded |= mergeadd | |
3340 | dsmodified -= mergeadd |
|
3350 | dsmodified -= mergeadd | |
3341 |
|
3351 | |||
3342 | # if f is a rename, update `names` to also revert the source |
|
3352 | # if f is a rename, update `names` to also revert the source | |
3343 | cwd = repo.getcwd() |
|
3353 | cwd = repo.getcwd() | |
3344 | for f in localchanges: |
|
3354 | for f in localchanges: | |
3345 | src = repo.dirstate.copied(f) |
|
3355 | src = repo.dirstate.copied(f) | |
3346 | # XXX should we check for rename down to target node? |
|
3356 | # XXX should we check for rename down to target node? | |
3347 | if src and src not in names and repo.dirstate[src] == 'r': |
|
3357 | if src and src not in names and repo.dirstate[src] == 'r': | |
3348 | dsremoved.add(src) |
|
3358 | dsremoved.add(src) | |
3349 | names[src] = (repo.pathto(src, cwd), True) |
|
3359 | names[src] = (repo.pathto(src, cwd), True) | |
3350 |
|
3360 | |||
3351 | # determine the exact nature of the deleted changesets |
|
3361 | # determine the exact nature of the deleted changesets | |
3352 | deladded = set(_deleted) |
|
3362 | deladded = set(_deleted) | |
3353 | for path in _deleted: |
|
3363 | for path in _deleted: | |
3354 | if path in mf: |
|
3364 | if path in mf: | |
3355 | deladded.remove(path) |
|
3365 | deladded.remove(path) | |
3356 | deleted = _deleted - deladded |
|
3366 | deleted = _deleted - deladded | |
3357 |
|
3367 | |||
3358 | # distinguish between file to forget and the other |
|
3368 | # distinguish between file to forget and the other | |
3359 | added = set() |
|
3369 | added = set() | |
3360 | for abs in dsadded: |
|
3370 | for abs in dsadded: | |
3361 | if repo.dirstate[abs] != 'a': |
|
3371 | if repo.dirstate[abs] != 'a': | |
3362 | added.add(abs) |
|
3372 | added.add(abs) | |
3363 | dsadded -= added |
|
3373 | dsadded -= added | |
3364 |
|
3374 | |||
3365 | for abs in deladded: |
|
3375 | for abs in deladded: | |
3366 | if repo.dirstate[abs] == 'a': |
|
3376 | if repo.dirstate[abs] == 'a': | |
3367 | dsadded.add(abs) |
|
3377 | dsadded.add(abs) | |
3368 | deladded -= dsadded |
|
3378 | deladded -= dsadded | |
3369 |
|
3379 | |||
3370 | # For files marked as removed, we check if an unknown file is present at |
|
3380 | # For files marked as removed, we check if an unknown file is present at | |
3371 | # the same path. If such a file exists it may need to be backed up. |
|
3381 | # the same path. If such a file exists it may need to be backed up. | |
3372 | # Making the distinction at this stage helps have simpler backup |
|
3382 | # Making the distinction at this stage helps have simpler backup | |
3373 | # logic. |
|
3383 | # logic. | |
3374 | removunk = set() |
|
3384 | removunk = set() | |
3375 | for abs in removed: |
|
3385 | for abs in removed: | |
3376 | target = repo.wjoin(abs) |
|
3386 | target = repo.wjoin(abs) | |
3377 | if os.path.lexists(target): |
|
3387 | if os.path.lexists(target): | |
3378 | removunk.add(abs) |
|
3388 | removunk.add(abs) | |
3379 | removed -= removunk |
|
3389 | removed -= removunk | |
3380 |
|
3390 | |||
3381 | dsremovunk = set() |
|
3391 | dsremovunk = set() | |
3382 | for abs in dsremoved: |
|
3392 | for abs in dsremoved: | |
3383 | target = repo.wjoin(abs) |
|
3393 | target = repo.wjoin(abs) | |
3384 | if os.path.lexists(target): |
|
3394 | if os.path.lexists(target): | |
3385 | dsremovunk.add(abs) |
|
3395 | dsremovunk.add(abs) | |
3386 | dsremoved -= dsremovunk |
|
3396 | dsremoved -= dsremovunk | |
3387 |
|
3397 | |||
3388 | # action to be actually performed by revert |
|
3398 | # action to be actually performed by revert | |
3389 | # (<list of files>, <message>) tuple |
|
3399 | # (<list of files>, <message>) tuple | |
3390 | actions = {'revert': ([], _('reverting %s\n')), |
|
3400 | actions = {'revert': ([], _('reverting %s\n')), | |
3391 | 'add': ([], _('adding %s\n')), |
|
3401 | 'add': ([], _('adding %s\n')), | |
3392 | 'remove': ([], _('removing %s\n')), |
|
3402 | 'remove': ([], _('removing %s\n')), | |
3393 | 'drop': ([], _('removing %s\n')), |
|
3403 | 'drop': ([], _('removing %s\n')), | |
3394 | 'forget': ([], _('forgetting %s\n')), |
|
3404 | 'forget': ([], _('forgetting %s\n')), | |
3395 | 'undelete': ([], _('undeleting %s\n')), |
|
3405 | 'undelete': ([], _('undeleting %s\n')), | |
3396 | 'noop': (None, _('no changes needed to %s\n')), |
|
3406 | 'noop': (None, _('no changes needed to %s\n')), | |
3397 | 'unknown': (None, _('file not managed: %s\n')), |
|
3407 | 'unknown': (None, _('file not managed: %s\n')), | |
3398 | } |
|
3408 | } | |
3399 |
|
3409 | |||
3400 | # "constant" that convey the backup strategy. |
|
3410 | # "constant" that convey the backup strategy. | |
3401 | # All set to `discard` if `no-backup` is set do avoid checking |
|
3411 | # All set to `discard` if `no-backup` is set do avoid checking | |
3402 | # no_backup lower in the code. |
|
3412 | # no_backup lower in the code. | |
3403 | # These values are ordered for comparison purposes |
|
3413 | # These values are ordered for comparison purposes | |
3404 | backupinteractive = 3 # do backup if interactively modified |
|
3414 | backupinteractive = 3 # do backup if interactively modified | |
3405 | backup = 2 # unconditionally do backup |
|
3415 | backup = 2 # unconditionally do backup | |
3406 | check = 1 # check if the existing file differs from target |
|
3416 | check = 1 # check if the existing file differs from target | |
3407 | discard = 0 # never do backup |
|
3417 | discard = 0 # never do backup | |
3408 | if opts.get('no_backup'): |
|
3418 | if opts.get('no_backup'): | |
3409 | backupinteractive = backup = check = discard |
|
3419 | backupinteractive = backup = check = discard | |
3410 | if interactive: |
|
3420 | if interactive: | |
3411 | dsmodifiedbackup = backupinteractive |
|
3421 | dsmodifiedbackup = backupinteractive | |
3412 | else: |
|
3422 | else: | |
3413 | dsmodifiedbackup = backup |
|
3423 | dsmodifiedbackup = backup | |
3414 | tobackup = set() |
|
3424 | tobackup = set() | |
3415 |
|
3425 | |||
3416 | backupanddel = actions['remove'] |
|
3426 | backupanddel = actions['remove'] | |
3417 | if not opts.get('no_backup'): |
|
3427 | if not opts.get('no_backup'): | |
3418 | backupanddel = actions['drop'] |
|
3428 | backupanddel = actions['drop'] | |
3419 |
|
3429 | |||
3420 | disptable = ( |
|
3430 | disptable = ( | |
3421 | # dispatch table: |
|
3431 | # dispatch table: | |
3422 | # file state |
|
3432 | # file state | |
3423 | # action |
|
3433 | # action | |
3424 | # make backup |
|
3434 | # make backup | |
3425 |
|
3435 | |||
3426 | ## Sets that result in changes to files on disk |
|
3436 | ## Sets that result in changes to files on disk | |
3427 | # Modified compared to target, no local change |
|
3437 | # Modified compared to target, no local change | |
3428 | (modified, actions['revert'], discard), |
|
3438 | (modified, actions['revert'], discard), | |
3429 | # Modified compared to target, but local file is deleted |
|
3439 | # Modified compared to target, but local file is deleted | |
3430 | (deleted, actions['revert'], discard), |
|
3440 | (deleted, actions['revert'], discard), | |
3431 | # Modified compared to target, local change |
|
3441 | # Modified compared to target, local change | |
3432 | (dsmodified, actions['revert'], dsmodifiedbackup), |
|
3442 | (dsmodified, actions['revert'], dsmodifiedbackup), | |
3433 | # Added since target |
|
3443 | # Added since target | |
3434 | (added, actions['remove'], discard), |
|
3444 | (added, actions['remove'], discard), | |
3435 | # Added in working directory |
|
3445 | # Added in working directory | |
3436 | (dsadded, actions['forget'], discard), |
|
3446 | (dsadded, actions['forget'], discard), | |
3437 | # Added since target, have local modification |
|
3447 | # Added since target, have local modification | |
3438 | (modadded, backupanddel, backup), |
|
3448 | (modadded, backupanddel, backup), | |
3439 | # Added since target but file is missing in working directory |
|
3449 | # Added since target but file is missing in working directory | |
3440 | (deladded, actions['drop'], discard), |
|
3450 | (deladded, actions['drop'], discard), | |
3441 | # Removed since target, before working copy parent |
|
3451 | # Removed since target, before working copy parent | |
3442 | (removed, actions['add'], discard), |
|
3452 | (removed, actions['add'], discard), | |
3443 | # Same as `removed` but an unknown file exists at the same path |
|
3453 | # Same as `removed` but an unknown file exists at the same path | |
3444 | (removunk, actions['add'], check), |
|
3454 | (removunk, actions['add'], check), | |
3445 | # Removed since target, marked as such in working copy parent |
|
3455 | # Removed since target, marked as such in working copy parent | |
3446 | (dsremoved, actions['undelete'], discard), |
|
3456 | (dsremoved, actions['undelete'], discard), | |
3447 | # Same as `dsremoved` but an unknown file exists at the same path |
|
3457 | # Same as `dsremoved` but an unknown file exists at the same path | |
3448 | (dsremovunk, actions['undelete'], check), |
|
3458 | (dsremovunk, actions['undelete'], check), | |
3449 | ## the following sets do not result in any file changes |
|
3459 | ## the following sets do not result in any file changes | |
3450 | # File with no modification |
|
3460 | # File with no modification | |
3451 | (clean, actions['noop'], discard), |
|
3461 | (clean, actions['noop'], discard), | |
3452 | # Existing file, not tracked anywhere |
|
3462 | # Existing file, not tracked anywhere | |
3453 | (unknown, actions['unknown'], discard), |
|
3463 | (unknown, actions['unknown'], discard), | |
3454 | ) |
|
3464 | ) | |
3455 |
|
3465 | |||
3456 | for abs, (rel, exact) in sorted(names.items()): |
|
3466 | for abs, (rel, exact) in sorted(names.items()): | |
3457 | # target file to be touched on disk (relative to cwd) |
|
3467 | # target file to be touched on disk (relative to cwd) | |
3458 | target = repo.wjoin(abs) |
|
3468 | target = repo.wjoin(abs) | |
3459 | # search the entry in the dispatch table. |
|
3469 | # search the entry in the dispatch table. | |
3460 | # if the file is in any of these sets, it was touched in the working |
|
3470 | # if the file is in any of these sets, it was touched in the working | |
3461 | # directory parent and we are sure it needs to be reverted. |
|
3471 | # directory parent and we are sure it needs to be reverted. | |
3462 | for table, (xlist, msg), dobackup in disptable: |
|
3472 | for table, (xlist, msg), dobackup in disptable: | |
3463 | if abs not in table: |
|
3473 | if abs not in table: | |
3464 | continue |
|
3474 | continue | |
3465 | if xlist is not None: |
|
3475 | if xlist is not None: | |
3466 | xlist.append(abs) |
|
3476 | xlist.append(abs) | |
3467 | if dobackup: |
|
3477 | if dobackup: | |
3468 | # If in interactive mode, don't automatically create |
|
3478 | # If in interactive mode, don't automatically create | |
3469 | # .orig files (issue4793) |
|
3479 | # .orig files (issue4793) | |
3470 | if dobackup == backupinteractive: |
|
3480 | if dobackup == backupinteractive: | |
3471 | tobackup.add(abs) |
|
3481 | tobackup.add(abs) | |
3472 | elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])): |
|
3482 | elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])): | |
3473 | bakname = scmutil.origpath(ui, repo, rel) |
|
3483 | bakname = scmutil.origpath(ui, repo, rel) | |
3474 | ui.note(_('saving current version of %s as %s\n') % |
|
3484 | ui.note(_('saving current version of %s as %s\n') % | |
3475 | (rel, bakname)) |
|
3485 | (rel, bakname)) | |
3476 | if not opts.get('dry_run'): |
|
3486 | if not opts.get('dry_run'): | |
3477 | if interactive: |
|
3487 | if interactive: | |
3478 | util.copyfile(target, bakname) |
|
3488 | util.copyfile(target, bakname) | |
3479 | else: |
|
3489 | else: | |
3480 | util.rename(target, bakname) |
|
3490 | util.rename(target, bakname) | |
3481 | if ui.verbose or not exact: |
|
3491 | if ui.verbose or not exact: | |
3482 | if not isinstance(msg, basestring): |
|
3492 | if not isinstance(msg, basestring): | |
3483 | msg = msg(abs) |
|
3493 | msg = msg(abs) | |
3484 | ui.status(msg % rel) |
|
3494 | ui.status(msg % rel) | |
3485 | elif exact: |
|
3495 | elif exact: | |
3486 | ui.warn(msg % rel) |
|
3496 | ui.warn(msg % rel) | |
3487 | break |
|
3497 | break | |
3488 |
|
3498 | |||
3489 | if not opts.get('dry_run'): |
|
3499 | if not opts.get('dry_run'): | |
3490 | needdata = ('revert', 'add', 'undelete') |
|
3500 | needdata = ('revert', 'add', 'undelete') | |
3491 | _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata]) |
|
3501 | _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata]) | |
3492 | _performrevert(repo, parents, ctx, actions, interactive, tobackup) |
|
3502 | _performrevert(repo, parents, ctx, actions, interactive, tobackup) | |
3493 |
|
3503 | |||
3494 | if targetsubs: |
|
3504 | if targetsubs: | |
3495 | # Revert the subrepos on the revert list |
|
3505 | # Revert the subrepos on the revert list | |
3496 | for sub in targetsubs: |
|
3506 | for sub in targetsubs: | |
3497 | try: |
|
3507 | try: | |
3498 | wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts) |
|
3508 | wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts) | |
3499 | except KeyError: |
|
3509 | except KeyError: | |
3500 | raise error.Abort("subrepository '%s' does not exist in %s!" |
|
3510 | raise error.Abort("subrepository '%s' does not exist in %s!" | |
3501 | % (sub, short(ctx.node()))) |
|
3511 | % (sub, short(ctx.node()))) | |
3502 |
|
3512 | |||
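revert() classifies every path into one of the sets above and then walks the dispatch table: the first row whose set contains the path decides the action and the backup policy. A self-contained sketch of that table walk, with plain sets and lists in place of the real action tuples:

    def apply_dispatch(names, disptable):
        # names maps path -> (rel, exact); disptable is a sequence of
        # (membership_set, (action_list, message), dobackup) rows, like the
        # one built inside revert(). First matching row wins.
        plan = []
        for path in sorted(names):
            for table, (xlist, msg), dobackup in disptable:
                if path not in table:
                    continue
                if xlist is not None:
                    xlist.append(path)
                plan.append((path, msg, bool(dobackup)))
                break
        return plan

    revert_action = ([], 'reverting %s\n')
    noop_action = (None, 'no changes needed to %s\n')
    table = (({'a.py'}, revert_action, 1), ({'b.py'}, noop_action, 0))
    print(apply_dispatch({'a.py': ('a.py', True), 'b.py': ('b.py', True)}, table))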
3503 | def _revertprefetch(repo, ctx, *files): |
|
3513 | def _revertprefetch(repo, ctx, *files): | |
3504 | """Let extension changing the storage layer prefetch content""" |
|
3514 | """Let extension changing the storage layer prefetch content""" | |
3505 | pass |
|
3515 | pass | |
3506 |
|
3516 | |||
3507 | def _performrevert(repo, parents, ctx, actions, interactive=False, |
|
3517 | def _performrevert(repo, parents, ctx, actions, interactive=False, | |
3508 | tobackup=None): |
|
3518 | tobackup=None): | |
3509 | """function that actually perform all the actions computed for revert |
|
3519 | """function that actually perform all the actions computed for revert | |
3510 |
|
3520 | |||
3511 | This is an independent function to let extensions plug in and react to |
|
3521 | This is an independent function to let extensions plug in and react to | |
3512 | the imminent revert. |
|
3522 | the imminent revert. | |
3513 |
|
3523 | |||
3514 | Make sure you have the working directory locked when calling this function. |
|
3524 | Make sure you have the working directory locked when calling this function. | |
3515 | """ |
|
3525 | """ | |
3516 | parent, p2 = parents |
|
3526 | parent, p2 = parents | |
3517 | node = ctx.node() |
|
3527 | node = ctx.node() | |
3518 | excluded_files = [] |
|
3528 | excluded_files = [] | |
3519 | matcher_opts = {"exclude": excluded_files} |
|
3529 | matcher_opts = {"exclude": excluded_files} | |
3520 |
|
3530 | |||
3521 | def checkout(f): |
|
3531 | def checkout(f): | |
3522 | fc = ctx[f] |
|
3532 | fc = ctx[f] | |
3523 | repo.wwrite(f, fc.data(), fc.flags()) |
|
3533 | repo.wwrite(f, fc.data(), fc.flags()) | |
3524 |
|
3534 | |||
3525 | def doremove(f): |
|
3535 | def doremove(f): | |
3526 | try: |
|
3536 | try: | |
3527 | repo.wvfs.unlinkpath(f) |
|
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode')
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)

class command(registrar.command):
    def _doregister(self, func, name, *args, **kwargs):
        func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]

def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if commit and allowcommit:
            continue
        if repo.vfs.exists(f):
            raise error.Abort(msg, hint=hint)

def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(f):
            raise error.Abort(msg, hint=hint)
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(f):
            util.unlink(repo.vfs.join(f))

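The comments above spell out the tuple layout used by unfinishedstates, and checkunfinished/clearunfinished show how those entries gate other commands. As a minimal sketch of how an extension might plug into this machinery (the 'frobstate' file name and its messages are invented for illustration, not taken from this changeset):

# Illustrative sketch, not part of cmdutil.py: an extension that runs a
# multistep operation can register its own state file so that other
# commands refuse to run while the operation is unfinished.
from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.unfinishedstates.append(
    ('frobstate', False, False, _('frob in progress'),
     _("use 'hg frob --continue' or 'hg frob --abort'")))

def precommitcheck(repo):
    # aborts with the message and hint above while .hg/frobstate exists,
    # because the entry was registered with allowcommit=False
    cmdutil.checkunfinished(repo, commit=True)
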
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]

def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _("continue: %s")
    for f, msg in afterresolvedstates:
        if repo.vfs.exists(f):
            return contmsg % msg, True
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _("hg commit"), False
    return None, None

def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is not None:
        if warning:
            repo.ui.warn("%s\n" % msg)
        else:
            repo.ui.note("%s\n" % msg)

def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    after = howtocontinue(repo)
    hint = None
    if after[1]:
        hint = after[0]
    raise error.Abort(_('no %s in progress') % task, hint=hint)
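
The afterresolvedstates/howtocontinue helpers above decide which "continue:" hint the user sees once conflicts are resolved. A minimal sketch of calling into them (the wrapper function name is made up; checkafterresolved itself is the real entry point documented in its docstring):

# Illustrative sketch, not part of cmdutil.py: after a resolve-style step,
# tell the user what to run next.
from mercurial import cmdutil

def afterresolve(ui, repo):
    # emits e.g. "continue: hg graft --continue" via ui.warn when a
    # matching .hg/ state file exists, or a "continue: hg commit" note
    # when only the working directory is dirty
    cmdutil.checkafterresolved(repo)
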
@@ -1,979 +1,980 @@
# dispatch.py - command dispatching for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import, print_function

import difflib
import errno
import getopt
import os
import pdb
import re
import signal
import sys
import time
import traceback


from .i18n import _

from . import (
    cmdutil,
    color,
    commands,
    demandimport,
    encoding,
    error,
    extensions,
    fancyopts,
    help,
    hg,
    hook,
    profiling,
    pycompat,
    scmutil,
    ui as uimod,
    util,
)

class request(object):
    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
                 ferr=None, prereposetups=None):
        self.args = args
        self.ui = ui
        self.repo = repo

        # input/output/error streams
        self.fin = fin
        self.fout = fout
        self.ferr = ferr

        # reposetups which run before extensions, useful for chg to pre-fill
        # low-level repo state (for example, changelog) before extensions.
        self.prereposetups = prereposetups or []

    def _runexithandlers(self):
        exc = None
        handlers = self.ui._exithandlers
        try:
            while handlers:
                func, args, kwargs = handlers.pop()
                try:
                    func(*args, **kwargs)
                except: # re-raises below
                    if exc is None:
                        exc = sys.exc_info()[1]
                    self.ui.warn(('error in exit handlers:\n'))
                    self.ui.traceback(force=True)
        finally:
            if exc is not None:
                raise exc

def run():
    "run the command in sys.argv"
    req = request(pycompat.sysargv[1:])
    err = None
    try:
        status = (dispatch(req) or 0) & 255
    except error.StdioError as err:
        status = -1
    if util.safehasattr(req.ui, 'fout'):
        try:
            req.ui.fout.flush()
        except IOError as err:
            status = -1
    if util.safehasattr(req.ui, 'ferr'):
        if err is not None and err.errno != errno.EPIPE:
            req.ui.ferr.write('abort: %s\n' % err.strerror)
        req.ui.ferr.flush()
    sys.exit(status & 255)

def _getsimilar(symbols, value):
    sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
    # The cutoff for similarity here is pretty arbitrary. It should
    # probably be investigated and tweaked.
    return [s for s in symbols if sim(s) > 0.6]

def _reportsimilar(write, similar):
    if len(similar) == 1:
        write(_("(did you mean %s?)\n") % similar[0])
    elif similar:
        ss = ", ".join(sorted(similar))
        write(_("(did you mean one of %s?)\n") % ss)

def _formatparse(write, inst):
    similar = []
    if isinstance(inst, error.UnknownIdentifier):
        # make sure to check fileset first, as revset can invoke fileset
        similar = _getsimilar(inst.symbols, inst.function)
    if len(inst.args) > 1:
        write(_("hg: parse error at %s: %s\n") %
              (inst.args[1], inst.args[0]))
        if (inst.args[0][0] == ' '):
            write(_("unexpected leading whitespace\n"))
    else:
        write(_("hg: parse error: %s\n") % inst.args[0])
        _reportsimilar(write, similar)
    if inst.hint:
        write(_("(%s)\n") % inst.hint)

def _formatargs(args):
    return ' '.join(util.shellquote(a) for a in args)

def dispatch(req):
    "run the command specified in req.args"
    if req.ferr:
        ferr = req.ferr
    elif req.ui:
        ferr = req.ui.ferr
    else:
        ferr = util.stderr

    try:
        if not req.ui:
            req.ui = uimod.ui.load()
        if '--traceback' in req.args:
            req.ui.setconfig('ui', 'traceback', 'on', '--traceback')

        # set ui streams from the request
        if req.fin:
            req.ui.fin = req.fin
        if req.fout:
            req.ui.fout = req.fout
        if req.ferr:
            req.ui.ferr = req.ferr
    except error.Abort as inst:
        ferr.write(_("abort: %s\n") % inst)
        if inst.hint:
            ferr.write(_("(%s)\n") % inst.hint)
        return -1
    except error.ParseError as inst:
        _formatparse(ferr.write, inst)
        return -1

    msg = _formatargs(req.args)
    starttime = util.timer()
    ret = None
    try:
        ret = _runcatch(req)
    except error.ProgrammingError as inst:
        req.ui.warn(_('** ProgrammingError: %s\n') % inst)
        if inst.hint:
            req.ui.warn(_('** (%s)\n') % inst.hint)
        raise
    except KeyboardInterrupt as inst:
        try:
            if isinstance(inst, error.SignalInterrupt):
                msg = _("killed!\n")
            else:
                msg = _("interrupted!\n")
            req.ui.warn(msg)
        except error.SignalInterrupt:
            # maybe pager would quit without consuming all the output, and
            # SIGPIPE was raised. we cannot print anything in this case.
            pass
        except IOError as inst:
            if inst.errno != errno.EPIPE:
                raise
        ret = -1
    finally:
        duration = util.timer() - starttime
        req.ui.flush()
        if req.ui.logblockedtimes:
            req.ui._blockedtimes['command_duration'] = duration * 1000
            req.ui.log('uiblocked', 'ui blocked ms', **req.ui._blockedtimes)
        req.ui.log("commandfinish", "%s exited %d after %0.2f seconds\n",
                   msg, ret or 0, duration)
        try:
            req._runexithandlers()
        except: # exiting, so no re-raises
            ret = ret or -1
    return ret

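The request/dispatch pair above is the programmatic entry point that run() wires to sys.argv. A minimal sketch of driving it directly (the command line shown is only an example):

# Illustrative sketch, not part of dispatch.py: invoke the dispatcher
# without going through run()/sys.argv.
from mercurial import dispatch

req = dispatch.request(['log', '-l', '5'])
ret = dispatch.dispatch(req)  # returns the command's exit status
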
def _runcatch(req):
    def catchterm(*args):
        raise error.SignalInterrupt

    ui = req.ui
    try:
        for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
            num = getattr(signal, name, None)
            if num:
                signal.signal(num, catchterm)
    except ValueError:
        pass # happens if called in a thread

    def _runcatchfunc():
        realcmd = None
        try:
            cmdargs = fancyopts.fancyopts(req.args[:], commands.globalopts, {})
            cmd = cmdargs[0]
            aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
            realcmd = aliases[0]
        except (error.UnknownCommand, error.AmbiguousCommand,
                IndexError, getopt.GetoptError):
            # Don't handle this here. We know the command is
            # invalid, but all we're worried about for now is that
            # it's not a command that server operators expect to
            # be safe to offer to users in a sandbox.
            pass
        if realcmd == 'serve' and '--stdio' in cmdargs:
            # We want to constrain 'hg serve --stdio' instances pretty
            # closely, as many shared-ssh access tools want to grant
            # access to run *only* 'hg -R $repo serve --stdio'. We
            # restrict to exactly that set of arguments, and prohibit
            # any repo name that starts with '--' to prevent
            # shenanigans wherein a user does something like pass
            # --debugger or --config=ui.debugger=1 as a repo
            # name. This used to actually run the debugger.
            if (len(req.args) != 4 or
                req.args[0] != '-R' or
                req.args[1].startswith('--') or
                req.args[2] != 'serve' or
                req.args[3] != '--stdio'):
                raise error.Abort(
                    _('potentially unsafe serve --stdio invocation: %r') %
                    (req.args,))

        try:
            debugger = 'pdb'
            debugtrace = {
                'pdb' : pdb.set_trace
            }
            debugmortem = {
                'pdb' : pdb.post_mortem
            }

            # read --config before doing anything else
            # (e.g. to change trust settings for reading .hg/hgrc)
            cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))

            if req.repo:
                # copy configs that were passed on the cmdline (--config) to
                # the repo ui
                for sec, name, val in cfgs:
                    req.repo.ui.setconfig(sec, name, val, source='--config')

            # developer config: ui.debugger
            debugger = ui.config("ui", "debugger")
            debugmod = pdb
            if not debugger or ui.plain():
                # if we are in HGPLAIN mode, then disable custom debugging
                debugger = 'pdb'
            elif '--debugger' in req.args:
                # This import can be slow for fancy debuggers, so only
                # do it when absolutely necessary, i.e. when actual
                # debugging has been requested
                with demandimport.deactivated():
                    try:
                        debugmod = __import__(debugger)
                    except ImportError:
                        pass # Leave debugmod = pdb

            debugtrace[debugger] = debugmod.set_trace
            debugmortem[debugger] = debugmod.post_mortem

            # enter the debugger before command execution
            if '--debugger' in req.args:
                ui.warn(_("entering debugger - "
                          "type c to continue starting hg or h for help\n"))

                if (debugger != 'pdb' and
                    debugtrace[debugger] == debugtrace['pdb']):
                    ui.warn(_("%s debugger specified "
                              "but its module was not found\n") % debugger)
                with demandimport.deactivated():
                    debugtrace[debugger]()
            try:
                return _dispatch(req)
            finally:
                ui.flush()
        except: # re-raises
            # enter the debugger when we hit an exception
            if '--debugger' in req.args:
                traceback.print_exc()
                debugmortem[debugger](sys.exc_info()[2])
            raise

    return _callcatch(ui, _runcatchfunc)

def _callcatch(ui, func):
    """like scmutil.callcatch but handles more high-level exceptions about
    config parsing and commands. besides, use handlecommandexception to handle
    uncaught exceptions.
    """
    try:
        return scmutil.callcatch(ui, func)
    except error.AmbiguousCommand as inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except error.CommandError as inst:
        if inst.args[0]:
            ui.pager('help')
            msgbytes = pycompat.bytestr(inst.args[1])
            ui.warn(_("hg %s: %s\n") % (inst.args[0], msgbytes))
            commands.help_(ui, inst.args[0], full=False, command=True)
        else:
            ui.pager('help')
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except error.ParseError as inst:
        _formatparse(ui.warn, inst)
        return -1
    except error.UnknownCommand as inst:
        nocmdmsg = _("hg: unknown command '%s'\n") % inst.args[0]
        try:
            # check if the command is in a disabled extension
            # (but don't check for extensions themselves)
            formatted = help.formattedhelp(ui, commands, inst.args[0],
                                           unknowncmd=True)
            ui.warn(nocmdmsg)
            ui.write(formatted)
        except (error.UnknownCommand, error.Abort):
            suggested = False
            if len(inst.args) == 2:
                sim = _getsimilar(inst.args[1], inst.args[0])
                if sim:
                    ui.warn(nocmdmsg)
                    _reportsimilar(ui.warn, sim)
                    suggested = True
            if not suggested:
                ui.pager('help')
                ui.warn(nocmdmsg)
                commands.help_(ui, 'shortlist')
    except IOError:
        raise
    except KeyboardInterrupt:
        raise
    except: # probably re-raises
        if not handlecommandexception(ui):
            raise

    return -1

def aliasargs(fn, givenargs):
    args = getattr(fn, 'args', [])
    if args:
        cmd = ' '.join(map(util.shellquote, args))

        nums = []
        def replacer(m):
            num = int(m.group(1)) - 1
            nums.append(num)
            if num < len(givenargs):
                return givenargs[num]
            raise error.Abort(_('too few arguments for command alias'))
        cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
        givenargs = [x for i, x in enumerate(givenargs)
                     if i not in nums]
        args = pycompat.shlexsplit(cmd)
    return args + givenargs

def aliasinterpolate(name, args, cmd):
    '''interpolate args into cmd for shell aliases

    This also handles $0, $@ and "$@".
    '''
    # util.interpolate can't deal with "$@" (with quotes) because it's only
    # built to match prefix + patterns.
    replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
    replacemap['$0'] = name
    replacemap['$$'] = '$'
    replacemap['$@'] = ' '.join(args)
    # Typical Unix shells interpolate "$@" (with quotes) as all the positional
    # parameters, separated out into words. Emulate the same behavior here by
    # quoting the arguments individually. POSIX shells will then typically
    # tokenize each argument into exactly one word.
    replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
    # escape '\$' for regex
    regex = '|'.join(replacemap.keys()).replace('$', r'\$')
    r = re.compile(regex)
    return r.sub(lambda x: replacemap[x.group()], cmd)

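aliasinterpolate above is what expands the shell-style placeholders in a "!..." alias body. A small sketch of the substitution it performs (the alias name and arguments are invented):

# Illustrative sketch, not part of dispatch.py: filling in positional
# placeholders for a shell alias body.
expanded = aliasinterpolate('recent', ['5'], 'hg log -l $1 --template "$0\\n"')
# '$1' is replaced with the first argument ('5') and '$0' with the alias
# name ('recent'), so 'expanded' is: hg log -l 5 --template "recent\n"
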
class cmdalias(object):
    def __init__(self, name, definition, cmdtable, source):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.fn = None
        self.givenargs = []
        self.opts = []
        self.help = ''
        self.badalias = None
        self.unknowncmd = False
        self.source = source

        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            self.badalias = _("no definition for alias '%s'") % self.name
            return

        if self.definition.startswith('!'):
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition."
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                cmd = aliasinterpolate(self.name, args, cmd)
                return ui.system(cmd, environ=env,
                                 blockedtag='alias_%s' % self.name)
            self.fn = fn
            return

        try:
            args = pycompat.shlexsplit(self.definition)
        except ValueError as inst:
            self.badalias = (_("error in definition for alias '%s': %s")
                             % (self.name, inst))
            return
        self.cmdname = cmd = args.pop(0)
        self.givenargs = args

        for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
            if _earlygetopt([invalidarg], args):
                self.badalias = (_("error in definition for alias '%s': %s may "
                                   "only be given on the command line")
                                 % (self.name, invalidarg))
                return

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            self.badalias = (_("alias '%s' resolves to unknown command '%s'")
                             % (self.name, cmd))
            self.unknowncmd = True
        except error.AmbiguousCommand:
            self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
                             % (self.name, cmd))

    @property
    def args(self):
        args = pycompat.maplist(util.expandpath, self.givenargs)
        return aliasargs(self.fn, args)

    def __getattr__(self, name):
        adefaults = {r'norepo': True,
                     r'optionalrepo': False, r'inferrepo': False}
        if name not in adefaults:
            raise AttributeError(name)
        if self.badalias or util.safehasattr(self, 'shell'):
            return adefaults[name]
        return getattr(self.fn, name)

    def __call__(self, ui, *args, **opts):
        if self.badalias:
            hint = None
            if self.unknowncmd:
                try:
                    # check if the command is in a disabled extension
                    cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
                    hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
                except error.UnknownCommand:
                    pass
            raise error.Abort(self.badalias, hint=hint)
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        ui.log('commandalias', "alias '%s' expands to '%s'\n",
               self.name, self.definition)
        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                return util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise

def addaliases(ui, cmdtable):
    # aliases are processed after extensions have been loaded, so they
    # may use extension commands. Aliases can also use other alias definitions,
    # but only if they have been defined prior to the current definition.
    for alias, definition in ui.configitems('alias'):
        source = ui.configsource('alias', alias)
        aliasdef = cmdalias(alias, definition, cmdtable, source)

        try:
            olddef = cmdtable[aliasdef.cmd][0]
            if olddef.definition == aliasdef.definition:
                continue
        except (KeyError, AttributeError):
            # definition might not exist or it might not be a cmdalias
            pass

        cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)

def _parse(ui, args):
    options = {}
    cmdoptions = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except getopt.GetoptError as inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.configbool("ui", "strict"))
        cmd = aliases[0]
        args = aliasargs(entry[0], args)
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = pycompat.maplist(
                util.expandpath, pycompat.shlexsplit(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions, gnu=True)
    except getopt.GetoptError as inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)

def _parseconfig(ui, config):
    """parse the --config options from the command line"""
    configs = []

    for cfg in config:
        try:
            name, value = [cfgelem.strip()
                           for cfgelem in cfg.split('=', 1)]
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            ui.setconfig(section, name, value, '--config')
            configs.append((section, name, value))
        except (IndexError, ValueError):
            raise error.Abort(_('malformed --config option: %r '
                                '(use --config section.name=value)') % cfg)

    return configs

600 | def _earlygetopt(aliases, args): |
|
600 | def _earlygetopt(aliases, args): | |
601 | """Return list of values for an option (or aliases). |
|
601 | """Return list of values for an option (or aliases). | |
602 |
|
602 | |||
603 | The values are listed in the order they appear in args. |
|
603 | The values are listed in the order they appear in args. | |
604 | The options and values are removed from args. |
|
604 | The options and values are removed from args. | |
605 |
|
605 | |||
606 | >>> args = ['x', '--cwd', 'foo', 'y'] |
|
606 | >>> args = ['x', '--cwd', 'foo', 'y'] | |
607 | >>> _earlygetopt(['--cwd'], args), args |
|
607 | >>> _earlygetopt(['--cwd'], args), args | |
608 | (['foo'], ['x', 'y']) |
|
608 | (['foo'], ['x', 'y']) | |
609 |
|
609 | |||
610 | >>> args = ['x', '--cwd=bar', 'y'] |
|
610 | >>> args = ['x', '--cwd=bar', 'y'] | |
611 | >>> _earlygetopt(['--cwd'], args), args |
|
611 | >>> _earlygetopt(['--cwd'], args), args | |
612 | (['bar'], ['x', 'y']) |
|
612 | (['bar'], ['x', 'y']) | |
613 |
|
613 | |||
614 | >>> args = ['x', '-R', 'foo', 'y'] |
|
614 | >>> args = ['x', '-R', 'foo', 'y'] | |
615 | >>> _earlygetopt(['-R'], args), args |
|
615 | >>> _earlygetopt(['-R'], args), args | |
616 | (['foo'], ['x', 'y']) |
|
616 | (['foo'], ['x', 'y']) | |
617 |
|
617 | |||
618 | >>> args = ['x', '-Rbar', 'y'] |
|
618 | >>> args = ['x', '-Rbar', 'y'] | |
619 | >>> _earlygetopt(['-R'], args), args |
|
619 | >>> _earlygetopt(['-R'], args), args | |
620 | (['bar'], ['x', 'y']) |
|
620 | (['bar'], ['x', 'y']) | |
621 | """ |
|
621 | """ | |
622 | try: |
|
622 | try: | |
623 | argcount = args.index("--") |
|
623 | argcount = args.index("--") | |
624 | except ValueError: |
|
624 | except ValueError: | |
625 | argcount = len(args) |
|
625 | argcount = len(args) | |
626 | shortopts = [opt for opt in aliases if len(opt) == 2] |
|
626 | shortopts = [opt for opt in aliases if len(opt) == 2] | |
627 | values = [] |
|
627 | values = [] | |
628 | pos = 0 |
|
628 | pos = 0 | |
629 | while pos < argcount: |
|
629 | while pos < argcount: | |
630 | fullarg = arg = args[pos] |
|
630 | fullarg = arg = args[pos] | |
631 | equals = arg.find('=') |
|
631 | equals = arg.find('=') | |
632 | if equals > -1: |
|
632 | if equals > -1: | |
633 | arg = arg[:equals] |
|
633 | arg = arg[:equals] | |
634 | if arg in aliases: |
|
634 | if arg in aliases: | |
635 | del args[pos] |
|
635 | del args[pos] | |
636 | if equals > -1: |
|
636 | if equals > -1: | |
637 | values.append(fullarg[equals + 1:]) |
|
637 | values.append(fullarg[equals + 1:]) | |
638 | argcount -= 1 |
|
638 | argcount -= 1 | |
639 | else: |
|
639 | else: | |
640 | if pos + 1 >= argcount: |
|
640 | if pos + 1 >= argcount: | |
641 | # ignore and let getopt report an error if there is no value |
|
641 | # ignore and let getopt report an error if there is no value | |
642 | break |
|
642 | break | |
643 | values.append(args.pop(pos)) |
|
643 | values.append(args.pop(pos)) | |
644 | argcount -= 2 |
|
644 | argcount -= 2 | |
645 | elif arg[:2] in shortopts: |
|
645 | elif arg[:2] in shortopts: | |
646 | # short option can have no following space, e.g. hg log -Rfoo |
|
646 | # short option can have no following space, e.g. hg log -Rfoo | |
647 | values.append(args.pop(pos)[2:]) |
|
647 | values.append(args.pop(pos)[2:]) | |
648 | argcount -= 1 |
|
648 | argcount -= 1 | |
649 | else: |
|
649 | else: | |
650 | pos += 1 |
|
650 | pos += 1 | |
651 | return values |
|
651 | return values | |
652 |
|
652 | |||
653 | def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions): |
|
653 | def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions): | |
654 | # run pre-hook, and abort if it fails |
|
654 | # run pre-hook, and abort if it fails | |
655 | hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs), |
|
655 | hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs), | |
656 | pats=cmdpats, opts=cmdoptions) |
|
656 | pats=cmdpats, opts=cmdoptions) | |
657 | try: |
|
657 | try: | |
658 | ret = _runcommand(ui, options, cmd, d) |
|
658 | ret = _runcommand(ui, options, cmd, d) | |
659 | # run post-hook, passing command result |
|
659 | # run post-hook, passing command result | |
660 | hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), |
|
660 | hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), | |
661 | result=ret, pats=cmdpats, opts=cmdoptions) |
|
661 | result=ret, pats=cmdpats, opts=cmdoptions) | |
662 | except Exception: |
|
662 | except Exception: | |
663 | # run failure hook and re-raise |
|
663 | # run failure hook and re-raise | |
664 | hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs), |
|
664 | hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs), | |
665 | pats=cmdpats, opts=cmdoptions) |
|
665 | pats=cmdpats, opts=cmdoptions) | |
666 | raise |
|
666 | raise | |
667 | return ret |
|
667 | return ret | |
668 |
|
668 | |||
669 | def _getlocal(ui, rpath, wd=None): |
|
669 | def _getlocal(ui, rpath, wd=None): | |
670 | """Return (path, local ui object) for the given target path. |
|
670 | """Return (path, local ui object) for the given target path. | |
671 |
|
671 | |||
672 | Takes paths in [cwd]/.hg/hgrc into account." |
|
672 | Takes paths in [cwd]/.hg/hgrc into account." | |
673 | """ |
|
673 | """ | |
674 | if wd is None: |
|
674 | if wd is None: | |
675 | try: |
|
675 | try: | |
676 | wd = pycompat.getcwd() |
|
676 | wd = pycompat.getcwd() | |
677 | except OSError as e: |
|
677 | except OSError as e: | |
678 | raise error.Abort(_("error getting current working directory: %s") % |
|
678 | raise error.Abort(_("error getting current working directory: %s") % | |
679 | e.strerror) |
|
679 | e.strerror) | |
680 | path = cmdutil.findrepo(wd) or "" |
|
680 | path = cmdutil.findrepo(wd) or "" | |
681 | if not path: |
|
681 | if not path: | |
682 | lui = ui |
|
682 | lui = ui | |
683 | else: |
|
683 | else: | |
684 | lui = ui.copy() |
|
684 | lui = ui.copy() | |
685 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) |
|
685 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) | |
686 |
|
686 | |||
687 | if rpath and rpath[-1]: |
|
687 | if rpath and rpath[-1]: | |
688 | path = lui.expandpath(rpath[-1]) |
|
688 | path = lui.expandpath(rpath[-1]) | |
689 | lui = ui.copy() |
|
689 | lui = ui.copy() | |
690 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) |
|
690 | lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) | |
691 |
|
691 | |||
692 | return path, lui |
|
692 | return path, lui | |
693 |
|
693 | |||
694 | def _checkshellalias(lui, ui, args): |
|
694 | def _checkshellalias(lui, ui, args): | |
695 | """Return the function to run the shell alias, if it is required""" |
|
695 | """Return the function to run the shell alias, if it is required""" | |
696 | options = {} |
|
696 | options = {} | |
697 |
|
697 | |||
698 | try: |
|
698 | try: | |
699 | args = fancyopts.fancyopts(args, commands.globalopts, options) |
|
699 | args = fancyopts.fancyopts(args, commands.globalopts, options) | |
700 | except getopt.GetoptError: |
|
700 | except getopt.GetoptError: | |
701 | return |
|
701 | return | |
702 |
|
702 | |||
703 | if not args: |
|
703 | if not args: | |
704 | return |
|
704 | return | |
705 |
|
705 | |||
706 | cmdtable = commands.table |
|
706 | cmdtable = commands.table | |
707 |
|
707 | |||
708 | cmd = args[0] |
|
708 | cmd = args[0] | |
709 | try: |
|
709 | try: | |
710 | strict = ui.configbool("ui", "strict") |
|
710 | strict = ui.configbool("ui", "strict") | |
711 | aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict) |
|
711 | aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict) | |
712 | except (error.AmbiguousCommand, error.UnknownCommand): |
|
712 | except (error.AmbiguousCommand, error.UnknownCommand): | |
713 | return |
|
713 | return | |
714 |
|
714 | |||
715 | cmd = aliases[0] |
|
715 | cmd = aliases[0] | |
716 | fn = entry[0] |
|
716 | fn = entry[0] | |
717 |
|
717 | |||
718 | if cmd and util.safehasattr(fn, 'shell'): |
|
718 | if cmd and util.safehasattr(fn, 'shell'): | |
719 | d = lambda: fn(ui, *args[1:]) |
|
719 | d = lambda: fn(ui, *args[1:]) | |
720 | return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, |
|
720 | return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, | |
721 | [], {}) |
|
721 | [], {}) | |
722 |
|
722 | |||
723 | def _dispatch(req): |
|
723 | def _dispatch(req): | |
724 | args = req.args |
|
724 | args = req.args | |
725 | ui = req.ui |
|
725 | ui = req.ui | |
726 |
|
726 | |||
727 | # check for cwd |
|
727 | # check for cwd | |
728 | cwd = _earlygetopt(['--cwd'], args) |
|
728 | cwd = _earlygetopt(['--cwd'], args) | |
729 | if cwd: |
|
729 | if cwd: | |
730 | os.chdir(cwd[-1]) |
|
730 | os.chdir(cwd[-1]) | |
731 |
|
731 | |||
732 | rpath = _earlygetopt(["-R", "--repository", "--repo"], args) |
|
732 | rpath = _earlygetopt(["-R", "--repository", "--repo"], args) | |
733 | path, lui = _getlocal(ui, rpath) |
|
733 | path, lui = _getlocal(ui, rpath) | |
734 |
|
734 | |||
735 | uis = {ui, lui} |
|
735 | uis = {ui, lui} | |
736 |
|
736 | |||
737 | if req.repo: |
|
737 | if req.repo: | |
738 | uis.add(req.repo.ui) |
|
738 | uis.add(req.repo.ui) | |
739 |
|
739 | |||
740 | if '--profile' in args: |
|
740 | if '--profile' in args: | |
741 | for ui_ in uis: |
|
741 | for ui_ in uis: | |
742 | ui_.setconfig('profiling', 'enabled', 'true', '--profile') |
|
742 | ui_.setconfig('profiling', 'enabled', 'true', '--profile') | |
743 |
|
743 | |||
744 | profile = lui.configbool('profiling', 'enabled') |
|
744 | profile = lui.configbool('profiling', 'enabled') | |
745 | with profiling.profile(lui, enabled=profile) as profiler: |
|
745 | with profiling.profile(lui, enabled=profile) as profiler: | |
746 | # Configure extensions in phases: uisetup, extsetup, cmdtable, and |
|
746 | # Configure extensions in phases: uisetup, extsetup, cmdtable, and | |
747 | # reposetup |
|
747 | # reposetup | |
748 | extensions.loadall(lui) |
|
748 | extensions.loadall(lui) | |
749 | # Propagate any changes to lui.__class__ by extensions |
|
749 | # Propagate any changes to lui.__class__ by extensions | |
750 | ui.__class__ = lui.__class__ |
|
750 | ui.__class__ = lui.__class__ | |
751 |
|
751 | |||
752 | # (uisetup and extsetup are handled in extensions.loadall) |
|
752 | # (uisetup and extsetup are handled in extensions.loadall) | |
753 |
|
753 | |||
754 | # (reposetup is handled in hg.repository) |
|
754 | # (reposetup is handled in hg.repository) | |
755 |
|
755 | |||
756 | addaliases(lui, commands.table) |
|
756 | addaliases(lui, commands.table) | |
757 |
|
757 | |||
758 | # All aliases and commands are completely defined, now. |
|
758 | # All aliases and commands are completely defined, now. | |
759 | # Check abbreviation/ambiguity of shell alias. |
|
759 | # Check abbreviation/ambiguity of shell alias. | |
760 | shellaliasfn = _checkshellalias(lui, ui, args) |
|
760 | shellaliasfn = _checkshellalias(lui, ui, args) | |
761 | if shellaliasfn: |
|
761 | if shellaliasfn: | |
762 | return shellaliasfn() |
|
762 | return shellaliasfn() | |
763 |
|
763 | |||
764 | # check for fallback encoding |
|
764 | # check for fallback encoding | |
765 | fallback = lui.config('ui', 'fallbackencoding') |
|
765 | fallback = lui.config('ui', 'fallbackencoding') | |
766 | if fallback: |
|
766 | if fallback: | |
767 | encoding.fallbackencoding = fallback |
|
767 | encoding.fallbackencoding = fallback | |
768 |
|
768 | |||
769 | fullargs = args |
|
769 | fullargs = args | |
770 | cmd, func, args, options, cmdoptions = _parse(lui, args) |
|
770 | cmd, func, args, options, cmdoptions = _parse(lui, args) | |
771 |
|
771 | |||
772 | if options["config"]: |
|
772 | if options["config"]: | |
773 | raise error.Abort(_("option --config may not be abbreviated!")) |
|
773 | raise error.Abort(_("option --config may not be abbreviated!")) | |
774 | if options["cwd"]: |
|
774 | if options["cwd"]: | |
775 | raise error.Abort(_("option --cwd may not be abbreviated!")) |
|
775 | raise error.Abort(_("option --cwd may not be abbreviated!")) | |
776 | if options["repository"]: |
|
776 | if options["repository"]: | |
777 | raise error.Abort(_( |
|
777 | raise error.Abort(_( | |
778 | "option -R has to be separated from other options (e.g. not " |
|
778 | "option -R has to be separated from other options (e.g. not " | |
779 | "-qR) and --repository may only be abbreviated as --repo!")) |
|
779 | "-qR) and --repository may only be abbreviated as --repo!")) | |
780 |
|
780 | |||
781 | if options["encoding"]: |
|
781 | if options["encoding"]: | |
782 | encoding.encoding = options["encoding"] |
|
782 | encoding.encoding = options["encoding"] | |
783 | if options["encodingmode"]: |
|
783 | if options["encodingmode"]: | |
784 | encoding.encodingmode = options["encodingmode"] |
|
784 | encoding.encodingmode = options["encodingmode"] | |
785 | if options["time"]: |
|
785 | if options["time"]: | |
786 | def get_times(): |
|
786 | def get_times(): | |
787 | t = os.times() |
|
787 | t = os.times() | |
788 | if t[4] == 0.0: |
|
788 | if t[4] == 0.0: | |
789 | # Windows leaves this as zero, so use time.clock() |
|
789 | # Windows leaves this as zero, so use time.clock() | |
790 | t = (t[0], t[1], t[2], t[3], time.clock()) |
|
790 | t = (t[0], t[1], t[2], t[3], time.clock()) | |
791 | return t |
|
791 | return t | |
792 | s = get_times() |
|
792 | s = get_times() | |
793 | def print_time(): |
|
793 | def print_time(): | |
794 | t = get_times() |
|
794 | t = get_times() | |
795 | ui.warn( |
|
795 | ui.warn( | |
796 | _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") % |
|
796 | _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") % | |
797 | (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3])) |
|
797 | (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3])) | |
798 | ui.atexit(print_time) |
|
798 | ui.atexit(print_time) | |
799 | if options["profile"]: |
|
799 | if options["profile"]: | |
800 | profiler.start() |
|
800 | profiler.start() | |
801 |
|
801 | |||
802 | if options['verbose'] or options['debug'] or options['quiet']: |
|
802 | if options['verbose'] or options['debug'] or options['quiet']: | |
803 | for opt in ('verbose', 'debug', 'quiet'): |
|
803 | for opt in ('verbose', 'debug', 'quiet'): | |
804 | val = str(bool(options[opt])) |
|
804 | val = str(bool(options[opt])) | |
805 | if pycompat.ispy3: |
|
805 | if pycompat.ispy3: | |
806 | val = val.encode('ascii') |
|
806 | val = val.encode('ascii') | |
807 | for ui_ in uis: |
|
807 | for ui_ in uis: | |
808 | ui_.setconfig('ui', opt, val, '--' + opt) |
|
808 | ui_.setconfig('ui', opt, val, '--' + opt) | |
809 |
|
809 | |||
810 | if options['traceback']: |
|
810 | if options['traceback']: | |
811 | for ui_ in uis: |
|
811 | for ui_ in uis: | |
812 | ui_.setconfig('ui', 'traceback', 'on', '--traceback') |
|
812 | ui_.setconfig('ui', 'traceback', 'on', '--traceback') | |
813 |
|
813 | |||
814 | if options['noninteractive']: |
|
814 | if options['noninteractive']: | |
815 | for ui_ in uis: |
|
815 | for ui_ in uis: | |
816 | ui_.setconfig('ui', 'interactive', 'off', '-y') |
|
816 | ui_.setconfig('ui', 'interactive', 'off', '-y') | |
817 |
|
817 | |||
818 | if cmdoptions.get('insecure', False): |
|
818 | if cmdoptions.get('insecure', False): | |
819 | for ui_ in uis: |
|
819 | for ui_ in uis: | |
820 | ui_.insecureconnections = True |
|
820 | ui_.insecureconnections = True | |
821 |
|
821 | |||
822 | # setup color handling before pager, because setting up pager |
|
822 | # setup color handling before pager, because setting up pager | |
823 | # might cause incorrect console information |
|
823 | # might cause incorrect console information | |
824 | coloropt = options['color'] |
|
824 | coloropt = options['color'] | |
825 | for ui_ in uis: |
|
825 | for ui_ in uis: | |
826 | if coloropt: |
|
826 | if coloropt: | |
827 | ui_.setconfig('ui', 'color', coloropt, '--color') |
|
827 | ui_.setconfig('ui', 'color', coloropt, '--color') | |
828 | color.setup(ui_) |
|
828 | color.setup(ui_) | |
829 |
|
829 | |||
830 | if util.parsebool(options['pager']): |
|
830 | if util.parsebool(options['pager']): | |
|
831 | # ui.pager() expects 'internal-always-' prefix in this case | |||
831 | ui.pager('internal-always-' + cmd) |
|
832 | ui.pager('internal-always-' + cmd) | |
832 | elif options['pager'] != 'auto': |
|
833 | elif options['pager'] != 'auto': | |
833 | ui.disablepager() |
|
834 | ui.disablepager() | |
834 |
|
835 | |||
835 | if options['version']: |
|
836 | if options['version']: | |
836 | return commands.version_(ui) |
|
837 | return commands.version_(ui) | |
837 | if options['help']: |
|
838 | if options['help']: | |
838 | return commands.help_(ui, cmd, command=cmd is not None) |
|
839 | return commands.help_(ui, cmd, command=cmd is not None) | |
839 | elif not cmd: |
|
840 | elif not cmd: | |
840 | return commands.help_(ui, 'shortlist') |
|
841 | return commands.help_(ui, 'shortlist') | |
841 |
|
842 | |||
842 | repo = None |
|
843 | repo = None | |
843 | cmdpats = args[:] |
|
844 | cmdpats = args[:] | |
844 | if not func.norepo: |
|
845 | if not func.norepo: | |
845 | # use the repo from the request only if we don't have -R |
|
846 | # use the repo from the request only if we don't have -R | |
846 | if not rpath and not cwd: |
|
847 | if not rpath and not cwd: | |
847 | repo = req.repo |
|
848 | repo = req.repo | |
848 |
|
849 | |||
849 | if repo: |
|
850 | if repo: | |
850 | # set the descriptors of the repo ui to those of ui |
|
851 | # set the descriptors of the repo ui to those of ui | |
851 | repo.ui.fin = ui.fin |
|
852 | repo.ui.fin = ui.fin | |
852 | repo.ui.fout = ui.fout |
|
853 | repo.ui.fout = ui.fout | |
853 | repo.ui.ferr = ui.ferr |
|
854 | repo.ui.ferr = ui.ferr | |
854 | else: |
|
855 | else: | |
855 | try: |
|
856 | try: | |
856 | repo = hg.repository(ui, path=path, |
|
857 | repo = hg.repository(ui, path=path, | |
857 | presetupfuncs=req.prereposetups) |
|
858 | presetupfuncs=req.prereposetups) | |
858 | if not repo.local(): |
|
859 | if not repo.local(): | |
859 | raise error.Abort(_("repository '%s' is not local") |
|
860 | raise error.Abort(_("repository '%s' is not local") | |
860 | % path) |
|
861 | % path) | |
861 | repo.ui.setconfig("bundle", "mainreporoot", repo.root, |
|
862 | repo.ui.setconfig("bundle", "mainreporoot", repo.root, | |
862 | 'repo') |
|
863 | 'repo') | |
863 | except error.RequirementError: |
|
864 | except error.RequirementError: | |
864 | raise |
|
865 | raise | |
865 | except error.RepoError: |
|
866 | except error.RepoError: | |
866 | if rpath and rpath[-1]: # invalid -R path |
|
867 | if rpath and rpath[-1]: # invalid -R path | |
867 | raise |
|
868 | raise | |
868 | if not func.optionalrepo: |
|
869 | if not func.optionalrepo: | |
869 | if func.inferrepo and args and not path: |
|
870 | if func.inferrepo and args and not path: | |
870 | # try to infer -R from command args |
|
871 | # try to infer -R from command args | |
871 | repos = map(cmdutil.findrepo, args) |
|
872 | repos = map(cmdutil.findrepo, args) | |
872 | guess = repos[0] |
|
873 | guess = repos[0] | |
873 | if guess and repos.count(guess) == len(repos): |
|
874 | if guess and repos.count(guess) == len(repos): | |
874 | req.args = ['--repository', guess] + fullargs |
|
875 | req.args = ['--repository', guess] + fullargs | |
875 | return _dispatch(req) |
|
876 | return _dispatch(req) | |
876 | if not path: |
|
877 | if not path: | |
877 | raise error.RepoError(_("no repository found in" |
|
878 | raise error.RepoError(_("no repository found in" | |
878 | " '%s' (.hg not found)") |
|
879 | " '%s' (.hg not found)") | |
879 | % pycompat.getcwd()) |
|
880 | % pycompat.getcwd()) | |
880 | raise |
|
881 | raise | |
881 | if repo: |
|
882 | if repo: | |
882 | ui = repo.ui |
|
883 | ui = repo.ui | |
883 | if options['hidden']: |
|
884 | if options['hidden']: | |
884 | repo = repo.unfiltered() |
|
885 | repo = repo.unfiltered() | |
885 | args.insert(0, repo) |
|
886 | args.insert(0, repo) | |
886 | elif rpath: |
|
887 | elif rpath: | |
887 | ui.warn(_("warning: --repository ignored\n")) |
|
888 | ui.warn(_("warning: --repository ignored\n")) | |
888 |
|
889 | |||
889 | msg = _formatargs(fullargs) |
|
890 | msg = _formatargs(fullargs) | |
890 | ui.log("command", '%s\n', msg) |
|
891 | ui.log("command", '%s\n', msg) | |
891 | strcmdopt = pycompat.strkwargs(cmdoptions) |
|
892 | strcmdopt = pycompat.strkwargs(cmdoptions) | |
892 | d = lambda: util.checksignature(func)(ui, *args, **strcmdopt) |
|
893 | d = lambda: util.checksignature(func)(ui, *args, **strcmdopt) | |
893 | try: |
|
894 | try: | |
894 | return runcommand(lui, repo, cmd, fullargs, ui, options, d, |
|
895 | return runcommand(lui, repo, cmd, fullargs, ui, options, d, | |
895 | cmdpats, cmdoptions) |
|
896 | cmdpats, cmdoptions) | |
896 | finally: |
|
897 | finally: | |
897 | if repo and repo != req.repo: |
|
898 | if repo and repo != req.repo: | |
898 | repo.close() |
|
899 | repo.close() | |
899 |
|
900 | |||
900 | def _runcommand(ui, options, cmd, cmdfunc): |
|
901 | def _runcommand(ui, options, cmd, cmdfunc): | |
901 | """Run a command function, possibly with profiling enabled.""" |
|
902 | """Run a command function, possibly with profiling enabled.""" | |
902 | try: |
|
903 | try: | |
903 | return cmdfunc() |
|
904 | return cmdfunc() | |
904 | except error.SignatureError: |
|
905 | except error.SignatureError: | |
905 | raise error.CommandError(cmd, _('invalid arguments')) |
|
906 | raise error.CommandError(cmd, _('invalid arguments')) | |
906 |
|
907 | |||
907 | def _exceptionwarning(ui): |
|
908 | def _exceptionwarning(ui): | |
908 | """Produce a warning message for the current active exception""" |
|
909 | """Produce a warning message for the current active exception""" | |
909 |
|
910 | |||
910 | # For compatibility checking, we discard the portion of the hg |
|
911 | # For compatibility checking, we discard the portion of the hg | |
911 | # version after the + on the assumption that if a "normal |
|
912 | # version after the + on the assumption that if a "normal | |
912 | # user" is running a build with a + in it the packager |
|
913 | # user" is running a build with a + in it the packager | |
913 | # probably built from fairly close to a tag and anyone with a |
|
914 | # probably built from fairly close to a tag and anyone with a | |
914 | # 'make local' copy of hg (where the version number can be out |
|
915 | # 'make local' copy of hg (where the version number can be out | |
915 | # of date) will be clueful enough to notice the implausible |
|
916 | # of date) will be clueful enough to notice the implausible | |
916 | # version number and try updating. |
|
917 | # version number and try updating. | |
917 | ct = util.versiontuple(n=2) |
|
918 | ct = util.versiontuple(n=2) | |
918 | worst = None, ct, '' |
|
919 | worst = None, ct, '' | |
919 | if ui.config('ui', 'supportcontact') is None: |
|
920 | if ui.config('ui', 'supportcontact') is None: | |
920 | for name, mod in extensions.extensions(): |
|
921 | for name, mod in extensions.extensions(): | |
921 | testedwith = getattr(mod, 'testedwith', '') |
|
922 | testedwith = getattr(mod, 'testedwith', '') | |
922 | if pycompat.ispy3 and isinstance(testedwith, str): |
|
923 | if pycompat.ispy3 and isinstance(testedwith, str): | |
923 | testedwith = testedwith.encode(u'utf-8') |
|
924 | testedwith = testedwith.encode(u'utf-8') | |
924 | report = getattr(mod, 'buglink', _('the extension author.')) |
|
925 | report = getattr(mod, 'buglink', _('the extension author.')) | |
925 | if not testedwith.strip(): |
|
926 | if not testedwith.strip(): | |
926 | # We found an untested extension. It's likely the culprit. |
|
927 | # We found an untested extension. It's likely the culprit. | |
927 | worst = name, 'unknown', report |
|
928 | worst = name, 'unknown', report | |
928 | break |
|
929 | break | |
929 |
|
930 | |||
930 | # Never blame on extensions bundled with Mercurial. |
|
931 | # Never blame on extensions bundled with Mercurial. | |
931 | if extensions.ismoduleinternal(mod): |
|
932 | if extensions.ismoduleinternal(mod): | |
932 | continue |
|
933 | continue | |
933 |
|
934 | |||
934 | tested = [util.versiontuple(t, 2) for t in testedwith.split()] |
|
935 | tested = [util.versiontuple(t, 2) for t in testedwith.split()] | |
935 | if ct in tested: |
|
936 | if ct in tested: | |
936 | continue |
|
937 | continue | |
937 |
|
938 | |||
938 | lower = [t for t in tested if t < ct] |
|
939 | lower = [t for t in tested if t < ct] | |
939 | nearest = max(lower or tested) |
|
940 | nearest = max(lower or tested) | |
940 | if worst[0] is None or nearest < worst[1]: |
|
941 | if worst[0] is None or nearest < worst[1]: | |
941 | worst = name, nearest, report |
|
942 | worst = name, nearest, report | |
942 | if worst[0] is not None: |
|
943 | if worst[0] is not None: | |
943 | name, testedwith, report = worst |
|
944 | name, testedwith, report = worst | |
944 | if not isinstance(testedwith, (bytes, str)): |
|
945 | if not isinstance(testedwith, (bytes, str)): | |
945 | testedwith = '.'.join([str(c) for c in testedwith]) |
|
946 | testedwith = '.'.join([str(c) for c in testedwith]) | |
946 | warning = (_('** Unknown exception encountered with ' |
|
947 | warning = (_('** Unknown exception encountered with ' | |
947 | 'possibly-broken third-party extension %s\n' |
|
948 | 'possibly-broken third-party extension %s\n' | |
948 | '** which supports versions %s of Mercurial.\n' |
|
949 | '** which supports versions %s of Mercurial.\n' | |
949 | '** Please disable %s and try your action again.\n' |
|
950 | '** Please disable %s and try your action again.\n' | |
950 | '** If that fixes the bug please report it to %s\n') |
|
951 | '** If that fixes the bug please report it to %s\n') | |
951 | % (name, testedwith, name, report)) |
|
952 | % (name, testedwith, name, report)) | |
952 | else: |
|
953 | else: | |
953 | bugtracker = ui.config('ui', 'supportcontact') |
|
954 | bugtracker = ui.config('ui', 'supportcontact') | |
954 | if bugtracker is None: |
|
955 | if bugtracker is None: | |
955 | bugtracker = _("https://mercurial-scm.org/wiki/BugTracker") |
|
956 | bugtracker = _("https://mercurial-scm.org/wiki/BugTracker") | |
956 | warning = (_("** unknown exception encountered, " |
|
957 | warning = (_("** unknown exception encountered, " | |
957 | "please report by visiting\n** ") + bugtracker + '\n') |
|
958 | "please report by visiting\n** ") + bugtracker + '\n') | |
958 | if pycompat.ispy3: |
|
959 | if pycompat.ispy3: | |
959 | sysversion = sys.version.encode(u'utf-8') |
|
960 | sysversion = sys.version.encode(u'utf-8') | |
960 | else: |
|
961 | else: | |
961 | sysversion = sys.version |
|
962 | sysversion = sys.version | |
962 | sysversion = sysversion.replace('\n', '') |
|
963 | sysversion = sysversion.replace('\n', '') | |
963 | warning += ((_("** Python %s\n") % sysversion) + |
|
964 | warning += ((_("** Python %s\n") % sysversion) + | |
964 | (_("** Mercurial Distributed SCM (version %s)\n") % |
|
965 | (_("** Mercurial Distributed SCM (version %s)\n") % | |
965 | util.version()) + |
|
966 | util.version()) + | |
966 | (_("** Extensions loaded: %s\n") % |
|
967 | (_("** Extensions loaded: %s\n") % | |
967 | ", ".join([x[0] for x in extensions.extensions()]))) |
|
968 | ", ".join([x[0] for x in extensions.extensions()]))) | |
968 | return warning |
|
969 | return warning | |
969 |
|
970 | |||
970 | def handlecommandexception(ui): |
|
971 | def handlecommandexception(ui): | |
971 | """Produce a warning message for broken commands |
|
972 | """Produce a warning message for broken commands | |
972 |
|
973 | |||
973 | Called when handling an exception; the exception is reraised if |
|
974 | Called when handling an exception; the exception is reraised if | |
974 | this function returns False, ignored otherwise. |
|
975 | this function returns False, ignored otherwise. | |
975 | """ |
|
976 | """ | |
976 | warning = _exceptionwarning(ui) |
|
977 | warning = _exceptionwarning(ui) | |
977 | ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) |
|
978 | ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) | |
978 | ui.warn(warning) |
|
979 | ui.warn(warning) | |
979 | return False # re-raise the exception |
|
980 | return False # re-raise the exception |
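
A quick illustration of the --config handling shown in _parseconfig above: each value is split once on '=' and the left-hand part once on '.'. The sketch below is illustrative only; split_config is a hypothetical helper, not part of dispatch.py, and it omits ui.setconfig() and the translated error message.

# Hypothetical helper mirroring _parseconfig's splitting of
# "section.name=value"; illustrative only, not Mercurial API.
def split_config(cfg):
    # split "section.name=value" into its three parts
    name, value = [part.strip() for part in cfg.split('=', 1)]
    section, name = name.split('.', 1)
    if not section or not name:
        raise ValueError('malformed --config option: %r' % cfg)
    return section, name, value

assert split_config('ui.username=alice') == ('ui', 'username', 'alice')
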
@@ -1,460 +1,461 @@
# pycompat.py - portability shim for python 3
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial portability shim for python 3.

This contains aliases to hide python version-specific details from the core.
"""

from __future__ import absolute_import

import getopt
import os
import shlex
import sys

ispy3 = (sys.version_info[0] >= 3)
ispypy = (r'__pypy__' in sys.builtin_module_names)

if not ispy3:
    import cookielib
    import cPickle as pickle
    import httplib
    import Queue as _queue
    import SocketServer as socketserver
    import xmlrpclib
else:
    import http.cookiejar as cookielib
    import http.client as httplib
    import pickle
    import queue as _queue
    import socketserver
    import xmlrpc.client as xmlrpclib

empty = _queue.Empty
queue = _queue.Queue

def identity(a):
    return a

if ispy3:
    import builtins
    import functools
    import io
    import struct

    fsencode = os.fsencode
    fsdecode = os.fsdecode
    oslinesep = os.linesep.encode('ascii')
    osname = os.name.encode('ascii')
    ospathsep = os.pathsep.encode('ascii')
    ossep = os.sep.encode('ascii')
    osaltsep = os.altsep
    if osaltsep:
        osaltsep = osaltsep.encode('ascii')
    # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
    # returns bytes.
    getcwd = os.getcwdb
    sysplatform = sys.platform.encode('ascii')
    sysexecutable = sys.executable
    if sysexecutable:
        sysexecutable = os.fsencode(sysexecutable)
    stringio = io.BytesIO
    maplist = lambda *args: list(map(*args))

    # TODO: .buffer might not exist if std streams were replaced; we'll need
    # a silly wrapper to make a bytes stream backed by a unicode one.
    stdin = sys.stdin.buffer
    stdout = sys.stdout.buffer
    stderr = sys.stderr.buffer

    # Since Python 3 converts argv to wchar_t type by Py_DecodeLocale() on Unix,
    # we can use os.fsencode() to get back bytes argv.
    #
    # https://hg.python.org/cpython/file/v3.5.1/Programs/python.c#l55
    #
    # TODO: On Windows, the native argv is wchar_t, so we'll need a different
    # workaround to simulate the Python 2 (i.e. ANSI Win32 API) behavior.
    if getattr(sys, 'argv', None) is not None:
        sysargv = list(map(os.fsencode, sys.argv))

    bytechr = struct.Struct('>B').pack

    class bytestr(bytes):
        """A bytes which mostly acts as a Python 2 str

        >>> bytestr(), bytestr(bytearray(b'foo')), bytestr(u'ascii'), bytestr(1)
        (b'', b'foo', b'ascii', b'1')
        >>> s = bytestr(b'foo')
        >>> assert s is bytestr(s)

        __bytes__() should be called if provided:

        >>> class bytesable(object):
        ...     def __bytes__(self):
        ...         return b'bytes'
        >>> bytestr(bytesable())
        b'bytes'

        There's no implicit conversion from non-ascii str as its encoding is
        unknown:

        >>> bytestr(chr(0x80)) # doctest: +ELLIPSIS
        Traceback (most recent call last):
        ...
        UnicodeEncodeError: ...

        Comparison between bytestr and bytes should work:

        >>> assert bytestr(b'foo') == b'foo'
        >>> assert b'foo' == bytestr(b'foo')
        >>> assert b'f' in bytestr(b'foo')
        >>> assert bytestr(b'f') in b'foo'

        Sliced elements should be bytes, not integer:

        >>> s[1], s[:2]
        (b'o', b'fo')
        >>> list(s), list(reversed(s))
        ([b'f', b'o', b'o'], [b'o', b'o', b'f'])

        As bytestr type isn't propagated across operations, you need to cast
        bytes to bytestr explicitly:

        >>> s = bytestr(b'foo').upper()
        >>> t = bytestr(s)
        >>> s[0], t[0]
        (70, b'F')

        Be careful to not pass a bytestr object to a function which expects
        bytearray-like behavior.

        >>> t = bytes(t) # cast to bytes
        >>> assert type(t) is bytes
        """

        def __new__(cls, s=b''):
            if isinstance(s, bytestr):
                return s
            if (not isinstance(s, (bytes, bytearray))
                and not hasattr(s, u'__bytes__')): # hasattr-py3-only
                s = str(s).encode(u'ascii')
            return bytes.__new__(cls, s)

        def __getitem__(self, key):
            s = bytes.__getitem__(self, key)
            if not isinstance(s, bytes):
                s = bytechr(s)
            return s

        def __iter__(self):
            return iterbytestr(bytes.__iter__(self))

    def iterbytestr(s):
        """Iterate bytes as if it were a str object of Python 2"""
        return map(bytechr, s)

    def sysbytes(s):
        """Convert an internal str (e.g. keyword, __doc__) back to bytes

        This never raises UnicodeEncodeError, but only ASCII characters
        can be round-trip by sysstr(sysbytes(s)).
        """
        return s.encode(u'utf-8')

    def sysstr(s):
        """Return a keyword str to be passed to Python functions such as
        getattr() and str.encode()

        This never raises UnicodeDecodeError. Non-ascii characters are
        considered invalid and mapped to arbitrary but unique code points
        such that 'sysstr(a) != sysstr(b)' for all 'a != b'.
        """
        if isinstance(s, builtins.str):
            return s
        return s.decode(u'latin-1')

    def strurl(url):
        """Converts a bytes url back to str"""
        return url.decode(u'ascii')

    def bytesurl(url):
        """Converts a str url to bytes by encoding in ascii"""
        return url.encode(u'ascii')

    def raisewithtb(exc, tb):
        """Raise exception with the given traceback"""
        raise exc.with_traceback(tb)

    def getdoc(obj):
        """Get docstring as bytes; may be None so gettext() won't confuse it
        with _('')"""
        doc = getattr(obj, u'__doc__', None)
        if doc is None:
            return doc
        return sysbytes(doc)

    def _wrapattrfunc(f):
        @functools.wraps(f)
        def w(object, name, *args):
            return f(object, sysstr(name), *args)
        return w

    # these wrappers are automagically imported by hgloader
    delattr = _wrapattrfunc(builtins.delattr)
    getattr = _wrapattrfunc(builtins.getattr)
    hasattr = _wrapattrfunc(builtins.hasattr)
    setattr = _wrapattrfunc(builtins.setattr)
    xrange = builtins.range
    unicode = str

    def open(name, mode='r', buffering=-1):
        return builtins.open(name, sysstr(mode), buffering)

    def getoptb(args, shortlist, namelist):
        """
        Takes bytes arguments, converts them to unicode, pass them to
        getopt.getopt(), convert the returned values back to bytes and then
        return them for Python 3 compatibility as getopt.getopt() don't accepts
        bytes on Python 3.
        """
        args = [a.decode('latin-1') for a in args]
        shortlist = shortlist.decode('latin-1')
        namelist = [a.decode('latin-1') for a in namelist]
        opts, args = getopt.getopt(args, shortlist, namelist)
        opts = [(a[0].encode('latin-1'), a[1].encode('latin-1'))
                for a in opts]
        args = [a.encode('latin-1') for a in args]
        return opts, args

    def strkwargs(dic):
        """
        Converts the keys of a python dictonary to str i.e. unicodes so that
        they can be passed as keyword arguments as dictonaries with bytes keys
        can't be passed as keyword arguments to functions on Python 3.
        """
        dic = dict((k.decode('latin-1'), v) for k, v in dic.iteritems())
        return dic

    def byteskwargs(dic):
        """
        Converts keys of python dictonaries to bytes as they were converted to
        str to pass that dictonary as a keyword argument on Python 3.
        """
        dic = dict((k.encode('latin-1'), v) for k, v in dic.iteritems())
        return dic

    # TODO: handle shlex.shlex().
    def shlexsplit(s):
        """
        Takes bytes argument, convert it to str i.e. unicodes, pass that into
        shlex.split(), convert the returned value to bytes and return that for
        Python 3 compatibility as shelx.split() don't accept bytes on Python 3.
        """
        ret = shlex.split(s.decode('latin-1'))
        return [a.encode('latin-1') for a in ret]

else:
    import cStringIO

    bytechr = chr
    bytestr = str
    iterbytestr = iter
    sysbytes = identity
    sysstr = identity
    strurl = identity
    bytesurl = identity

    # this can't be parsed on Python 3
    exec('def raisewithtb(exc, tb):\n'
         '    raise exc, None, tb\n')

    def fsencode(filename):
        """
        Partial backport from os.py in Python 3, which only accepts bytes.
        In Python 2, our paths should only ever be bytes, a unicode path
        indicates a bug.
        """
        if isinstance(filename, str):
            return filename
        else:
            raise TypeError(
                "expect str, not %s" % type(filename).__name__)

    # In Python 2, fsdecode() has a very chance to receive bytes. So it's
    # better not to touch Python 2 part as it's already working fine.
    fsdecode = identity

    def getdoc(obj):
        return getattr(obj, '__doc__', None)

    def getoptb(args, shortlist, namelist):
        return getopt.getopt(args, shortlist, namelist)

    strkwargs = identity
    byteskwargs = identity

    oslinesep = os.linesep
    osname = os.name
    ospathsep = os.pathsep
    ossep = os.sep
    osaltsep = os.altsep
    stdin = sys.stdin
    stdout = sys.stdout
    stderr = sys.stderr
    if getattr(sys, 'argv', None) is not None:
        sysargv = sys.argv
    sysplatform = sys.platform
    getcwd = os.getcwd
    sysexecutable = sys.executable
    shlexsplit = shlex.split
    stringio = cStringIO.StringIO
    maplist = map

class _pycompatstub(object):
    def __init__(self):
        self._aliases = {}

    def _registeraliases(self, origin, items):
        """Add items that will be populated at the first access"""
        items = map(sysstr, items)
        self._aliases.update(
            (item.replace(sysstr('_'), sysstr('')).lower(), (origin, item))
            for item in items)

    def _registeralias(self, origin, attr, name):
        """Alias ``origin``.``attr`` as ``name``"""
        self._aliases[sysstr(name)] = (origin, sysstr(attr))

    def __getattr__(self, name):
        try:
            origin, item = self._aliases[name]
        except KeyError:
            raise AttributeError(name)
        self.__dict__[name] = obj = getattr(origin, item)
        return obj

httpserver = _pycompatstub()
urlreq = _pycompatstub()
urlerr = _pycompatstub()
if not ispy3:
    import BaseHTTPServer
    import CGIHTTPServer
    import SimpleHTTPServer
    import urllib2
    import urllib
    import urlparse
    urlreq._registeraliases(urllib, (
        "addclosehook",
        "addinfourl",
        "ftpwrapper",
        "pathname2url",
        "quote",
        "splitattr",
        "splitpasswd",
        "splitport",
        "splituser",
        "unquote",
        "url2pathname",
        "urlencode",
    ))
    urlreq._registeraliases(urllib2, (
        "AbstractHTTPHandler",
        "BaseHandler",
        "build_opener",
        "FileHandler",
        "FTPHandler",
        "HTTPBasicAuthHandler",
        "HTTPDigestAuthHandler",
        "HTTPHandler",
        "HTTPPasswordMgrWithDefaultRealm",
        "HTTPSHandler",
        "install_opener",
        "ProxyHandler",
        "Request",
        "urlopen",
    ))
    urlreq._registeraliases(urlparse, (
        "urlparse",
        "urlunparse",
    ))
    urlerr._registeraliases(urllib2, (
        "HTTPError",
        "URLError",
    ))
    httpserver._registeraliases(BaseHTTPServer, (
        "HTTPServer",
        "BaseHTTPRequestHandler",
    ))
    httpserver._registeraliases(SimpleHTTPServer, (
        "SimpleHTTPRequestHandler",
    ))
    httpserver._registeraliases(CGIHTTPServer, (
        "CGIHTTPRequestHandler",
    ))

else:
    import urllib.parse
    urlreq._registeraliases(urllib.parse, (
        "splitattr",
        "splitpasswd",
        "splitport",
        "splituser",
        "urlparse",
        "urlunparse",
    ))
    urlreq._registeralias(urllib.parse, "unquote_to_bytes", "unquote")
    import urllib.request
    urlreq._registeraliases(urllib.request, (
        "AbstractHTTPHandler",
        "BaseHandler",
        "build_opener",
        "FileHandler",
        "FTPHandler",
        "ftpwrapper",
        "HTTPHandler",
        "HTTPSHandler",
        "install_opener",
        "pathname2url",
        "HTTPBasicAuthHandler",
        "HTTPDigestAuthHandler",
        "HTTPPasswordMgrWithDefaultRealm",
        "ProxyHandler",
        "Request",
        "url2pathname",
        "urlopen",
    ))
    import urllib.response
    urlreq._registeraliases(urllib.response, (
        "addclosehook",
        "addinfourl",
    ))
    import urllib.error
    urlerr._registeraliases(urllib.error, (
        "HTTPError",
        "URLError",
    ))
    import http.server
    httpserver._registeraliases(http.server, (
        "HTTPServer",
        "BaseHTTPRequestHandler",
        "SimpleHTTPRequestHandler",
        "CGIHTTPRequestHandler",
    ))

    # urllib.parse.quote() accepts both str and bytes, decodes bytes
    # (if necessary), and returns str. This is wonky. We provide a custom
    # implementation that only accepts bytes and emits bytes.
    def quote(s, safe=r'/'):
        s = urllib.parse.quote_from_bytes(s, safe=safe)
        return s.encode('ascii', 'strict')

    # urllib.parse.urlencode() returns str. We use this function to make
    # sure we return bytes.
    def urlencode(query, doseq=False):
        s = urllib.parse.urlencode(query, doseq=doseq)
        return s.encode('ascii')

    urlreq.quote = quote
460 | urlreq.urlencode = urlencode |
|
461 | urlreq.urlencode = urlencode |
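The wrappers above keep the Python 3 URL helpers bytes-in/bytes-out, so callers never have to think about str versus bytes. A minimal doctest-style sketch of the expected behaviour on Python 3 (values are illustrative, not taken from Mercurial's test suite):

    >>> quote(b'a b/c')                # bytes in, bytes out
    b'a%20b/c'
    >>> urlencode({b'key': b'a b'})
    b'key=a+b'

Note also that _registeraliases exposes the stdlib names in lowercase (e.g. HTTPPasswordMgrWithDefaultRealm becomes urlreq.httppasswordmgrwithdefaultrealm, as used by ui.py below), and that on Python 3 urllib.parse.unquote_to_bytes is registered under the shorter name "unquote", so urlreq.unquote stays bytes-only as well.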
@@ -1,1778 +1,1785 @@
1 | # ui.py - user interface bits for mercurial |
|
1 | # ui.py - user interface bits for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2005-2007 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import collections |
|
10 | import collections | |
11 | import contextlib |
|
11 | import contextlib | |
12 | import errno |
|
12 | import errno | |
13 | import getpass |
|
13 | import getpass | |
14 | import inspect |
|
14 | import inspect | |
15 | import os |
|
15 | import os | |
16 | import re |
|
16 | import re | |
17 | import signal |
|
17 | import signal | |
18 | import socket |
|
18 | import socket | |
19 | import subprocess |
|
19 | import subprocess | |
20 | import sys |
|
20 | import sys | |
21 | import tempfile |
|
21 | import tempfile | |
22 | import traceback |
|
22 | import traceback | |
23 |
|
23 | |||
24 | from .i18n import _ |
|
24 | from .i18n import _ | |
25 | from .node import hex |
|
25 | from .node import hex | |
26 |
|
26 | |||
27 | from . import ( |
|
27 | from . import ( | |
28 | color, |
|
28 | color, | |
29 | config, |
|
29 | config, | |
30 | configitems, |
|
30 | configitems, | |
31 | encoding, |
|
31 | encoding, | |
32 | error, |
|
32 | error, | |
33 | formatter, |
|
33 | formatter, | |
34 | progress, |
|
34 | progress, | |
35 | pycompat, |
|
35 | pycompat, | |
36 | rcutil, |
|
36 | rcutil, | |
37 | scmutil, |
|
37 | scmutil, | |
38 | util, |
|
38 | util, | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 | urlreq = util.urlreq |
|
41 | urlreq = util.urlreq | |
42 |
|
42 | |||
43 | # for use with str.translate(None, _keepalnum), to keep just alphanumerics |
|
43 | # for use with str.translate(None, _keepalnum), to keep just alphanumerics | |
44 | _keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256)) |
|
44 | _keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256)) | |
45 | if not c.isalnum()) |
|
45 | if not c.isalnum()) | |
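A quick illustration of the intent (Python 2 byte-string semantics, since the table is built from pycompat.bytechr): passing _keepalnum as the deletechars argument removes every non-alphanumeric byte, e.g.

    >>> 'foo-bar_1 baz'.translate(None, _keepalnum)
    'foobar1baz'

(the example value is illustrative, not from the test suite).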
46 |
|
46 | |||
47 | # The config knobs that will be altered (if unset) by ui.tweakdefaults. |
|
47 | # The config knobs that will be altered (if unset) by ui.tweakdefaults. | |
48 | tweakrc = """ |
|
48 | tweakrc = """ | |
49 | [ui] |
|
49 | [ui] | |
50 | # The rollback command is dangerous. As a rule, don't use it. |
|
50 | # The rollback command is dangerous. As a rule, don't use it. | |
51 | rollback = False |
|
51 | rollback = False | |
52 |
|
52 | |||
53 | [commands] |
|
53 | [commands] | |
54 | # Make `hg status` emit cwd-relative paths by default. |
|
54 | # Make `hg status` emit cwd-relative paths by default. | |
55 | status.relative = yes |
|
55 | status.relative = yes | |
56 |
|
56 | |||
57 | [diff] |
|
57 | [diff] | |
58 | git = 1 |
|
58 | git = 1 | |
59 | """ |
|
59 | """ | |
60 |
|
60 | |||
61 | samplehgrcs = { |
|
61 | samplehgrcs = { | |
62 | 'user': |
|
62 | 'user': | |
63 | b"""# example user config (see 'hg help config' for more info) |
|
63 | b"""# example user config (see 'hg help config' for more info) | |
64 | [ui] |
|
64 | [ui] | |
65 | # name and email, e.g. |
|
65 | # name and email, e.g. | |
66 | # username = Jane Doe <jdoe@example.com> |
|
66 | # username = Jane Doe <jdoe@example.com> | |
67 | username = |
|
67 | username = | |
68 |
|
68 | |||
69 | # uncomment to disable color in command output |
|
69 | # uncomment to disable color in command output | |
70 | # (see 'hg help color' for details) |
|
70 | # (see 'hg help color' for details) | |
71 | # color = never |
|
71 | # color = never | |
72 |
|
72 | |||
73 | # uncomment to disable command output pagination |
|
73 | # uncomment to disable command output pagination | |
74 | # (see 'hg help pager' for details) |
|
74 | # (see 'hg help pager' for details) | |
75 | # paginate = never |
|
75 | # paginate = never | |
76 |
|
76 | |||
77 | [extensions] |
|
77 | [extensions] | |
78 | # uncomment these lines to enable some popular extensions |
|
78 | # uncomment these lines to enable some popular extensions | |
79 | # (see 'hg help extensions' for more info) |
|
79 | # (see 'hg help extensions' for more info) | |
80 | # |
|
80 | # | |
81 | # churn = |
|
81 | # churn = | |
82 | """, |
|
82 | """, | |
83 |
|
83 | |||
84 | 'cloned': |
|
84 | 'cloned': | |
85 | b"""# example repository config (see 'hg help config' for more info) |
|
85 | b"""# example repository config (see 'hg help config' for more info) | |
86 | [paths] |
|
86 | [paths] | |
87 | default = %s |
|
87 | default = %s | |
88 |
|
88 | |||
89 | # path aliases to other clones of this repo in URLs or filesystem paths |
|
89 | # path aliases to other clones of this repo in URLs or filesystem paths | |
90 | # (see 'hg help config.paths' for more info) |
|
90 | # (see 'hg help config.paths' for more info) | |
91 | # |
|
91 | # | |
92 | # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork |
|
92 | # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork | |
93 | # my-fork = ssh://jdoe@example.net/hg/jdoes-fork |
|
93 | # my-fork = ssh://jdoe@example.net/hg/jdoes-fork | |
94 | # my-clone = /home/jdoe/jdoes-clone |
|
94 | # my-clone = /home/jdoe/jdoes-clone | |
95 |
|
95 | |||
96 | [ui] |
|
96 | [ui] | |
97 | # name and email (local to this repository, optional), e.g. |
|
97 | # name and email (local to this repository, optional), e.g. | |
98 | # username = Jane Doe <jdoe@example.com> |
|
98 | # username = Jane Doe <jdoe@example.com> | |
99 | """, |
|
99 | """, | |
100 |
|
100 | |||
101 | 'local': |
|
101 | 'local': | |
102 | b"""# example repository config (see 'hg help config' for more info) |
|
102 | b"""# example repository config (see 'hg help config' for more info) | |
103 | [paths] |
|
103 | [paths] | |
104 | # path aliases to other clones of this repo in URLs or filesystem paths |
|
104 | # path aliases to other clones of this repo in URLs or filesystem paths | |
105 | # (see 'hg help config.paths' for more info) |
|
105 | # (see 'hg help config.paths' for more info) | |
106 | # |
|
106 | # | |
107 | # default = http://example.com/hg/example-repo |
|
107 | # default = http://example.com/hg/example-repo | |
108 | # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork |
|
108 | # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork | |
109 | # my-fork = ssh://jdoe@example.net/hg/jdoes-fork |
|
109 | # my-fork = ssh://jdoe@example.net/hg/jdoes-fork | |
110 | # my-clone = /home/jdoe/jdoes-clone |
|
110 | # my-clone = /home/jdoe/jdoes-clone | |
111 |
|
111 | |||
112 | [ui] |
|
112 | [ui] | |
113 | # name and email (local to this repository, optional), e.g. |
|
113 | # name and email (local to this repository, optional), e.g. | |
114 | # username = Jane Doe <jdoe@example.com> |
|
114 | # username = Jane Doe <jdoe@example.com> | |
115 | """, |
|
115 | """, | |
116 |
|
116 | |||
117 | 'global': |
|
117 | 'global': | |
118 | b"""# example system-wide hg config (see 'hg help config' for more info) |
|
118 | b"""# example system-wide hg config (see 'hg help config' for more info) | |
119 |
|
119 | |||
120 | [ui] |
|
120 | [ui] | |
121 | # uncomment to disable color in command output |
|
121 | # uncomment to disable color in command output | |
122 | # (see 'hg help color' for details) |
|
122 | # (see 'hg help color' for details) | |
123 | # color = never |
|
123 | # color = never | |
124 |
|
124 | |||
125 | # uncomment to disable command output pagination |
|
125 | # uncomment to disable command output pagination | |
126 | # (see 'hg help pager' for details) |
|
126 | # (see 'hg help pager' for details) | |
127 | # paginate = never |
|
127 | # paginate = never | |
128 |
|
128 | |||
129 | [extensions] |
|
129 | [extensions] | |
130 | # uncomment these lines to enable some popular extensions |
|
130 | # uncomment these lines to enable some popular extensions | |
131 | # (see 'hg help extensions' for more info) |
|
131 | # (see 'hg help extensions' for more info) | |
132 | # |
|
132 | # | |
133 | # blackbox = |
|
133 | # blackbox = | |
134 | # churn = |
|
134 | # churn = | |
135 | """, |
|
135 | """, | |
136 | } |
|
136 | } | |
137 |
|
137 | |||
138 |
|
138 | |||
139 | class httppasswordmgrdbproxy(object): |
|
139 | class httppasswordmgrdbproxy(object): | |
140 | """Delays loading urllib2 until it's needed.""" |
|
140 | """Delays loading urllib2 until it's needed.""" | |
141 | def __init__(self): |
|
141 | def __init__(self): | |
142 | self._mgr = None |
|
142 | self._mgr = None | |
143 |
|
143 | |||
144 | def _get_mgr(self): |
|
144 | def _get_mgr(self): | |
145 | if self._mgr is None: |
|
145 | if self._mgr is None: | |
146 | self._mgr = urlreq.httppasswordmgrwithdefaultrealm() |
|
146 | self._mgr = urlreq.httppasswordmgrwithdefaultrealm() | |
147 | return self._mgr |
|
147 | return self._mgr | |
148 |
|
148 | |||
149 | def add_password(self, *args, **kwargs): |
|
149 | def add_password(self, *args, **kwargs): | |
150 | return self._get_mgr().add_password(*args, **kwargs) |
|
150 | return self._get_mgr().add_password(*args, **kwargs) | |
151 |
|
151 | |||
152 | def find_user_password(self, *args, **kwargs): |
|
152 | def find_user_password(self, *args, **kwargs): | |
153 | return self._get_mgr().find_user_password(*args, **kwargs) |
|
153 | return self._get_mgr().find_user_password(*args, **kwargs) | |
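The proxy defers building the real password manager until a credential is first stored or looked up; a rough usage sketch (the realm, URI, and credential values are made up for illustration):

    mgrdb = httppasswordmgrdbproxy()                 # no manager created yet
    mgrdb.add_password(None, 'https://example.com/hg', 'jdoe', 'secret')
    mgrdb.find_user_password(None, 'https://example.com/hg')
    # -> ('jdoe', 'secret'); the underlying urllib2 manager was built lazily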
154 |
|
154 | |||
155 | def _catchterm(*args): |
|
155 | def _catchterm(*args): | |
156 | raise error.SignalInterrupt |
|
156 | raise error.SignalInterrupt | |
157 |
|
157 | |||
158 | # unique object used to detect that no default value was provided when |
|
158 | # unique object used to detect that no default value was provided when | |
159 | # retrieving a configuration value. |
|
159 | # retrieving a configuration value. | |
160 | _unset = object() |
|
160 | _unset = object() | |
161 |
|
161 | |||
162 | class ui(object): |
|
162 | class ui(object): | |
163 | def __init__(self, src=None): |
|
163 | def __init__(self, src=None): | |
164 | """Create a fresh new ui object if no src given |
|
164 | """Create a fresh new ui object if no src given | |
165 |
|
165 | |||
166 | Use uimod.ui.load() to create a ui which knows global and user configs. |
|
166 | Use uimod.ui.load() to create a ui which knows global and user configs. | |
167 | In most cases, you should use ui.copy() to create a copy of an existing |
|
167 | In most cases, you should use ui.copy() to create a copy of an existing | |
168 | ui object. |
|
168 | ui object. | |
169 | """ |
|
169 | """ | |
170 | # _buffers: used for temporary capture of output |
|
170 | # _buffers: used for temporary capture of output | |
171 | self._buffers = [] |
|
171 | self._buffers = [] | |
172 | # _exithandlers: callbacks run at the end of a request |
|
172 | # _exithandlers: callbacks run at the end of a request | |
173 | self._exithandlers = [] |
|
173 | self._exithandlers = [] | |
174 | # 3-tuple describing how each buffer in the stack behaves. |
|
174 | # 3-tuple describing how each buffer in the stack behaves. | |
175 | # Values are (capture stderr, capture subprocesses, apply labels). |
|
175 | # Values are (capture stderr, capture subprocesses, apply labels). | |
176 | self._bufferstates = [] |
|
176 | self._bufferstates = [] | |
177 | # When a buffer is active, defines whether we are expanding labels. |
|
177 | # When a buffer is active, defines whether we are expanding labels. | |
178 | # This exists to prevent an extra list lookup. |
|
178 | # This exists to prevent an extra list lookup. | |
179 | self._bufferapplylabels = None |
|
179 | self._bufferapplylabels = None | |
180 | self.quiet = self.verbose = self.debugflag = self.tracebackflag = False |
|
180 | self.quiet = self.verbose = self.debugflag = self.tracebackflag = False | |
181 | self._reportuntrusted = True |
|
181 | self._reportuntrusted = True | |
182 | self._knownconfig = configitems.coreitems |
|
182 | self._knownconfig = configitems.coreitems | |
183 | self._ocfg = config.config() # overlay |
|
183 | self._ocfg = config.config() # overlay | |
184 | self._tcfg = config.config() # trusted |
|
184 | self._tcfg = config.config() # trusted | |
185 | self._ucfg = config.config() # untrusted |
|
185 | self._ucfg = config.config() # untrusted | |
186 | self._trustusers = set() |
|
186 | self._trustusers = set() | |
187 | self._trustgroups = set() |
|
187 | self._trustgroups = set() | |
188 | self.callhooks = True |
|
188 | self.callhooks = True | |
189 | # Insecure server connections requested. |
|
189 | # Insecure server connections requested. | |
190 | self.insecureconnections = False |
|
190 | self.insecureconnections = False | |
191 | # Blocked time |
|
191 | # Blocked time | |
192 | self.logblockedtimes = False |
|
192 | self.logblockedtimes = False | |
193 | # color mode: see mercurial/color.py for possible values |
|
193 | # color mode: see mercurial/color.py for possible values | |
194 | self._colormode = None |
|
194 | self._colormode = None | |
195 | self._terminfoparams = {} |
|
195 | self._terminfoparams = {} | |
196 | self._styles = {} |
|
196 | self._styles = {} | |
197 |
|
197 | |||
198 | if src: |
|
198 | if src: | |
199 | self._exithandlers = src._exithandlers |
|
199 | self._exithandlers = src._exithandlers | |
200 | self.fout = src.fout |
|
200 | self.fout = src.fout | |
201 | self.ferr = src.ferr |
|
201 | self.ferr = src.ferr | |
202 | self.fin = src.fin |
|
202 | self.fin = src.fin | |
203 | self.pageractive = src.pageractive |
|
203 | self.pageractive = src.pageractive | |
204 | self._disablepager = src._disablepager |
|
204 | self._disablepager = src._disablepager | |
205 | self._tweaked = src._tweaked |
|
205 | self._tweaked = src._tweaked | |
206 |
|
206 | |||
207 | self._tcfg = src._tcfg.copy() |
|
207 | self._tcfg = src._tcfg.copy() | |
208 | self._ucfg = src._ucfg.copy() |
|
208 | self._ucfg = src._ucfg.copy() | |
209 | self._ocfg = src._ocfg.copy() |
|
209 | self._ocfg = src._ocfg.copy() | |
210 | self._trustusers = src._trustusers.copy() |
|
210 | self._trustusers = src._trustusers.copy() | |
211 | self._trustgroups = src._trustgroups.copy() |
|
211 | self._trustgroups = src._trustgroups.copy() | |
212 | self.environ = src.environ |
|
212 | self.environ = src.environ | |
213 | self.callhooks = src.callhooks |
|
213 | self.callhooks = src.callhooks | |
214 | self.insecureconnections = src.insecureconnections |
|
214 | self.insecureconnections = src.insecureconnections | |
215 | self._colormode = src._colormode |
|
215 | self._colormode = src._colormode | |
216 | self._terminfoparams = src._terminfoparams.copy() |
|
216 | self._terminfoparams = src._terminfoparams.copy() | |
217 | self._styles = src._styles.copy() |
|
217 | self._styles = src._styles.copy() | |
218 |
|
218 | |||
219 | self.fixconfig() |
|
219 | self.fixconfig() | |
220 |
|
220 | |||
221 | self.httppasswordmgrdb = src.httppasswordmgrdb |
|
221 | self.httppasswordmgrdb = src.httppasswordmgrdb | |
222 | self._blockedtimes = src._blockedtimes |
|
222 | self._blockedtimes = src._blockedtimes | |
223 | else: |
|
223 | else: | |
224 | self.fout = util.stdout |
|
224 | self.fout = util.stdout | |
225 | self.ferr = util.stderr |
|
225 | self.ferr = util.stderr | |
226 | self.fin = util.stdin |
|
226 | self.fin = util.stdin | |
227 | self.pageractive = False |
|
227 | self.pageractive = False | |
228 | self._disablepager = False |
|
228 | self._disablepager = False | |
229 | self._tweaked = False |
|
229 | self._tweaked = False | |
230 |
|
230 | |||
231 | # shared read-only environment |
|
231 | # shared read-only environment | |
232 | self.environ = encoding.environ |
|
232 | self.environ = encoding.environ | |
233 |
|
233 | |||
234 | self.httppasswordmgrdb = httppasswordmgrdbproxy() |
|
234 | self.httppasswordmgrdb = httppasswordmgrdbproxy() | |
235 | self._blockedtimes = collections.defaultdict(int) |
|
235 | self._blockedtimes = collections.defaultdict(int) | |
236 |
|
236 | |||
237 | allowed = self.configlist('experimental', 'exportableenviron') |
|
237 | allowed = self.configlist('experimental', 'exportableenviron') | |
238 | if '*' in allowed: |
|
238 | if '*' in allowed: | |
239 | self._exportableenviron = self.environ |
|
239 | self._exportableenviron = self.environ | |
240 | else: |
|
240 | else: | |
241 | self._exportableenviron = {} |
|
241 | self._exportableenviron = {} | |
242 | for k in allowed: |
|
242 | for k in allowed: | |
243 | if k in self.environ: |
|
243 | if k in self.environ: | |
244 | self._exportableenviron[k] = self.environ[k] |
|
244 | self._exportableenviron[k] = self.environ[k] | |
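The _buffers/_bufferstates pair drives output capture; a rough sketch of the intended flow, assuming the pushbuffer()/popbuffer() helpers defined later in ui.py (outside this hunk):

    ui.pushbuffer()
    ui.write(b'hello\n')     # captured instead of going to fout
    out = ui.popbuffer()     # -> b'hello\n'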
245 |
|
245 | |||
246 | @classmethod |
|
246 | @classmethod | |
247 | def load(cls): |
|
247 | def load(cls): | |
248 | """Create a ui and load global and user configs""" |
|
248 | """Create a ui and load global and user configs""" | |
249 | u = cls() |
|
249 | u = cls() | |
250 | # we always trust global config files and environment variables |
|
250 | # we always trust global config files and environment variables | |
251 | for t, f in rcutil.rccomponents(): |
|
251 | for t, f in rcutil.rccomponents(): | |
252 | if t == 'path': |
|
252 | if t == 'path': | |
253 | u.readconfig(f, trust=True) |
|
253 | u.readconfig(f, trust=True) | |
254 | elif t == 'items': |
|
254 | elif t == 'items': | |
255 | sections = set() |
|
255 | sections = set() | |
256 | for section, name, value, source in f: |
|
256 | for section, name, value, source in f: | |
257 | # do not set u._ocfg |
|
257 | # do not set u._ocfg | |
258 | # XXX clean this up once immutable config object is a thing |
|
258 | # XXX clean this up once immutable config object is a thing | |
259 | u._tcfg.set(section, name, value, source) |
|
259 | u._tcfg.set(section, name, value, source) | |
260 | u._ucfg.set(section, name, value, source) |
|
260 | u._ucfg.set(section, name, value, source) | |
261 | sections.add(section) |
|
261 | sections.add(section) | |
262 | for section in sections: |
|
262 | for section in sections: | |
263 | u.fixconfig(section=section) |
|
263 | u.fixconfig(section=section) | |
264 | else: |
|
264 | else: | |
265 | raise error.ProgrammingError('unknown rctype: %s' % t) |
|
265 | raise error.ProgrammingError('unknown rctype: %s' % t) | |
266 | u._maybetweakdefaults() |
|
266 | u._maybetweakdefaults() | |
267 | return u |
|
267 | return u | |
268 |
|
268 | |||
269 | def _maybetweakdefaults(self): |
|
269 | def _maybetweakdefaults(self): | |
270 | if not self.configbool('ui', 'tweakdefaults'): |
|
270 | if not self.configbool('ui', 'tweakdefaults'): | |
271 | return |
|
271 | return | |
272 | if self._tweaked or self.plain('tweakdefaults'): |
|
272 | if self._tweaked or self.plain('tweakdefaults'): | |
273 | return |
|
273 | return | |
274 |
|
274 | |||
275 | # Note: it is SUPER IMPORTANT that you set self._tweaked to |
|
275 | # Note: it is SUPER IMPORTANT that you set self._tweaked to | |
276 | # True *before* any calls to setconfig(), otherwise you'll get |
|
276 | # True *before* any calls to setconfig(), otherwise you'll get | |
277 | # infinite recursion between setconfig and this method. |
|
277 | # infinite recursion between setconfig and this method. | |
278 | # |
|
278 | # | |
279 | # TODO: We should extract an inner method in setconfig() to |
|
279 | # TODO: We should extract an inner method in setconfig() to | |
280 | # avoid this weirdness. |
|
280 | # avoid this weirdness. | |
281 | self._tweaked = True |
|
281 | self._tweaked = True | |
282 | tmpcfg = config.config() |
|
282 | tmpcfg = config.config() | |
283 | tmpcfg.parse('<tweakdefaults>', tweakrc) |
|
283 | tmpcfg.parse('<tweakdefaults>', tweakrc) | |
284 | for section in tmpcfg: |
|
284 | for section in tmpcfg: | |
285 | for name, value in tmpcfg.items(section): |
|
285 | for name, value in tmpcfg.items(section): | |
286 | if not self.hasconfig(section, name): |
|
286 | if not self.hasconfig(section, name): | |
287 | self.setconfig(section, name, value, "<tweakdefaults>") |
|
287 | self.setconfig(section, name, value, "<tweakdefaults>") | |
288 |
|
288 | |||
289 | def copy(self): |
|
289 | def copy(self): | |
290 | return self.__class__(self) |
|
290 | return self.__class__(self) | |
291 |
|
291 | |||
292 | def resetstate(self): |
|
292 | def resetstate(self): | |
293 | """Clear internal state that shouldn't persist across commands""" |
|
293 | """Clear internal state that shouldn't persist across commands""" | |
294 | if self._progbar: |
|
294 | if self._progbar: | |
295 | self._progbar.resetstate() # reset last-print time of progress bar |
|
295 | self._progbar.resetstate() # reset last-print time of progress bar | |
296 | self.httppasswordmgrdb = httppasswordmgrdbproxy() |
|
296 | self.httppasswordmgrdb = httppasswordmgrdbproxy() | |
297 |
|
297 | |||
298 | @contextlib.contextmanager |
|
298 | @contextlib.contextmanager | |
299 | def timeblockedsection(self, key): |
|
299 | def timeblockedsection(self, key): | |
300 | # this is open-coded below - search for timeblockedsection to find them |
|
300 | # this is open-coded below - search for timeblockedsection to find them | |
301 | starttime = util.timer() |
|
301 | starttime = util.timer() | |
302 | try: |
|
302 | try: | |
303 | yield |
|
303 | yield | |
304 | finally: |
|
304 | finally: | |
305 | self._blockedtimes[key + '_blocked'] += \ |
|
305 | self._blockedtimes[key + '_blocked'] += \ | |
306 | (util.timer() - starttime) * 1000 |
|
306 | (util.timer() - starttime) * 1000 | |
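A sketch of how callers are expected to use it (the key name here is arbitrary): the wall-clock time spent inside the block is accumulated under '<key>_blocked', in milliseconds, which is what ui.logblockedtimes reporting is based on.

    with ui.timeblockedsection('stdio'):
        data = ui.fin.read()     # time spent blocked on input is recorded
    # ui._blockedtimes['stdio_blocked'] now includes the elapsed milliseconds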
307 |
|
307 | |||
308 | def formatter(self, topic, opts): |
|
308 | def formatter(self, topic, opts): | |
309 | return formatter.formatter(self, self, topic, opts) |
|
309 | return formatter.formatter(self, self, topic, opts) | |
310 |
|
310 | |||
311 | def _trusted(self, fp, f): |
|
311 | def _trusted(self, fp, f): | |
312 | st = util.fstat(fp) |
|
312 | st = util.fstat(fp) | |
313 | if util.isowner(st): |
|
313 | if util.isowner(st): | |
314 | return True |
|
314 | return True | |
315 |
|
315 | |||
316 | tusers, tgroups = self._trustusers, self._trustgroups |
|
316 | tusers, tgroups = self._trustusers, self._trustgroups | |
317 | if '*' in tusers or '*' in tgroups: |
|
317 | if '*' in tusers or '*' in tgroups: | |
318 | return True |
|
318 | return True | |
319 |
|
319 | |||
320 | user = util.username(st.st_uid) |
|
320 | user = util.username(st.st_uid) | |
321 | group = util.groupname(st.st_gid) |
|
321 | group = util.groupname(st.st_gid) | |
322 | if user in tusers or group in tgroups or user == util.username(): |
|
322 | if user in tusers or group in tgroups or user == util.username(): | |
323 | return True |
|
323 | return True | |
324 |
|
324 | |||
325 | if self._reportuntrusted: |
|
325 | if self._reportuntrusted: | |
326 | self.warn(_('not trusting file %s from untrusted ' |
|
326 | self.warn(_('not trusting file %s from untrusted ' | |
327 | 'user %s, group %s\n') % (f, user, group)) |
|
327 | 'user %s, group %s\n') % (f, user, group)) | |
328 | return False |
|
328 | return False | |
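The usual way to widen this check is the [trusted] section of an hgrc (the user and group names below are placeholders):

    [trusted]
    users = root, jdoe
    groups = wheel

which feeds _trustusers/_trustgroups through fixconfig() further down; a '*' entry in either list trusts config files from any owner.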
329 |
|
329 | |||
330 | def readconfig(self, filename, root=None, trust=False, |
|
330 | def readconfig(self, filename, root=None, trust=False, | |
331 | sections=None, remap=None): |
|
331 | sections=None, remap=None): | |
332 | try: |
|
332 | try: | |
333 | fp = open(filename, u'rb') |
|
333 | fp = open(filename, u'rb') | |
334 | except IOError: |
|
334 | except IOError: | |
335 | if not sections: # ignore unless we were looking for something |
|
335 | if not sections: # ignore unless we were looking for something | |
336 | return |
|
336 | return | |
337 | raise |
|
337 | raise | |
338 |
|
338 | |||
339 | cfg = config.config() |
|
339 | cfg = config.config() | |
340 | trusted = sections or trust or self._trusted(fp, filename) |
|
340 | trusted = sections or trust or self._trusted(fp, filename) | |
341 |
|
341 | |||
342 | try: |
|
342 | try: | |
343 | cfg.read(filename, fp, sections=sections, remap=remap) |
|
343 | cfg.read(filename, fp, sections=sections, remap=remap) | |
344 | fp.close() |
|
344 | fp.close() | |
345 | except error.ConfigError as inst: |
|
345 | except error.ConfigError as inst: | |
346 | if trusted: |
|
346 | if trusted: | |
347 | raise |
|
347 | raise | |
348 | self.warn(_("ignored: %s\n") % str(inst)) |
|
348 | self.warn(_("ignored: %s\n") % str(inst)) | |
349 |
|
349 | |||
350 | if self.plain(): |
|
350 | if self.plain(): | |
351 | for k in ('debug', 'fallbackencoding', 'quiet', 'slash', |
|
351 | for k in ('debug', 'fallbackencoding', 'quiet', 'slash', | |
352 | 'logtemplate', 'statuscopies', 'style', |
|
352 | 'logtemplate', 'statuscopies', 'style', | |
353 | 'traceback', 'verbose'): |
|
353 | 'traceback', 'verbose'): | |
354 | if k in cfg['ui']: |
|
354 | if k in cfg['ui']: | |
355 | del cfg['ui'][k] |
|
355 | del cfg['ui'][k] | |
356 | for k, v in cfg.items('defaults'): |
|
356 | for k, v in cfg.items('defaults'): | |
357 | del cfg['defaults'][k] |
|
357 | del cfg['defaults'][k] | |
358 | for k, v in cfg.items('commands'): |
|
358 | for k, v in cfg.items('commands'): | |
359 | del cfg['commands'][k] |
|
359 | del cfg['commands'][k] | |
360 | # Don't remove aliases from the configuration if in the exceptionlist |
|
360 | # Don't remove aliases from the configuration if in the exceptionlist | |
361 | if self.plain('alias'): |
|
361 | if self.plain('alias'): | |
362 | for k, v in cfg.items('alias'): |
|
362 | for k, v in cfg.items('alias'): | |
363 | del cfg['alias'][k] |
|
363 | del cfg['alias'][k] | |
364 | if self.plain('revsetalias'): |
|
364 | if self.plain('revsetalias'): | |
365 | for k, v in cfg.items('revsetalias'): |
|
365 | for k, v in cfg.items('revsetalias'): | |
366 | del cfg['revsetalias'][k] |
|
366 | del cfg['revsetalias'][k] | |
367 | if self.plain('templatealias'): |
|
367 | if self.plain('templatealias'): | |
368 | for k, v in cfg.items('templatealias'): |
|
368 | for k, v in cfg.items('templatealias'): | |
369 | del cfg['templatealias'][k] |
|
369 | del cfg['templatealias'][k] | |
370 |
|
370 | |||
371 | if trusted: |
|
371 | if trusted: | |
372 | self._tcfg.update(cfg) |
|
372 | self._tcfg.update(cfg) | |
373 | self._tcfg.update(self._ocfg) |
|
373 | self._tcfg.update(self._ocfg) | |
374 | self._ucfg.update(cfg) |
|
374 | self._ucfg.update(cfg) | |
375 | self._ucfg.update(self._ocfg) |
|
375 | self._ucfg.update(self._ocfg) | |
376 |
|
376 | |||
377 | if root is None: |
|
377 | if root is None: | |
378 | root = os.path.expanduser('~') |
|
378 | root = os.path.expanduser('~') | |
379 | self.fixconfig(root=root) |
|
379 | self.fixconfig(root=root) | |
380 |
|
380 | |||
381 | def fixconfig(self, root=None, section=None): |
|
381 | def fixconfig(self, root=None, section=None): | |
382 | if section in (None, 'paths'): |
|
382 | if section in (None, 'paths'): | |
383 | # expand vars and ~ |
|
383 | # expand vars and ~ | |
384 | # translate paths relative to root (or home) into absolute paths |
|
384 | # translate paths relative to root (or home) into absolute paths | |
385 | root = root or pycompat.getcwd() |
|
385 | root = root or pycompat.getcwd() | |
386 | for c in self._tcfg, self._ucfg, self._ocfg: |
|
386 | for c in self._tcfg, self._ucfg, self._ocfg: | |
387 | for n, p in c.items('paths'): |
|
387 | for n, p in c.items('paths'): | |
388 | # Ignore sub-options. |
|
388 | # Ignore sub-options. | |
389 | if ':' in n: |
|
389 | if ':' in n: | |
390 | continue |
|
390 | continue | |
391 | if not p: |
|
391 | if not p: | |
392 | continue |
|
392 | continue | |
393 | if '%%' in p: |
|
393 | if '%%' in p: | |
394 | s = self.configsource('paths', n) or 'none' |
|
394 | s = self.configsource('paths', n) or 'none' | |
395 | self.warn(_("(deprecated '%%' in path %s=%s from %s)\n") |
|
395 | self.warn(_("(deprecated '%%' in path %s=%s from %s)\n") | |
396 | % (n, p, s)) |
|
396 | % (n, p, s)) | |
397 | p = p.replace('%%', '%') |
|
397 | p = p.replace('%%', '%') | |
398 | p = util.expandpath(p) |
|
398 | p = util.expandpath(p) | |
399 | if not util.hasscheme(p) and not os.path.isabs(p): |
|
399 | if not util.hasscheme(p) and not os.path.isabs(p): | |
400 | p = os.path.normpath(os.path.join(root, p)) |
|
400 | p = os.path.normpath(os.path.join(root, p)) | |
401 | c.set("paths", n, p) |
|
401 | c.set("paths", n, p) | |
402 |
|
402 | |||
403 | if section in (None, 'ui'): |
|
403 | if section in (None, 'ui'): | |
404 | # update ui options |
|
404 | # update ui options | |
405 | self.debugflag = self.configbool('ui', 'debug') |
|
405 | self.debugflag = self.configbool('ui', 'debug') | |
406 | self.verbose = self.debugflag or self.configbool('ui', 'verbose') |
|
406 | self.verbose = self.debugflag or self.configbool('ui', 'verbose') | |
407 | self.quiet = not self.debugflag and self.configbool('ui', 'quiet') |
|
407 | self.quiet = not self.debugflag and self.configbool('ui', 'quiet') | |
408 | if self.verbose and self.quiet: |
|
408 | if self.verbose and self.quiet: | |
409 | self.quiet = self.verbose = False |
|
409 | self.quiet = self.verbose = False | |
410 | self._reportuntrusted = self.debugflag or self.configbool("ui", |
|
410 | self._reportuntrusted = self.debugflag or self.configbool("ui", | |
411 | "report_untrusted") |
|
411 | "report_untrusted") | |
412 | self.tracebackflag = self.configbool('ui', 'traceback') |
|
412 | self.tracebackflag = self.configbool('ui', 'traceback') | |
413 | self.logblockedtimes = self.configbool('ui', 'logblockedtimes') |
|
413 | self.logblockedtimes = self.configbool('ui', 'logblockedtimes') | |
414 |
|
414 | |||
415 | if section in (None, 'trusted'): |
|
415 | if section in (None, 'trusted'): | |
416 | # update trust information |
|
416 | # update trust information | |
417 | self._trustusers.update(self.configlist('trusted', 'users')) |
|
417 | self._trustusers.update(self.configlist('trusted', 'users')) | |
418 | self._trustgroups.update(self.configlist('trusted', 'groups')) |
|
418 | self._trustgroups.update(self.configlist('trusted', 'groups')) | |
419 |
|
419 | |||
420 | def backupconfig(self, section, item): |
|
420 | def backupconfig(self, section, item): | |
421 | return (self._ocfg.backup(section, item), |
|
421 | return (self._ocfg.backup(section, item), | |
422 | self._tcfg.backup(section, item), |
|
422 | self._tcfg.backup(section, item), | |
423 | self._ucfg.backup(section, item),) |
|
423 | self._ucfg.backup(section, item),) | |
424 | def restoreconfig(self, data): |
|
424 | def restoreconfig(self, data): | |
425 | self._ocfg.restore(data[0]) |
|
425 | self._ocfg.restore(data[0]) | |
426 | self._tcfg.restore(data[1]) |
|
426 | self._tcfg.restore(data[1]) | |
427 | self._ucfg.restore(data[2]) |
|
427 | self._ucfg.restore(data[2]) | |
428 |
|
428 | |||
429 | def setconfig(self, section, name, value, source=''): |
|
429 | def setconfig(self, section, name, value, source=''): | |
430 | for cfg in (self._ocfg, self._tcfg, self._ucfg): |
|
430 | for cfg in (self._ocfg, self._tcfg, self._ucfg): | |
431 | cfg.set(section, name, value, source) |
|
431 | cfg.set(section, name, value, source) | |
432 | self.fixconfig(section=section) |
|
432 | self.fixconfig(section=section) | |
433 | self._maybetweakdefaults() |
|
433 | self._maybetweakdefaults() | |
434 |
|
434 | |||
435 | def _data(self, untrusted): |
|
435 | def _data(self, untrusted): | |
436 | return untrusted and self._ucfg or self._tcfg |
|
436 | return untrusted and self._ucfg or self._tcfg | |
437 |
|
437 | |||
438 | def configsource(self, section, name, untrusted=False): |
|
438 | def configsource(self, section, name, untrusted=False): | |
439 | return self._data(untrusted).source(section, name) |
|
439 | return self._data(untrusted).source(section, name) | |
440 |
|
440 | |||
441 | def config(self, section, name, default=_unset, untrusted=False): |
|
441 | def config(self, section, name, default=_unset, untrusted=False): | |
442 | """return the plain string version of a config""" |
|
442 | """return the plain string version of a config""" | |
443 | value = self._config(section, name, default=default, |
|
443 | value = self._config(section, name, default=default, | |
444 | untrusted=untrusted) |
|
444 | untrusted=untrusted) | |
445 | if value is _unset: |
|
445 | if value is _unset: | |
446 | return None |
|
446 | return None | |
447 | return value |
|
447 | return value | |
448 |
|
448 | |||
449 | def _config(self, section, name, default=_unset, untrusted=False): |
|
449 | def _config(self, section, name, default=_unset, untrusted=False): | |
450 | value = default |
|
450 | value = default | |
451 | item = self._knownconfig.get(section, {}).get(name) |
|
451 | item = self._knownconfig.get(section, {}).get(name) | |
452 | alternates = [(section, name)] |
|
452 | alternates = [(section, name)] | |
453 |
|
453 | |||
454 | if item is not None: |
|
454 | if item is not None: | |
455 | alternates.extend(item.alias) |
|
455 | alternates.extend(item.alias) | |
456 |
|
456 | |||
457 | if default is _unset: |
|
457 | if default is _unset: | |
458 | if item is None: |
|
458 | if item is None: | |
459 | value = default |
|
459 | value = default | |
460 | elif item.default is configitems.dynamicdefault: |
|
460 | elif item.default is configitems.dynamicdefault: | |
461 | value = None |
|
461 | value = None | |
462 | msg = "config item requires an explicit default value: '%s.%s'" |
|
462 | msg = "config item requires an explicit default value: '%s.%s'" | |
463 | msg %= (section, name) |
|
463 | msg %= (section, name) | |
464 | self.develwarn(msg, 2, 'warn-config-default') |
|
464 | self.develwarn(msg, 2, 'warn-config-default') | |
465 | elif callable(item.default): |
|
465 | elif callable(item.default): | |
466 | value = item.default() |
|
466 | value = item.default() | |
467 | else: |
|
467 | else: | |
468 | value = item.default |
|
468 | value = item.default | |
469 | elif (item is not None |
|
469 | elif (item is not None | |
470 | and item.default is not configitems.dynamicdefault): |
|
470 | and item.default is not configitems.dynamicdefault): | |
471 | msg = ("specifying a default value for a registered " |
|
471 | msg = ("specifying a default value for a registered " | |
472 | "config item: '%s.%s' '%s'") |
|
472 | "config item: '%s.%s' '%s'") | |
473 | msg %= (section, name, default) |
|
473 | msg %= (section, name, default) | |
474 | self.develwarn(msg, 2, 'warn-config-default') |
|
474 | self.develwarn(msg, 2, 'warn-config-default') | |
475 |
|
475 | |||
476 | for s, n in alternates: |
|
476 | for s, n in alternates: | |
477 | candidate = self._data(untrusted).get(s, n, None) |
|
477 | candidate = self._data(untrusted).get(s, n, None) | |
478 | if candidate is not None: |
|
478 | if candidate is not None: | |
479 | value = candidate |
|
479 | value = candidate | |
480 | section = s |
|
480 | section = s | |
481 | name = n |
|
481 | name = n | |
482 | break |
|
482 | break | |
483 |
|
483 | |||
484 | if self.debugflag and not untrusted and self._reportuntrusted: |
|
484 | if self.debugflag and not untrusted and self._reportuntrusted: | |
485 | for s, n in alternates: |
|
485 | for s, n in alternates: | |
486 | uvalue = self._ucfg.get(s, n) |
|
486 | uvalue = self._ucfg.get(s, n) | |
487 | if uvalue is not None and uvalue != value: |
|
487 | if uvalue is not None and uvalue != value: | |
488 | self.debug("ignoring untrusted configuration option " |
|
488 | self.debug("ignoring untrusted configuration option " | |
489 | "%s.%s = %s\n" % (s, n, uvalue)) |
|
489 | "%s.%s = %s\n" % (s, n, uvalue)) | |
490 | return value |
|
490 | return value | |
491 |
|
491 | |||
492 | def configsuboptions(self, section, name, default=_unset, untrusted=False): |
|
492 | def configsuboptions(self, section, name, default=_unset, untrusted=False): | |
493 | """Get a config option and all sub-options. |
|
493 | """Get a config option and all sub-options. | |
494 |
|
494 | |||
495 | Some config options have sub-options that are declared with the |
|
495 | Some config options have sub-options that are declared with the | |
496 | format "key:opt = value". This method is used to return the main |
|
496 | format "key:opt = value". This method is used to return the main | |
497 | option and all its declared sub-options. |
|
497 | option and all its declared sub-options. | |
498 |
|
498 | |||
499 | Returns a 2-tuple of ``(option, sub-options)``, where ``sub-options`` |
|
499 | Returns a 2-tuple of ``(option, sub-options)``, where ``sub-options`` | |
500 | is a dict of defined sub-options where keys and values are strings. |
|
500 | is a dict of defined sub-options where keys and values are strings. | |
501 | """ |
|
501 | """ | |
502 | main = self.config(section, name, default, untrusted=untrusted) |
|
502 | main = self.config(section, name, default, untrusted=untrusted) | |
503 | data = self._data(untrusted) |
|
503 | data = self._data(untrusted) | |
504 | sub = {} |
|
504 | sub = {} | |
505 | prefix = '%s:' % name |
|
505 | prefix = '%s:' % name | |
506 | for k, v in data.items(section): |
|
506 | for k, v in data.items(section): | |
507 | if k.startswith(prefix): |
|
507 | if k.startswith(prefix): | |
508 | sub[k[len(prefix):]] = v |
|
508 | sub[k[len(prefix):]] = v | |
509 |
|
509 | |||
510 | if self.debugflag and not untrusted and self._reportuntrusted: |
|
510 | if self.debugflag and not untrusted and self._reportuntrusted: | |
511 | for k, v in sub.items(): |
|
511 | for k, v in sub.items(): | |
512 | uvalue = self._ucfg.get(section, '%s:%s' % (name, k)) |
|
512 | uvalue = self._ucfg.get(section, '%s:%s' % (name, k)) | |
513 | if uvalue is not None and uvalue != v: |
|
513 | if uvalue is not None and uvalue != v: | |
514 | self.debug('ignoring untrusted configuration option ' |
|
514 | self.debug('ignoring untrusted configuration option ' | |
515 | '%s:%s.%s = %s\n' % (section, name, k, uvalue)) |
|
515 | '%s:%s.%s = %s\n' % (section, name, k, uvalue)) | |
516 |
|
516 | |||
517 | return main, sub |
|
517 | return main, sub | |
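For example, with an hgrc along the lines of the [paths] samples earlier in this file (the URLs are placeholders):

    [paths]
    default = https://example.com/hg/repo
    default:pushurl = ssh://jdoe@example.net/hg/repo

configsuboptions('paths', 'default') would return the main value together with {'pushurl': 'ssh://jdoe@example.net/hg/repo'}.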
518 |
|
518 | |||
519 | def configpath(self, section, name, default=_unset, untrusted=False): |
|
519 | def configpath(self, section, name, default=_unset, untrusted=False): | |
520 | 'get a path config item, expanded relative to repo root or config file' |
|
520 | 'get a path config item, expanded relative to repo root or config file' | |
521 | v = self.config(section, name, default, untrusted) |
|
521 | v = self.config(section, name, default, untrusted) | |
522 | if v is None: |
|
522 | if v is None: | |
523 | return None |
|
523 | return None | |
524 | if not os.path.isabs(v) or "://" not in v: |
|
524 | if not os.path.isabs(v) or "://" not in v: | |
525 | src = self.configsource(section, name, untrusted) |
|
525 | src = self.configsource(section, name, untrusted) | |
526 | if ':' in src: |
|
526 | if ':' in src: | |
527 | base = os.path.dirname(src.rsplit(':')[0]) |
|
527 | base = os.path.dirname(src.rsplit(':')[0]) | |
528 | v = os.path.join(base, os.path.expanduser(v)) |
|
528 | v = os.path.join(base, os.path.expanduser(v)) | |
529 | return v |
|
529 | return v | |
530 |
|
530 | |||
531 | def configbool(self, section, name, default=_unset, untrusted=False): |
|
531 | def configbool(self, section, name, default=_unset, untrusted=False): | |
532 | """parse a configuration element as a boolean |
|
532 | """parse a configuration element as a boolean | |
533 |
|
533 | |||
534 | >>> u = ui(); s = 'foo' |
|
534 | >>> u = ui(); s = 'foo' | |
535 | >>> u.setconfig(s, 'true', 'yes') |
|
535 | >>> u.setconfig(s, 'true', 'yes') | |
536 | >>> u.configbool(s, 'true') |
|
536 | >>> u.configbool(s, 'true') | |
537 | True |
|
537 | True | |
538 | >>> u.setconfig(s, 'false', 'no') |
|
538 | >>> u.setconfig(s, 'false', 'no') | |
539 | >>> u.configbool(s, 'false') |
|
539 | >>> u.configbool(s, 'false') | |
540 | False |
|
540 | False | |
541 | >>> u.configbool(s, 'unknown') |
|
541 | >>> u.configbool(s, 'unknown') | |
542 | False |
|
542 | False | |
543 | >>> u.configbool(s, 'unknown', True) |
|
543 | >>> u.configbool(s, 'unknown', True) | |
544 | True |
|
544 | True | |
545 | >>> u.setconfig(s, 'invalid', 'somevalue') |
|
545 | >>> u.setconfig(s, 'invalid', 'somevalue') | |
546 | >>> u.configbool(s, 'invalid') |
|
546 | >>> u.configbool(s, 'invalid') | |
547 | Traceback (most recent call last): |
|
547 | Traceback (most recent call last): | |
548 | ... |
|
548 | ... | |
549 | ConfigError: foo.invalid is not a boolean ('somevalue') |
|
549 | ConfigError: foo.invalid is not a boolean ('somevalue') | |
550 | """ |
|
550 | """ | |
551 |
|
551 | |||
552 | v = self._config(section, name, default, untrusted=untrusted) |
|
552 | v = self._config(section, name, default, untrusted=untrusted) | |
553 | if v is None: |
|
553 | if v is None: | |
554 | return v |
|
554 | return v | |
555 | if v is _unset: |
|
555 | if v is _unset: | |
556 | if default is _unset: |
|
556 | if default is _unset: | |
557 | return False |
|
557 | return False | |
558 | return default |
|
558 | return default | |
559 | if isinstance(v, bool): |
|
559 | if isinstance(v, bool): | |
560 | return v |
|
560 | return v | |
561 | b = util.parsebool(v) |
|
561 | b = util.parsebool(v) | |
562 | if b is None: |
|
562 | if b is None: | |
563 | raise error.ConfigError(_("%s.%s is not a boolean ('%s')") |
|
563 | raise error.ConfigError(_("%s.%s is not a boolean ('%s')") | |
564 | % (section, name, v)) |
|
564 | % (section, name, v)) | |
565 | return b |
|
565 | return b | |
566 |
|
566 | |||
567 | def configwith(self, convert, section, name, default=_unset, |
|
567 | def configwith(self, convert, section, name, default=_unset, | |
568 | desc=None, untrusted=False): |
|
568 | desc=None, untrusted=False): | |
569 | """parse a configuration element with a conversion function |
|
569 | """parse a configuration element with a conversion function | |
570 |
|
570 | |||
571 | >>> u = ui(); s = 'foo' |
|
571 | >>> u = ui(); s = 'foo' | |
572 | >>> u.setconfig(s, 'float1', '42') |
|
572 | >>> u.setconfig(s, 'float1', '42') | |
573 | >>> u.configwith(float, s, 'float1') |
|
573 | >>> u.configwith(float, s, 'float1') | |
574 | 42.0 |
|
574 | 42.0 | |
575 | >>> u.setconfig(s, 'float2', '-4.25') |
|
575 | >>> u.setconfig(s, 'float2', '-4.25') | |
576 | >>> u.configwith(float, s, 'float2') |
|
576 | >>> u.configwith(float, s, 'float2') | |
577 | -4.25 |
|
577 | -4.25 | |
578 | >>> u.configwith(float, s, 'unknown', 7) |
|
578 | >>> u.configwith(float, s, 'unknown', 7) | |
579 | 7.0 |
|
579 | 7.0 | |
580 | >>> u.setconfig(s, 'invalid', 'somevalue') |
|
580 | >>> u.setconfig(s, 'invalid', 'somevalue') | |
581 | >>> u.configwith(float, s, 'invalid') |
|
581 | >>> u.configwith(float, s, 'invalid') | |
582 | Traceback (most recent call last): |
|
582 | Traceback (most recent call last): | |
583 | ... |
|
583 | ... | |
584 | ConfigError: foo.invalid is not a valid float ('somevalue') |
|
584 | ConfigError: foo.invalid is not a valid float ('somevalue') | |
585 | >>> u.configwith(float, s, 'invalid', desc='womble') |
|
585 | >>> u.configwith(float, s, 'invalid', desc='womble') | |
586 | Traceback (most recent call last): |
|
586 | Traceback (most recent call last): | |
587 | ... |
|
587 | ... | |
588 | ConfigError: foo.invalid is not a valid womble ('somevalue') |
|
588 | ConfigError: foo.invalid is not a valid womble ('somevalue') | |
589 | """ |
|
589 | """ | |
590 |
|
590 | |||
591 | v = self.config(section, name, default, untrusted) |
|
591 | v = self.config(section, name, default, untrusted) | |
592 | if v is None: |
|
592 | if v is None: | |
593 | return v # do not attempt to convert None |
|
593 | return v # do not attempt to convert None | |
594 | try: |
|
594 | try: | |
595 | return convert(v) |
|
595 | return convert(v) | |
596 | except (ValueError, error.ParseError): |
|
596 | except (ValueError, error.ParseError): | |
597 | if desc is None: |
|
597 | if desc is None: | |
598 | desc = convert.__name__ |
|
598 | desc = convert.__name__ | |
599 | raise error.ConfigError(_("%s.%s is not a valid %s ('%s')") |
|
599 | raise error.ConfigError(_("%s.%s is not a valid %s ('%s')") | |
600 | % (section, name, desc, v)) |
|
600 | % (section, name, desc, v)) | |
601 |
|
601 | |||
602 | def configint(self, section, name, default=_unset, untrusted=False): |
|
602 | def configint(self, section, name, default=_unset, untrusted=False): | |
603 | """parse a configuration element as an integer |
|
603 | """parse a configuration element as an integer | |
604 |
|
604 | |||
605 | >>> u = ui(); s = 'foo' |
|
605 | >>> u = ui(); s = 'foo' | |
606 | >>> u.setconfig(s, 'int1', '42') |
|
606 | >>> u.setconfig(s, 'int1', '42') | |
607 | >>> u.configint(s, 'int1') |
|
607 | >>> u.configint(s, 'int1') | |
608 | 42 |
|
608 | 42 | |
609 | >>> u.setconfig(s, 'int2', '-42') |
|
609 | >>> u.setconfig(s, 'int2', '-42') | |
610 | >>> u.configint(s, 'int2') |
|
610 | >>> u.configint(s, 'int2') | |
611 | -42 |
|
611 | -42 | |
612 | >>> u.configint(s, 'unknown', 7) |
|
612 | >>> u.configint(s, 'unknown', 7) | |
613 | 7 |
|
613 | 7 | |
614 | >>> u.setconfig(s, 'invalid', 'somevalue') |
|
614 | >>> u.setconfig(s, 'invalid', 'somevalue') | |
615 | >>> u.configint(s, 'invalid') |
|
615 | >>> u.configint(s, 'invalid') | |
616 | Traceback (most recent call last): |
|
616 | Traceback (most recent call last): | |
617 | ... |
|
617 | ... | |
618 | ConfigError: foo.invalid is not a valid integer ('somevalue') |
|
618 | ConfigError: foo.invalid is not a valid integer ('somevalue') | |
619 | """ |
|
619 | """ | |
620 |
|
620 | |||
621 | return self.configwith(int, section, name, default, 'integer', |
|
621 | return self.configwith(int, section, name, default, 'integer', | |
622 | untrusted) |
|
622 | untrusted) | |
623 |
|
623 | |||
624 | def configbytes(self, section, name, default=_unset, untrusted=False): |
|
624 | def configbytes(self, section, name, default=_unset, untrusted=False): | |
625 | """parse a configuration element as a quantity in bytes |
|
625 | """parse a configuration element as a quantity in bytes | |
626 |
|
626 | |||
627 | Units can be specified as b (bytes), k or kb (kilobytes), m or |
|
627 | Units can be specified as b (bytes), k or kb (kilobytes), m or | |
628 | mb (megabytes), g or gb (gigabytes). |
|
628 | mb (megabytes), g or gb (gigabytes). | |
629 |
|
629 | |||
630 | >>> u = ui(); s = 'foo' |
|
630 | >>> u = ui(); s = 'foo' | |
631 | >>> u.setconfig(s, 'val1', '42') |
|
631 | >>> u.setconfig(s, 'val1', '42') | |
632 | >>> u.configbytes(s, 'val1') |
|
632 | >>> u.configbytes(s, 'val1') | |
633 | 42 |
|
633 | 42 | |
634 | >>> u.setconfig(s, 'val2', '42.5 kb') |
|
634 | >>> u.setconfig(s, 'val2', '42.5 kb') | |
635 | >>> u.configbytes(s, 'val2') |
|
635 | >>> u.configbytes(s, 'val2') | |
636 | 43520 |
|
636 | 43520 | |
637 | >>> u.configbytes(s, 'unknown', '7 MB') |
|
637 | >>> u.configbytes(s, 'unknown', '7 MB') | |
638 | 7340032 |
|
638 | 7340032 | |
639 | >>> u.setconfig(s, 'invalid', 'somevalue') |
|
639 | >>> u.setconfig(s, 'invalid', 'somevalue') | |
640 | >>> u.configbytes(s, 'invalid') |
|
640 | >>> u.configbytes(s, 'invalid') | |
641 | Traceback (most recent call last): |
|
641 | Traceback (most recent call last): | |
642 | ... |
|
642 | ... | |
643 | ConfigError: foo.invalid is not a byte quantity ('somevalue') |
|
643 | ConfigError: foo.invalid is not a byte quantity ('somevalue') | |
644 | """ |
|
644 | """ | |
645 |
|
645 | |||
646 | value = self._config(section, name, default, untrusted) |
|
646 | value = self._config(section, name, default, untrusted) | |
647 | if value is _unset: |
|
647 | if value is _unset: | |
648 | if default is _unset: |
|
648 | if default is _unset: | |
649 | default = 0 |
|
649 | default = 0 | |
650 | value = default |
|
650 | value = default | |
651 | if not isinstance(value, bytes): |
|
651 | if not isinstance(value, bytes): | |
652 | return value |
|
652 | return value | |
653 | try: |
|
653 | try: | |
654 | return util.sizetoint(value) |
|
654 | return util.sizetoint(value) | |
655 | except error.ParseError: |
|
655 | except error.ParseError: | |
656 | raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')") |
|
656 | raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')") | |
657 | % (section, name, value)) |
|
657 | % (section, name, value)) | |
658 |
|
658 | |||
659 | def configlist(self, section, name, default=_unset, untrusted=False): |
|
659 | def configlist(self, section, name, default=_unset, untrusted=False): | |
660 | """parse a configuration element as a list of comma/space separated |
|
660 | """parse a configuration element as a list of comma/space separated | |
661 | strings |
|
661 | strings | |
662 |
|
662 | |||
663 | >>> u = ui(); s = 'foo' |
|
663 | >>> u = ui(); s = 'foo' | |
664 | >>> u.setconfig(s, 'list1', 'this,is "a small" ,test') |
|
664 | >>> u.setconfig(s, 'list1', 'this,is "a small" ,test') | |
665 | >>> u.configlist(s, 'list1') |
|
665 | >>> u.configlist(s, 'list1') | |
666 | ['this', 'is', 'a small', 'test'] |
|
666 | ['this', 'is', 'a small', 'test'] | |
667 | """ |
|
667 | """ | |
668 | # default is not always a list |
|
668 | # default is not always a list | |
669 | v = self.configwith(config.parselist, section, name, default, |
|
669 | v = self.configwith(config.parselist, section, name, default, | |
670 | 'list', untrusted) |
|
670 | 'list', untrusted) | |
671 | if isinstance(v, bytes): |
|
671 | if isinstance(v, bytes): | |
672 | return config.parselist(v) |
|
672 | return config.parselist(v) | |
673 | elif v is None: |
|
673 | elif v is None: | |
674 | return [] |
|
674 | return [] | |
675 | return v |
|
675 | return v | |
676 |
|
676 | |||
677 | def configdate(self, section, name, default=_unset, untrusted=False): |
|
677 | def configdate(self, section, name, default=_unset, untrusted=False): | |
678 | """parse a configuration element as a tuple of ints |
|
678 | """parse a configuration element as a tuple of ints | |
679 |
|
679 | |||
680 | >>> u = ui(); s = 'foo' |
|
680 | >>> u = ui(); s = 'foo' | |
681 | >>> u.setconfig(s, 'date', '0 0') |
|
681 | >>> u.setconfig(s, 'date', '0 0') | |
682 | >>> u.configdate(s, 'date') |
|
682 | >>> u.configdate(s, 'date') | |
683 | (0, 0) |
|
683 | (0, 0) | |
684 | """ |
|
684 | """ | |
685 | if self.config(section, name, default, untrusted): |
|
685 | if self.config(section, name, default, untrusted): | |
686 | return self.configwith(util.parsedate, section, name, default, |
|
686 | return self.configwith(util.parsedate, section, name, default, | |
687 | 'date', untrusted) |
|
687 | 'date', untrusted) | |
688 | if default is _unset: |
|
688 | if default is _unset: | |
689 | return None |
|
689 | return None | |
690 | return default |
|
690 | return default | |
691 |
|
691 | |||
    def hasconfig(self, section, name, untrusted=False):
        return self._data(untrusted).hasitem(section, name)

    def has_section(self, section, untrusted=False):
        '''tell whether section exists in config.'''
        return section in self._data(untrusted)

    def configitems(self, section, untrusted=False, ignoresub=False):
        items = self._data(untrusted).items(section)
        if ignoresub:
            newitems = {}
            for k, v in items:
                if ':' not in k:
                    newitems[k] = v
            items = newitems.items()
        if self.debugflag and not untrusted and self._reportuntrusted:
            for k, v in self._ucfg.items(section):
                if self._tcfg.get(section, k) != v:
                    self.debug("ignoring untrusted configuration option "
                               "%s.%s = %s\n" % (section, k, v))
        return items

    def walkconfig(self, untrusted=False):
        cfg = self._data(untrusted)
        for section in cfg.sections():
            for name, value in self.configitems(section, untrusted):
                yield section, name, value

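    # Illustrative sketch (not part of the original file): how the two
    # accessors above relate, assuming a configured ui instance `u`.
    # configitems() returns one section's key/value pairs, while
    # walkconfig() flattens every section:
    #
    #     for key, value in u.configitems('alias', ignoresub=True):
    #         print('%s = %s' % (key, value))
    #     for section, key, value in u.walkconfig():
    #         print('%s.%s=%s' % (section, key, value))
    #
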
    def plain(self, feature=None):
        '''is plain mode active?

        Plain mode means that all configuration variables which affect
        the behavior and output of Mercurial should be
        ignored. Additionally, the output should be stable,
        reproducible and suitable for use in scripts or applications.

        The only way to trigger plain mode is by setting either the
        `HGPLAIN' or `HGPLAINEXCEPT' environment variables.

        The return value can either be
        - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
        - True otherwise
        '''
        if ('HGPLAIN' not in encoding.environ and
            'HGPLAINEXCEPT' not in encoding.environ):
            return False
        exceptions = encoding.environ.get('HGPLAINEXCEPT',
                                          '').strip().split(',')
        if feature and exceptions:
            return feature not in exceptions
        return True

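    # Illustrative sketch (not part of the original file) of the semantics
    # above, assuming a ui instance `u`:
    #
    #     # HGPLAIN unset, HGPLAINEXCEPT unset   -> u.plain() == False
    #     # HGPLAIN=1                            -> u.plain() == True
    #     # HGPLAINEXCEPT=i18n,alias             -> u.plain('alias') == False
    #     #                                         u.plain('color') == True
    #
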
    def username(self):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
        the search stops at the first one that is set.
        If not found and ui.askusername is True, ask the user, else use
        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
        """
        user = encoding.environ.get("HGUSER")
        if user is None:
            user = self.config("ui", "username")
            if user is not None:
                user = os.path.expandvars(user)
        if user is None:
            user = encoding.environ.get("EMAIL")
        if user is None and self.configbool("ui", "askusername"):
            user = self.prompt(_("enter a commit username:"), default=None)
        if user is None and not self.interactive():
            try:
                user = '%s@%s' % (util.getuser(), socket.getfqdn())
                self.warn(_("no username found, using '%s' instead\n") % user)
            except KeyError:
                pass
        if not user:
            raise error.Abort(_('no username supplied'),
                              hint=_("use 'hg config --edit' "
                                     'to set your username'))
        if "\n" in user:
            raise error.Abort(_("username %s contains a newline\n")
                              % repr(user))
        return user

    def shortuser(self, user):
        """Return a short representation of a user name or email address."""
        if not self.verbose:
            user = util.shortuser(user)
        return user

    def expandpath(self, loc, default=None):
        """Return repository location relative to cwd or from [paths]"""
        try:
            p = self.paths.getpath(loc)
            if p:
                return p.rawloc
        except error.RepoError:
            pass

        if default:
            try:
                p = self.paths.getpath(default)
                if p:
                    return p.rawloc
            except error.RepoError:
                pass

        return loc

    @util.propertycache
    def paths(self):
        return paths(self)

    def pushbuffer(self, error=False, subproc=False, labeled=False):
        """install a buffer to capture standard output of the ui object

        If error is True, the error output will be captured too.

        If subproc is True, output from subprocesses (typically hooks) will be
        captured too.

        If labeled is True, any labels associated with buffered
        output will be handled. By default, this has no effect
        on the output returned, but extensions and GUI tools may
        handle this argument and return styled output. If output
        is being buffered so it can be captured and parsed or
        processed, labeled should not be set to True.
        """
        self._buffers.append([])
        self._bufferstates.append((error, subproc, labeled))
        self._bufferapplylabels = labeled

    def popbuffer(self):
        '''pop the last buffer and return the buffered output'''
        self._bufferstates.pop()
        if self._bufferstates:
            self._bufferapplylabels = self._bufferstates[-1][2]
        else:
            self._bufferapplylabels = None

        return "".join(self._buffers.pop())

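    # Illustrative sketch (not part of the original file): capturing output
    # instead of letting it reach stdout, assuming a ui instance `u`:
    #
    #     u.pushbuffer()
    #     u.write('hello ', 'world\n')
    #     captured = u.popbuffer()    # 'hello world\n'; nothing was printed
    #
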
    def write(self, *args, **opts):
        '''write args to output

        By default, this method simply writes to the buffer or stdout.
        Color mode can be set on the UI class to have the output decorated
        with a color modifier before being written to stdout.

        The color used is controlled by an optional keyword argument, "label".
        This should be a string containing label names separated by space.
        Label names take the form of "topic.type". For example, ui.debug()
        issues a label of "ui.debug".

        When labeling output for a specific command, a label of
        "cmdname.type" is recommended. For example, status issues
        a label of "status.modified" for modified files.
        '''
        if self._buffers and not opts.get('prompt', False):
            if self._bufferapplylabels:
                label = opts.get('label', '')
                self._buffers[-1].extend(self.label(a, label) for a in args)
            else:
                self._buffers[-1].extend(args)
        elif self._colormode == 'win32':
            # windows color printing is its own can of crab, defer to
            # the color module and that is it.
            color.win32print(self, self._write, *args, **opts)
        else:
            msgs = args
            if self._colormode is not None:
                label = opts.get('label', '')
                msgs = [self.label(a, label) for a in args]
            self._write(*msgs, **opts)

    def _write(self, *msgs, **opts):
        self._progclear()
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            for a in msgs:
                self.fout.write(a)
        except IOError as err:
            raise error.StdioError(err)
        finally:
            self._blockedtimes['stdio_blocked'] += \
                (util.timer() - starttime) * 1000

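    # Illustrative sketch (not part of the original file): the "label"
    # keyword described above, assuming a ui instance `u` with color enabled.
    # The label never appears in the text itself; it only selects styling:
    #
    #     u.write('M file.txt\n', label='status.modified')
    #     u.write('some diagnostics\n', label='ui.debug')
    #
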
    def write_err(self, *args, **opts):
        self._progclear()
        if self._bufferstates and self._bufferstates[-1][0]:
            self.write(*args, **opts)
        elif self._colormode == 'win32':
            # windows color printing is its own can of crab, defer to
            # the color module and that is it.
            color.win32print(self, self._write_err, *args, **opts)
        else:
            msgs = args
            if self._colormode is not None:
                label = opts.get('label', '')
                msgs = [self.label(a, label) for a in args]
            self._write_err(*msgs, **opts)

    def _write_err(self, *msgs, **opts):
        try:
            with self.timeblockedsection('stdio'):
                if not getattr(self.fout, 'closed', False):
                    self.fout.flush()
                for a in msgs:
                    self.ferr.write(a)
                # stderr may be buffered under win32 when redirected to files,
                # including stdout.
                if not getattr(self.ferr, 'closed', False):
                    self.ferr.flush()
        except IOError as inst:
            raise error.StdioError(inst)

    def flush(self):
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            try:
                self.fout.flush()
            except IOError as err:
                raise error.StdioError(err)
            finally:
                try:
                    self.ferr.flush()
                except IOError as err:
                    raise error.StdioError(err)
        finally:
            self._blockedtimes['stdio_blocked'] += \
                (util.timer() - starttime) * 1000

    def _isatty(self, fh):
        if self.configbool('ui', 'nontty'):
            return False
        return util.isatty(fh)

    def disablepager(self):
        self._disablepager = True

|
934 | def pager(self, command): | |
935 | """Start a pager for subsequent command output. |
|
935 | """Start a pager for subsequent command output. | |
936 |
|
936 | |||
937 | Commands which produce a long stream of output should call |
|
937 | Commands which produce a long stream of output should call | |
938 | this function to activate the user's preferred pagination |
|
938 | this function to activate the user's preferred pagination | |
939 | mechanism (which may be no pager). Calling this function |
|
939 | mechanism (which may be no pager). Calling this function | |
940 | precludes any future use of interactive functionality, such as |
|
940 | precludes any future use of interactive functionality, such as | |
941 | prompting the user or activating curses. |
|
941 | prompting the user or activating curses. | |
942 |
|
942 | |||
943 | Args: |
|
943 | Args: | |
944 | command: The full, non-aliased name of the command. That is, "log" |
|
944 | command: The full, non-aliased name of the command. That is, "log" | |
945 | not "history, "summary" not "summ", etc. |
|
945 | not "history, "summary" not "summ", etc. | |
946 | """ |
|
946 | """ | |
947 | if (self._disablepager |
|
947 | if (self._disablepager | |
948 | or self.pageractive |
|
948 | or self.pageractive): | |
949 | or command in self.configlist('pager', 'ignore') |
|
949 | # how pager should do is already determined | |
|
950 | return | |||
|
951 | ||||
|
952 | if not command.startswith('internal-always-') and ( | |||
|
953 | # explicit --pager=on (= 'internal-always-' prefix) should | |||
|
954 | # take precedence over disabling factors below | |||
|
955 | command in self.configlist('pager', 'ignore') | |||
950 | or not self.configbool('ui', 'paginate') |
|
956 | or not self.configbool('ui', 'paginate') | |
951 | or not self.configbool('pager', 'attend-' + command, True) |
|
957 | or not self.configbool('pager', 'attend-' + command, True) | |
952 | # TODO: if we want to allow HGPLAINEXCEPT=pager, |
|
958 | # TODO: if we want to allow HGPLAINEXCEPT=pager, | |
953 | # formatted() will need some adjustment. |
|
959 | # formatted() will need some adjustment. | |
954 | or not self.formatted() |
|
960 | or not self.formatted() | |
955 | or self.plain() |
|
961 | or self.plain() | |
956 | # TODO: expose debugger-enabled on the UI object |
|
962 | # TODO: expose debugger-enabled on the UI object | |
957 | or '--debugger' in pycompat.sysargv): |
|
963 | or '--debugger' in pycompat.sysargv): | |
958 | # We only want to paginate if the ui appears to be |
|
964 | # We only want to paginate if the ui appears to be | |
959 | # interactive, the user didn't say HGPLAIN or |
|
965 | # interactive, the user didn't say HGPLAIN or | |
960 | # HGPLAINEXCEPT=pager, and the user didn't specify --debug. |
|
966 | # HGPLAINEXCEPT=pager, and the user didn't specify --debug. | |
961 | return |
|
967 | return | |
962 |
|
968 | |||
963 | pagercmd = self.config('pager', 'pager', rcutil.fallbackpager) |
|
969 | pagercmd = self.config('pager', 'pager', rcutil.fallbackpager) | |
964 | if not pagercmd: |
|
970 | if not pagercmd: | |
965 | return |
|
971 | return | |
966 |
|
972 | |||
967 | pagerenv = {} |
|
973 | pagerenv = {} | |
968 | for name, value in rcutil.defaultpagerenv().items(): |
|
974 | for name, value in rcutil.defaultpagerenv().items(): | |
969 | if name not in encoding.environ: |
|
975 | if name not in encoding.environ: | |
970 | pagerenv[name] = value |
|
976 | pagerenv[name] = value | |
971 |
|
977 | |||
972 | self.debug('starting pager for command %r\n' % command) |
|
978 | self.debug('starting pager for command %r\n' % command) | |
973 | self.flush() |
|
979 | self.flush() | |
974 |
|
980 | |||
975 | wasformatted = self.formatted() |
|
981 | wasformatted = self.formatted() | |
976 | if util.safehasattr(signal, "SIGPIPE"): |
|
982 | if util.safehasattr(signal, "SIGPIPE"): | |
977 | signal.signal(signal.SIGPIPE, _catchterm) |
|
983 | signal.signal(signal.SIGPIPE, _catchterm) | |
978 | if self._runpager(pagercmd, pagerenv): |
|
984 | if self._runpager(pagercmd, pagerenv): | |
979 | self.pageractive = True |
|
985 | self.pageractive = True | |
980 | # Preserve the formatted-ness of the UI. This is important |
|
986 | # Preserve the formatted-ness of the UI. This is important | |
981 | # because we mess with stdout, which might confuse |
|
987 | # because we mess with stdout, which might confuse | |
982 | # auto-detection of things being formatted. |
|
988 | # auto-detection of things being formatted. | |
983 | self.setconfig('ui', 'formatted', wasformatted, 'pager') |
|
989 | self.setconfig('ui', 'formatted', wasformatted, 'pager') | |
984 | self.setconfig('ui', 'interactive', False, 'pager') |
|
990 | self.setconfig('ui', 'interactive', False, 'pager') | |
985 |
|
991 | |||
986 | # If pagermode differs from color.mode, reconfigure color now that |
|
992 | # If pagermode differs from color.mode, reconfigure color now that | |
987 | # pageractive is set. |
|
993 | # pageractive is set. | |
988 | cm = self._colormode |
|
994 | cm = self._colormode | |
989 | if cm != self.config('color', 'pagermode', cm): |
|
995 | if cm != self.config('color', 'pagermode', cm): | |
990 | color.setup(self) |
|
996 | color.setup(self) | |
991 | else: |
|
997 | else: | |
992 | # If the pager can't be spawned in dispatch when --pager=on is |
|
998 | # If the pager can't be spawned in dispatch when --pager=on is | |
993 | # given, don't try again when the command runs, to avoid a duplicate |
|
999 | # given, don't try again when the command runs, to avoid a duplicate | |
994 | # warning about a missing pager command. |
|
1000 | # warning about a missing pager command. | |
995 | self.disablepager() |
|
1001 | self.disablepager() | |
996 |
|
1002 | |||
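    # Illustrative sketch (not part of the original file): a command that
    # expects long output would typically activate the pager before writing
    # anything, e.g. (the command name below is hypothetical):
    #
    #     def mycommand(ui, repo):
    #         ui.pager('mycommand')    # no-op if paging is disabled/ignored
    #         for rev in repo:
    #             ui.write('%d\n' % rev)
    #
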
    def _runpager(self, command, env=None):
        """Actually start the pager and set up file descriptors.

        This is separate in part so that extensions (like chg) can
        override how a pager is invoked.
        """
        if command == 'cat':
            # Save ourselves some work.
            return False
        # If the command doesn't contain any of these characters, we
        # assume it's a binary and exec it directly. This means for
        # simple pager command configurations, we can degrade
        # gracefully and tell the user about their broken pager.
        shell = any(c in command for c in "|&;<>()$`\\\"' \t\n*?[#~=%")

        if pycompat.osname == 'nt' and not shell:
            # Windows' built-in `more` cannot be invoked with shell=False, but
            # its `more.com` can. Hide this implementation detail from the
            # user so we can also get sane bad PAGER behavior. MSYS has
            # `more.exe`, so do a cmd.exe style resolution of the executable to
            # determine which one to use.
            fullcmd = util.findexe(command)
            if not fullcmd:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False

            command = fullcmd

        try:
            pager = subprocess.Popen(
                command, shell=shell, bufsize=-1,
                close_fds=util.closefds, stdin=subprocess.PIPE,
                stdout=util.stdout, stderr=util.stderr,
                env=util.shellenviron(env))
        except OSError as e:
            if e.errno == errno.ENOENT and not shell:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False
            raise

        # back up original file descriptors
        stdoutfd = os.dup(util.stdout.fileno())
        stderrfd = os.dup(util.stderr.fileno())

        os.dup2(pager.stdin.fileno(), util.stdout.fileno())
        if self._isatty(util.stderr):
            os.dup2(pager.stdin.fileno(), util.stderr.fileno())

        @self.atexit
        def killpager():
            if util.safehasattr(signal, "SIGINT"):
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            # restore original fds, closing pager.stdin copies in the process
            os.dup2(stdoutfd, util.stdout.fileno())
            os.dup2(stderrfd, util.stderr.fileno())
            pager.stdin.close()
            pager.wait()

        return True

    def atexit(self, func, *args, **kwargs):
        '''register a function to run after dispatching a request

        Handlers do not stay registered across request boundaries.'''
        self._exithandlers.append((func, args, kwargs))
        return func

    def interface(self, feature):
        """what interface to use for interactive console features?

        The interface is controlled by the value of `ui.interface` but also by
        the value of feature-specific configuration. For example:

        ui.interface.histedit = text
        ui.interface.chunkselector = curses

        Here the features are "histedit" and "chunkselector".

        The configuration above means that the default interface for commands
        is curses, the interface for histedit is text and the interface for
        selecting chunks is crecord (the best curses interface available).

        Consider the following example:
        ui.interface = curses
        ui.interface.histedit = text

        Then histedit will use the text interface and chunkselector will use
        the default curses interface (crecord at the moment).
        """
        alldefaults = frozenset(["text", "curses"])

        featureinterfaces = {
            "chunkselector": [
                "text",
                "curses",
            ]
        }

        # Feature-specific interface
        if feature not in featureinterfaces.keys():
            # Programming error, not user error
            raise ValueError("Unknown feature requested %s" % feature)

        availableinterfaces = frozenset(featureinterfaces[feature])
        if alldefaults > availableinterfaces:
            # Programming error, not user error. We need a use case to
            # define the right thing to do here.
            raise ValueError(
                "Feature %s does not handle all default interfaces" %
                feature)

        if self.plain():
            return "text"

        # Default interface for all the features
        defaultinterface = "text"
        i = self.config("ui", "interface")
        if i in alldefaults:
            defaultinterface = i

        choseninterface = defaultinterface
        f = self.config("ui", "interface.%s" % feature, None)
        if f in availableinterfaces:
            choseninterface = f

        if i is not None and defaultinterface != i:
            if f is not None:
                self.warn(_("invalid value for ui.interface: %s\n") %
                          (i,))
            else:
                self.warn(_("invalid value for ui.interface: %s (using %s)\n") %
                          (i, choseninterface))
        if f is not None and choseninterface != f:
            self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") %
                      (feature, f, choseninterface))

        return choseninterface

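    # Illustrative sketch (not part of the original file): with an hgrc
    # containing
    #
    #     [ui]
    #     interface = curses
    #     interface.chunkselector = text
    #
    # a call such as `u.interface("chunkselector")` would return "text",
    # while plain mode (HGPLAIN) forces "text" regardless of configuration.
    #
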
    def interactive(self):
        '''is interactive input allowed?

        An interactive session is a session where input can be reasonably read
        from `sys.stdin'. If this function returns false, any attempt to read
        from stdin should fail with an error, unless a sensible default has been
        specified.

        Interactiveness is triggered by the value of the `ui.interactive'
        configuration variable or - if it is unset - when `sys.stdin' points
        to a terminal device.

        This function refers to input only; for output, see `ui.formatted()'.
        '''
        i = self.configbool("ui", "interactive")
        if i is None:
            # some environments replace stdin without implementing isatty
            # usually those are non-interactive
            return self._isatty(self.fin)

        return i

    def termwidth(self):
        '''how wide is the terminal in columns?
        '''
        if 'COLUMNS' in encoding.environ:
            try:
                return int(encoding.environ['COLUMNS'])
            except ValueError:
                pass
        return scmutil.termsize(self)[0]

    def formatted(self):
        '''should formatted output be used?

        It is often desirable to format the output to suit the output medium.
        Examples of this are truncating long lines or colorizing messages.
        However, this is often not desirable when piping output into other
        utilities, e.g. `grep'.

        Formatted output is triggered by the value of the `ui.formatted'
        configuration variable or - if it is unset - when `sys.stdout' points
        to a terminal device. Please note that `ui.formatted' should be
        considered an implementation detail; it is not intended for use outside
        Mercurial or its extensions.

        This function refers to output only; for input, see `ui.interactive()'.
        This function always returns false when in plain mode, see `ui.plain()'.
        '''
        if self.plain():
            return False

        i = self.configbool("ui", "formatted")
        if i is None:
            # some environments replace stdout without implementing isatty
            # usually those are non-interactive
            return self._isatty(self.fout)

        return i

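    # Illustrative sketch (not part of the original file): the COLUMNS
    # environment variable takes precedence over terminal detection, so
    #
    #     COLUMNS=120 hg log
    #
    # would make termwidth() report 120 even on a narrower terminal, and it
    # falls back to scmutil.termsize() when COLUMNS is unset or invalid.
    #
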
    def _readline(self, prompt=''):
        if self._isatty(self.fin):
            try:
                # magically add command line editing support, where
                # available
                import readline
                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                pass

        # call write() so output goes through subclassed implementation
        # e.g. color extension on Windows
        self.write(prompt, prompt=True)
        self.flush()

        # instead of trying to emulate raw_input, swap (self.fin,
        # self.fout) with (sys.stdin, sys.stdout)
        oldin = sys.stdin
        oldout = sys.stdout
        sys.stdin = self.fin
        sys.stdout = self.fout
        # prompt ' ' must exist; otherwise readline may delete entire line
        # - http://bugs.python.org/issue12833
        with self.timeblockedsection('stdio'):
            line = raw_input(' ')
        sys.stdin = oldin
        sys.stdout = oldout

        # When stdin is in binary mode on Windows, it can cause
        # raw_input() to emit an extra trailing carriage return
        if pycompat.oslinesep == '\r\n' and line and line[-1] == '\r':
            line = line[:-1]
        return line

    def prompt(self, msg, default="y"):
        """Prompt user with msg, read response.
        If ui is not interactive, the default is returned.
        """
        if not self.interactive():
            self.write(msg, ' ', default or '', "\n")
            return default
        try:
            r = self._readline(self.label(msg, 'ui.prompt'))
            if not r:
                r = default
            if self.configbool('ui', 'promptecho'):
                self.write(r, "\n")
            return r
        except EOFError:
            raise error.ResponseExpected()

|
1256 | @staticmethod | |
1250 | def extractchoices(prompt): |
|
1257 | def extractchoices(prompt): | |
1251 | """Extract prompt message and list of choices from specified prompt. |
|
1258 | """Extract prompt message and list of choices from specified prompt. | |
1252 |
|
1259 | |||
1253 | This returns tuple "(message, choices)", and "choices" is the |
|
1260 | This returns tuple "(message, choices)", and "choices" is the | |
1254 | list of tuple "(response character, text without &)". |
|
1261 | list of tuple "(response character, text without &)". | |
1255 |
|
1262 | |||
1256 | >>> ui.extractchoices("awake? $$ &Yes $$ &No") |
|
1263 | >>> ui.extractchoices("awake? $$ &Yes $$ &No") | |
1257 | ('awake? ', [('y', 'Yes'), ('n', 'No')]) |
|
1264 | ('awake? ', [('y', 'Yes'), ('n', 'No')]) | |
1258 | >>> ui.extractchoices("line\\nbreak? $$ &Yes $$ &No") |
|
1265 | >>> ui.extractchoices("line\\nbreak? $$ &Yes $$ &No") | |
1259 | ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')]) |
|
1266 | ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')]) | |
1260 | >>> ui.extractchoices("want lots of $$money$$?$$Ye&s$$N&o") |
|
1267 | >>> ui.extractchoices("want lots of $$money$$?$$Ye&s$$N&o") | |
1261 | ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')]) |
|
1268 | ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')]) | |
1262 | """ |
|
1269 | """ | |
1263 |
|
1270 | |||
1264 | # Sadly, the prompt string may have been built with a filename |
|
1271 | # Sadly, the prompt string may have been built with a filename | |
1265 | # containing "$$" so let's try to find the first valid-looking |
|
1272 | # containing "$$" so let's try to find the first valid-looking | |
1266 | # prompt to start parsing. Sadly, we also can't rely on |
|
1273 | # prompt to start parsing. Sadly, we also can't rely on | |
1267 | # choices containing spaces, ASCII, or basically anything |
|
1274 | # choices containing spaces, ASCII, or basically anything | |
1268 | # except an ampersand followed by a character. |
|
1275 | # except an ampersand followed by a character. | |
1269 | m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt) |
|
1276 | m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt) | |
1270 | msg = m.group(1) |
|
1277 | msg = m.group(1) | |
1271 | choices = [p.strip(' ') for p in m.group(2).split('$$')] |
|
1278 | choices = [p.strip(' ') for p in m.group(2).split('$$')] | |
1272 | def choicetuple(s): |
|
1279 | def choicetuple(s): | |
1273 | ampidx = s.index('&') |
|
1280 | ampidx = s.index('&') | |
1274 | return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1) |
|
1281 | return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1) | |
1275 | return (msg, [choicetuple(s) for s in choices]) |
|
1282 | return (msg, [choicetuple(s) for s in choices]) | |
1276 |
|
1283 | |||
1277 | def promptchoice(self, prompt, default=0): |
|
1284 | def promptchoice(self, prompt, default=0): | |
1278 | """Prompt user with a message, read response, and ensure it matches |
|
1285 | """Prompt user with a message, read response, and ensure it matches | |
1279 | one of the provided choices. The prompt is formatted as follows: |
|
1286 | one of the provided choices. The prompt is formatted as follows: | |
1280 |
|
1287 | |||
1281 | "would you like fries with that (Yn)? $$ &Yes $$ &No" |
|
1288 | "would you like fries with that (Yn)? $$ &Yes $$ &No" | |
1282 |
|
1289 | |||
1283 | The index of the choice is returned. Responses are case |
|
1290 | The index of the choice is returned. Responses are case | |
1284 | insensitive. If ui is not interactive, the default is |
|
1291 | insensitive. If ui is not interactive, the default is | |
1285 | returned. |
|
1292 | returned. | |
1286 | """ |
|
1293 | """ | |
1287 |
|
1294 | |||
1288 | msg, choices = self.extractchoices(prompt) |
|
1295 | msg, choices = self.extractchoices(prompt) | |
1289 | resps = [r for r, t in choices] |
|
1296 | resps = [r for r, t in choices] | |
1290 | while True: |
|
1297 | while True: | |
1291 | r = self.prompt(msg, resps[default]) |
|
1298 | r = self.prompt(msg, resps[default]) | |
1292 | if r.lower() in resps: |
|
1299 | if r.lower() in resps: | |
1293 | return resps.index(r.lower()) |
|
1300 | return resps.index(r.lower()) | |
1294 | self.write(_("unrecognized response\n")) |
|
1301 | self.write(_("unrecognized response\n")) | |
1295 |
|
1302 | |||
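    # Illustrative sketch (not part of the original file): asking a yes/no
    # question via the helpers above, assuming a ui instance `u`:
    #
    #     idx = u.promptchoice(_('apply this change (Yn)? $$ &Yes $$ &No'))
    #     if idx == 0:    # 0 -> 'Yes', 1 -> 'No'
    #         pass        # apply it
    #
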
    def getpass(self, prompt=None, default=None):
        if not self.interactive():
            return default
        try:
            self.write_err(self.label(prompt or _('password: '), 'ui.prompt'))
            # disable getpass() only if explicitly specified. it's still valid
            # to interact with tty even if fin is not a tty.
            with self.timeblockedsection('stdio'):
                if self.configbool('ui', 'nontty'):
                    l = self.fin.readline()
                    if not l:
                        raise EOFError
                    return l.rstrip('\n')
                else:
                    return getpass.getpass('')
        except EOFError:
            raise error.ResponseExpected()
    def status(self, *msg, **opts):
        '''write status message to output (if ui.quiet is False)

        This adds an output label of "ui.status".
        '''
        if not self.quiet:
            opts[r'label'] = opts.get(r'label', '') + ' ui.status'
            self.write(*msg, **opts)
    def warn(self, *msg, **opts):
        '''write warning message to output (stderr)

        This adds an output label of "ui.warning".
        '''
        opts[r'label'] = opts.get(r'label', '') + ' ui.warning'
        self.write_err(*msg, **opts)
    def note(self, *msg, **opts):
        '''write note to output (if ui.verbose is True)

        This adds an output label of "ui.note".
        '''
        if self.verbose:
            opts[r'label'] = opts.get(r'label', '') + ' ui.note'
            self.write(*msg, **opts)
    def debug(self, *msg, **opts):
        '''write debug message to output (if ui.debugflag is True)

        This adds an output label of "ui.debug".
        '''
        if self.debugflag:
            opts[r'label'] = opts.get(r'label', '') + ' ui.debug'
            self.write(*msg, **opts)

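    # Illustrative sketch (not part of the original file): the four output
    # levels above, assuming a ui instance `u`:
    #
    #     u.status('updating to branch default\n')   # hidden with --quiet
    #     u.note('resolving manifests\n')            # shown with --verbose
    #     u.debug('query 1; heads\n')                # shown with --debug
    #     u.warn('abort: no username supplied\n')    # always, to stderr
    #
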
    def edit(self, text, user, extra=None, editform=None, pending=None,
             repopath=None):
        extra_defaults = {
            'prefix': 'editor',
            'suffix': '.txt',
        }
        if extra is not None:
            extra_defaults.update(extra)
        extra = extra_defaults

        rdir = None
        if self.configbool('experimental', 'editortmpinhg'):
            rdir = repopath
        (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
                                      suffix=extra['suffix'],
                                      dir=rdir)
        try:
            f = os.fdopen(fd, r'wb')
            f.write(util.tonativeeol(text))
            f.close()

            environ = {'HGUSER': user}
            if 'transplant_source' in extra:
                environ.update({'HGREVISION': hex(extra['transplant_source'])})
            for label in ('intermediate-source', 'source', 'rebase_source'):
                if label in extra:
                    environ.update({'HGREVISION': extra[label]})
                    break
            if editform:
                environ.update({'HGEDITFORM': editform})
            if pending:
                environ.update({'HG_PENDING': pending})

            editor = self.geteditor()

            self.system("%s \"%s\"" % (editor, name),
                        environ=environ,
                        onerr=error.Abort, errprefix=_("edit failed"),
                        blockedtag='editor')

            f = open(name, r'rb')
            t = util.fromnativeeol(f.read())
            f.close()
        finally:
            os.unlink(name)

        return t

    def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None,
               blockedtag=None):
        '''execute shell command with appropriate output stream. command
        output will be redirected if fout is not stdout.

        if command fails and onerr is None, return status, else raise onerr
        object as exception.
        '''
        if blockedtag is None:
            # Long cmds tend to be because of an absolute path on cmd. Keep
            # the tail end instead
            cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
            blockedtag = 'unknown_system_' + cmdsuffix
        out = self.fout
        if any(s[1] for s in self._bufferstates):
            out = self
        with self.timeblockedsection(blockedtag):
            rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
                                util.explainexit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            raise onerr(errmsg)
        return rc

    def _runsystem(self, cmd, environ, cwd, out):
        """actually execute the given shell command (can be overridden by
        extensions like chg)"""
        return util.system(cmd, environ=environ, cwd=cwd, out=out)

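    # Illustrative sketch (not part of the original file): running a shell
    # command and turning a non-zero exit status into an abort, assuming a
    # ui instance `u`:
    #
    #     rc = u.system('ls -l', blockedtag='example')   # returns the status
    #     u.system('false', onerr=error.Abort,
    #              errprefix=_('example failed'))         # raises error.Abort
    #
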
    def traceback(self, exc=None, force=False):
        '''print exception traceback if traceback printing enabled or forced.
        only to be called in an exception handler. returns true if traceback
        printed.'''
        if self.tracebackflag or force:
            if exc is None:
                exc = sys.exc_info()
            cause = getattr(exc[1], 'cause', None)

            if cause is not None:
                causetb = traceback.format_tb(cause[2])
                exctb = traceback.format_tb(exc[2])
                exconly = traceback.format_exception_only(cause[0], cause[1])

                # exclude frame where 'exc' was chained and rethrown from exctb
                self.write_err('Traceback (most recent call last):\n',
                               ''.join(exctb[:-1]),
                               ''.join(causetb),
                               ''.join(exconly))
            else:
                output = traceback.format_exception(exc[0], exc[1], exc[2])
                data = r''.join(output)
                if pycompat.ispy3:
                    enc = pycompat.sysstr(encoding.encoding)
                    data = data.encode(enc, errors=r'replace')
                self.write_err(data)
        return self.tracebackflag or force

    def geteditor(self):
        '''return editor to use'''
        if pycompat.sysplatform == 'plan9':
            # vi is the MIPS instruction simulator on Plan 9. We
            # instead default to E to plumb commit messages to
            # avoid confusion.
            editor = 'E'
        else:
            editor = 'vi'
        return (encoding.environ.get("HGEDITOR") or
                self.config("ui", "editor", editor))

    @util.propertycache
    def _progbar(self):
        """setup the progbar singleton to the ui object"""
        if (self.quiet or self.debugflag
            or self.configbool('progress', 'disable')
1469 | or not progress.shouldprint(self)): |
|
1476 | or not progress.shouldprint(self)): | |
1470 | return None |
|
1477 | return None | |
1471 | return getprogbar(self) |
|
1478 | return getprogbar(self) | |
1472 |
|
1479 | |||
1473 | def _progclear(self): |
|
1480 | def _progclear(self): | |
1474 | """clear progress bar output if any. use it before any output""" |
|
1481 | """clear progress bar output if any. use it before any output""" | |
1475 | if '_progbar' not in vars(self): # nothing loaded yet |
|
1482 | if '_progbar' not in vars(self): # nothing loaded yet | |
1476 | return |
|
1483 | return | |
1477 | if self._progbar is not None and self._progbar.printed: |
|
1484 | if self._progbar is not None and self._progbar.printed: | |
1478 | self._progbar.clear() |
|
1485 | self._progbar.clear() | |
1479 |
|
1486 | |||
1480 | def progress(self, topic, pos, item="", unit="", total=None): |
|
1487 | def progress(self, topic, pos, item="", unit="", total=None): | |
1481 | '''show a progress message |
|
1488 | '''show a progress message | |
1482 |
|
1489 | |||
1483 | By default a textual progress bar will be displayed if an operation |
|
1490 | By default a textual progress bar will be displayed if an operation | |
1484 | takes too long. 'topic' is the current operation, 'item' is a |
|
1491 | takes too long. 'topic' is the current operation, 'item' is a | |
1485 | non-numeric marker of the current position (i.e. the currently |
|
1492 | non-numeric marker of the current position (i.e. the currently | |
1486 | in-process file), 'pos' is the current numeric position (i.e. |
|
1493 | in-process file), 'pos' is the current numeric position (i.e. | |
1487 | revision, bytes, etc.), unit is a corresponding unit label, |
|
1494 | revision, bytes, etc.), unit is a corresponding unit label, | |
1488 | and total is the highest expected pos. |
|
1495 | and total is the highest expected pos. | |
1489 |
|
1496 | |||
1490 | Multiple nested topics may be active at a time. |
|
1497 | Multiple nested topics may be active at a time. | |
1491 |
|
1498 | |||
1492 | All topics should be marked closed by setting pos to None at |
|
1499 | All topics should be marked closed by setting pos to None at | |
1493 | termination. |
|
1500 | termination. | |
1494 | ''' |
|
1501 | ''' | |
1495 | if self._progbar is not None: |
|
1502 | if self._progbar is not None: | |
1496 | self._progbar.progress(topic, pos, item=item, unit=unit, |
|
1503 | self._progbar.progress(topic, pos, item=item, unit=unit, | |
1497 | total=total) |
|
1504 | total=total) | |
1498 | if pos is None or not self.configbool('progress', 'debug'): |
|
1505 | if pos is None or not self.configbool('progress', 'debug'): | |
1499 | return |
|
1506 | return | |
1500 |
|
1507 | |||
1501 | if unit: |
|
1508 | if unit: | |
1502 | unit = ' ' + unit |
|
1509 | unit = ' ' + unit | |
1503 | if item: |
|
1510 | if item: | |
1504 | item = ' ' + item |
|
1511 | item = ' ' + item | |
1505 |
|
1512 | |||
1506 | if total: |
|
1513 | if total: | |
1507 | pct = 100.0 * pos / total |
|
1514 | pct = 100.0 * pos / total | |
1508 | self.debug('%s:%s %s/%s%s (%4.2f%%)\n' |
|
1515 | self.debug('%s:%s %s/%s%s (%4.2f%%)\n' | |
1509 | % (topic, item, pos, total, unit, pct)) |
|
1516 | % (topic, item, pos, total, unit, pct)) | |
1510 | else: |
|
1517 | else: | |
1511 | self.debug('%s:%s %s%s\n' % (topic, item, pos, unit)) |
|
1518 | self.debug('%s:%s %s%s\n' % (topic, item, pos, unit)) | |
1512 |
|
1519 | |||
1513 | def log(self, service, *msg, **opts): |
|
1520 | def log(self, service, *msg, **opts): | |
1514 | '''hook for logging facility extensions |
|
1521 | '''hook for logging facility extensions | |
1515 |
|
1522 | |||
1516 | service should be a readily-identifiable subsystem, which will |
|
1523 | service should be a readily-identifiable subsystem, which will | |
1517 | allow filtering. |
|
1524 | allow filtering. | |
1518 |
|
1525 | |||
1519 | *msg should be a newline-terminated format string to log, and |
|
1526 | *msg should be a newline-terminated format string to log, and | |
1520 | then any values to %-format into that format string. |
|
1527 | then any values to %-format into that format string. | |
1521 |
|
1528 | |||
1522 | **opts currently has no defined meanings. |
|
1529 | **opts currently has no defined meanings. | |
1523 | ''' |
|
1530 | ''' | |
1524 |
|
1531 | |||
1525 | def label(self, msg, label): |
|
1532 | def label(self, msg, label): | |
1526 | '''style msg based on supplied label |
|
1533 | '''style msg based on supplied label | |
1527 |
|
1534 | |||
1528 | If some color mode is enabled, this will add the necessary control |
|
1535 | If some color mode is enabled, this will add the necessary control | |
1529 | characters to apply such color. In addition, 'debug' color mode adds |
|
1536 | characters to apply such color. In addition, 'debug' color mode adds | |
1530 | markup showing which label affects a piece of text. |
|
1537 | markup showing which label affects a piece of text. | |
1531 |
|
1538 | |||
1532 | ui.write(s, 'label') is equivalent to |
|
1539 | ui.write(s, 'label') is equivalent to | |
1533 | ui.write(ui.label(s, 'label')). |
|
1540 | ui.write(ui.label(s, 'label')). | |
1534 | ''' |
|
1541 | ''' | |
1535 | if self._colormode is not None: |
|
1542 | if self._colormode is not None: | |
1536 | return color.colorlabel(self, msg, label) |
|
1543 | return color.colorlabel(self, msg, label) | |
1537 | return msg |
|
1544 | return msg | |
1538 |
|
1545 | |||
1539 | def develwarn(self, msg, stacklevel=1, config=None): |
|
1546 | def develwarn(self, msg, stacklevel=1, config=None): | |
1540 | """issue a developer warning message |
|
1547 | """issue a developer warning message | |
1541 |
|
1548 | |||
1542 | Use 'stacklevel' to report the offender some layers further up in the |
|
1549 | Use 'stacklevel' to report the offender some layers further up in the | |
1543 | stack. |
|
1550 | stack. | |
1544 | """ |
|
1551 | """ | |
1545 | if not self.configbool('devel', 'all-warnings'): |
|
1552 | if not self.configbool('devel', 'all-warnings'): | |
1546 | if config is not None and not self.configbool('devel', config): |
|
1553 | if config is not None and not self.configbool('devel', config): | |
1547 | return |
|
1554 | return | |
1548 | msg = 'devel-warn: ' + msg |
|
1555 | msg = 'devel-warn: ' + msg | |
1549 | stacklevel += 1 # get in develwarn |
|
1556 | stacklevel += 1 # get in develwarn | |
1550 | if self.tracebackflag: |
|
1557 | if self.tracebackflag: | |
1551 | util.debugstacktrace(msg, stacklevel, self.ferr, self.fout) |
|
1558 | util.debugstacktrace(msg, stacklevel, self.ferr, self.fout) | |
1552 | self.log('develwarn', '%s at:\n%s' % |
|
1559 | self.log('develwarn', '%s at:\n%s' % | |
1553 | (msg, ''.join(util.getstackframes(stacklevel)))) |
|
1560 | (msg, ''.join(util.getstackframes(stacklevel)))) | |
1554 | else: |
|
1561 | else: | |
1555 | curframe = inspect.currentframe() |
|
1562 | curframe = inspect.currentframe() | |
1556 | calframe = inspect.getouterframes(curframe, 2) |
|
1563 | calframe = inspect.getouterframes(curframe, 2) | |
1557 | self.write_err('%s at: %s:%s (%s)\n' |
|
1564 | self.write_err('%s at: %s:%s (%s)\n' | |
1558 | % ((msg,) + calframe[stacklevel][1:4])) |
|
1565 | % ((msg,) + calframe[stacklevel][1:4])) | |
1559 | self.log('develwarn', '%s at: %s:%s (%s)\n', |
|
1566 | self.log('develwarn', '%s at: %s:%s (%s)\n', | |
1560 | msg, *calframe[stacklevel][1:4]) |
|
1567 | msg, *calframe[stacklevel][1:4]) | |
1561 | curframe = calframe = None # avoid cycles |
|
1568 | curframe = calframe = None # avoid cycles | |
1562 |
|
1569 | |||
1563 | def deprecwarn(self, msg, version): |
|
1570 | def deprecwarn(self, msg, version): | |
1564 | """issue a deprecation warning |
|
1571 | """issue a deprecation warning | |
1565 |
|
1572 | |||
1566 | - msg: message explaining what is deprecated and how to upgrade, |
|
1573 | - msg: message explaining what is deprecated and how to upgrade, | |
1567 | - version: last version where the API will be supported, |
|
1574 | - version: last version where the API will be supported, | |
1568 | """ |
|
1575 | """ | |
1569 | if not (self.configbool('devel', 'all-warnings') |
|
1576 | if not (self.configbool('devel', 'all-warnings') | |
1570 | or self.configbool('devel', 'deprec-warn')): |
|
1577 | or self.configbool('devel', 'deprec-warn')): | |
1571 | return |
|
1578 | return | |
1572 | msg += ("\n(compatibility will be dropped after Mercurial-%s," |
|
1579 | msg += ("\n(compatibility will be dropped after Mercurial-%s," | |
1573 | " update your code.)") % version |
|
1580 | " update your code.)") % version | |
1574 | self.develwarn(msg, stacklevel=2, config='deprec-warn') |
|
1581 | self.develwarn(msg, stacklevel=2, config='deprec-warn') | |
1575 |
|
1582 | |||
1576 | def exportableenviron(self): |
|
1583 | def exportableenviron(self): | |
1577 | """The environment variables that are safe to export, e.g. through |
|
1584 | """The environment variables that are safe to export, e.g. through | |
1578 | hgweb. |
|
1585 | hgweb. | |
1579 | """ |
|
1586 | """ | |
1580 | return self._exportableenviron |
|
1587 | return self._exportableenviron | |
1581 |
|
1588 | |||
1582 | @contextlib.contextmanager |
|
1589 | @contextlib.contextmanager | |
1583 | def configoverride(self, overrides, source=""): |
|
1590 | def configoverride(self, overrides, source=""): | |
1584 | """Context manager for temporary config overrides |
|
1591 | """Context manager for temporary config overrides | |
1585 | `overrides` must be a dict of the following structure: |
|
1592 | `overrides` must be a dict of the following structure: | |
1586 | {(section, name) : value}""" |
|
1593 | {(section, name) : value}""" | |
1587 | backups = {} |
|
1594 | backups = {} | |
1588 | try: |
|
1595 | try: | |
1589 | for (section, name), value in overrides.items(): |
|
1596 | for (section, name), value in overrides.items(): | |
1590 | backups[(section, name)] = self.backupconfig(section, name) |
|
1597 | backups[(section, name)] = self.backupconfig(section, name) | |
1591 | self.setconfig(section, name, value, source) |
|
1598 | self.setconfig(section, name, value, source) | |
1592 | yield |
|
1599 | yield | |
1593 | finally: |
|
1600 | finally: | |
1594 | for __, backup in backups.items(): |
|
1601 | for __, backup in backups.items(): | |
1595 | self.restoreconfig(backup) |
|
1602 | self.restoreconfig(backup) | |
1596 | # just restoring ui.quiet config to the previous value is not enough |
|
1603 | # just restoring ui.quiet config to the previous value is not enough | |
1597 | # as it does not update ui.quiet class member |
|
1604 | # as it does not update ui.quiet class member | |
1598 | if ('ui', 'quiet') in overrides: |
|
1605 | if ('ui', 'quiet') in overrides: | |
1599 | self.fixconfig(section='ui') |
|
1606 | self.fixconfig(section='ui') | |
1600 |
|
1607 | |||
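The `configoverride` context manager above backs up each overridden key, applies the override, and restores the backups on exit (re-running `fixconfig` when `ui.quiet` is among the overrides). A minimal usage sketch follows; it assumes the usual `ui.load()` constructor and `status()` method from elsewhere in the module, neither of which appears in this excerpt.

# illustration only -- not part of the file shown above
from mercurial import ui as uimod

u = uimod.ui.load()   # assumed constructor; reads the normal config files
with u.configoverride({('ui', 'quiet'): True}, source='example'):
    u.status(b'hidden while ui.quiet is forced on\n')      # suppressed
u.status(b'printed again once the override is restored\n')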
class paths(dict):
    """Represents a collection of paths and their configs.

    Data is initially derived from ui instances and the config files they have
    loaded.
    """
    def __init__(self, ui):
        dict.__init__(self)

        for name, loc in ui.configitems('paths', ignoresub=True):
            # No location is the same as not existing.
            if not loc:
                continue
            loc, sub = ui.configsuboptions('paths', name)
            self[name] = path(ui, name, rawloc=loc, suboptions=sub)

    def getpath(self, name, default=None):
        """Return a ``path`` from a string, falling back to default.

        ``name`` can be a named path or locations. Locations are filesystem
        paths or URIs.

        Returns None if ``name`` is not a registered path, a URI, or a local
        path to a repo.
        """
        # Only fall back to default if no path was requested.
        if name is None:
            if not default:
                default = ()
            elif not isinstance(default, (tuple, list)):
                default = (default,)
            for k in default:
                try:
                    return self[k]
                except KeyError:
                    continue
            return None

        # Most likely empty string.
        # This may need to raise in the future.
        if not name:
            return None

        try:
            return self[name]
        except KeyError:
            # Try to resolve as a local path or URI.
            try:
                # We don't pass sub-options in, so no need to pass ui instance.
                return path(None, None, rawloc=name)
            except ValueError:
                raise error.RepoError(_('repository %s does not exist') %
                                      name)
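`getpath` honors an explicit name first, then falls back through the supplied defaults, and finally tries to treat the string as a bare URL or local repository path. A small sketch of that lookup order; it assumes a `ui` instance `u` whose `[paths]` config defines `default`, and the path names used are purely illustrative.

# illustration only -- not part of the file shown above
pathcol = paths(u)                              # u: a mercurial ui instance (assumed)
p = pathcol.getpath(None, default=('default-push', 'default'))
if p is not None:
    u.write(b'%s -> %s\n' % (p.name, p.loc))    # first configured fallback wins
adhoc = pathcol.getpath('https://example.org/repo')   # bare URL: built as an ad-hoc path object
# pathcol.getpath('no-such-name')               # would raise error.RepoError: neither
#                                               # configured nor a URL/local repo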
_pathsuboptions = {}

def pathsuboption(option, attr):
    """Decorator used to declare a path sub-option.

    Arguments are the sub-option name and the attribute it should set on
    ``path`` instances.

    The decorated function will receive as arguments a ``ui`` instance,
    ``path`` instance, and the string value of this option from the config.
    The function should return the value that will be set on the ``path``
    instance.

    This decorator can be used to perform additional verification of
    sub-options and to change the type of sub-options.
    """
    def register(func):
        _pathsuboptions[option] = (attr, func)
        return func
    return register

@pathsuboption('pushurl', 'pushloc')
def pushurlpathoption(ui, path, value):
    u = util.url(value)
    # Actually require a URL.
    if not u.scheme:
        ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
        return None

    # Don't support the #foo syntax in the push URL to declare branch to
    # push.
    if u.fragment:
        ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
                  'ignoring)\n') % path.name)
        u.fragment = None

    return str(u)

@pathsuboption('pushrev', 'pushrev')
def pushrevpathoption(ui, path, value):
    return value
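As the `pathsuboption` docstring describes, an extension can register additional sub-options: the decorated function validates or converts the raw config string, and its return value becomes an attribute on the `path` instance. A hedged sketch of such a registration; the `timeout` sub-option and its int coercion are invented for illustration and are not part of the code above.

# illustration only -- not part of the file shown above
@pathsuboption('timeout', 'timeout')
def timeoutpathoption(ui, path, value):
    # coerce the raw config string to an int, warning and ignoring otherwise
    try:
        return int(value)
    except ValueError:
        ui.warn(_('(paths.%s:timeout is not an integer; ignoring)\n')
                % path.name)
        return None

With this registered, a `default:timeout = 30` entry under `[paths]` would surface as `path.timeout == 30` on the corresponding `path` instance.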
class path(object):
    """Represents an individual path and its configuration."""

    def __init__(self, ui, name, rawloc=None, suboptions=None):
        """Construct a path from its config options.

        ``ui`` is the ``ui`` instance the path is coming from.
        ``name`` is the symbolic name of the path.
        ``rawloc`` is the raw location, as defined in the config.
        ``pushloc`` is the raw locations pushes should be made to.

        If ``name`` is not defined, we require that the location be a) a local
        filesystem path with a .hg directory or b) a URL. If not,
        ``ValueError`` is raised.
        """
        if not rawloc:
            raise ValueError('rawloc must be defined')

        # Locations may define branches via syntax <base>#<branch>.
        u = util.url(rawloc)
        branch = None
        if u.fragment:
            branch = u.fragment
            u.fragment = None

        self.url = u
        self.branch = branch

        self.name = name
        self.rawloc = rawloc
        self.loc = '%s' % u

        # When given a raw location but not a symbolic name, validate the
        # location is valid.
        if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
            raise ValueError('location is not a URL or path to a local '
                             'repo: %s' % rawloc)

        suboptions = suboptions or {}

        # Now process the sub-options. If a sub-option is registered, its
        # attribute will always be present. The value will be None if there
        # was no valid sub-option.
        for suboption, (attr, func) in _pathsuboptions.iteritems():
            if suboption not in suboptions:
                setattr(self, attr, None)
                continue

            value = func(ui, self, suboptions[suboption])
            setattr(self, attr, value)

    def _isvalidlocalpath(self, path):
        """Returns True if the given path is a potentially valid repository.
        This is its own function so that extensions can change the definition of
        'valid' in this case (like when pulling from a git repo into a hg
        one)."""
        return os.path.isdir(os.path.join(path, '.hg'))

    @property
    def suboptions(self):
        """Return sub-options and their values for this path.

        This is intended to be used for presentation purposes.
        """
        d = {}
        for subopt, (attr, _func) in _pathsuboptions.iteritems():
            value = getattr(self, attr)
            if value is not None:
                d[subopt] = value
        return d

# we instantiate one globally shared progress bar to avoid
# competing progress bars when multiple UI objects get created
_progresssingleton = None

def getprogbar(ui):
    global _progresssingleton
    if _progresssingleton is None:
        # passing 'ui' object to the singleton is fishy,
        # this is how the extension used to work but feel free to rework it.
        _progresssingleton = progress.progbar(ui)
    return _progresssingleton
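The `progress()` docstring earlier in the excerpt asks callers to close every topic by passing `pos=None`, which lets the shared progress bar returned by `getprogbar` clear itself. A minimal sketch of that protocol, again assuming a constructed `ui` instance `u`; the topic and file names are illustrative.

# illustration only -- not part of the file shown above
files = [b'a.txt', b'b.txt', b'c.txt']
for i, name in enumerate(files):
    u.progress(b'examining', i, item=name, unit=b'files', total=len(files))
u.progress(b'examining', None)   # close the topic so the bar is cleared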